gcc.git blame: gcc/expr.c
bbf6f052 1/* Convert tree expression to rtl instructions, for GNU compiler.
99310285 2 Copyright (C) 1988, 1992, 1993, 1994 Free Software Foundation, Inc.
bbf6f052
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21#include "config.h"
ca695ac9 22#include "machmode.h"
bbf6f052
RK
23#include "rtl.h"
24#include "tree.h"
ca695ac9 25#include "obstack.h"
bbf6f052
RK
26#include "flags.h"
27#include "function.h"
28#include "insn-flags.h"
29#include "insn-codes.h"
30#include "expr.h"
31#include "insn-config.h"
32#include "recog.h"
33#include "output.h"
bbf6f052
RK
34#include "typeclass.h"
35
ca695ac9
JB
36#include "bytecode.h"
37#include "bc-opcode.h"
38#include "bc-typecd.h"
39#include "bc-optab.h"
40#include "bc-emit.h"
41
42
bbf6f052
RK
43#define CEIL(x,y) (((x) + (y) - 1) / (y))
44
45/* Decide whether a function's arguments should be processed
bbc8a071
RK
46 from first to last or from last to first.
47
48 They should if the stack and args grow in opposite directions, but
49 only if we have push insns. */
bbf6f052 50
bbf6f052 51#ifdef PUSH_ROUNDING
bbc8a071 52
3319a347 53#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
bbf6f052
RK
54#define PUSH_ARGS_REVERSED /* If it's last to first */
55#endif
bbc8a071 56
bbf6f052
RK
57#endif
58
59#ifndef STACK_PUSH_CODE
60#ifdef STACK_GROWS_DOWNWARD
61#define STACK_PUSH_CODE PRE_DEC
62#else
63#define STACK_PUSH_CODE PRE_INC
64#endif
65#endif
66
67/* Like STACK_BOUNDARY but in units of bytes, not bits. */
68#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
69
70/* If this is nonzero, we do not bother generating VOLATILE
71 around volatile memory references, and we are willing to
72 output indirect addresses. If cse is to follow, we reject
73 indirect addresses so a useful potential cse is generated;
74 if it is used only once, instruction combination will produce
75 the same indirect address eventually. */
76int cse_not_expected;
77
78/* Nonzero to generate code for all the subroutines within an
79 expression before generating the upper levels of the expression.
80 Nowadays this is never zero. */
81int do_preexpand_calls = 1;
82
83/* Number of units that we should eventually pop off the stack.
84 These are the arguments to function calls that have already returned. */
85int pending_stack_adjust;
86
87/* Nonzero means stack pops must not be deferred, and deferred stack
88 pops must not be output. It is nonzero inside a function call,
89 inside a conditional expression, inside a statement expression,
90 and in other cases as well. */
91int inhibit_defer_pop;
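/* Illustrative sketch, not part of the original file: code that needs
   the stack pointer to stay put brackets the region with the expr.h
   macros that bump this counter, and forces out any pending
   adjustment first:

	do_pending_stack_adjust ();
	NO_DEFER_POP;
	... emit insns that assume a known stack pointer ...
	OK_DEFER_POP;

   NO_DEFER_POP and OK_DEFER_POP just increment and decrement
   inhibit_defer_pop.  */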
92
93/* A list of all cleanups which belong to the arguments of
94 function calls being expanded by expand_call. */
95tree cleanups_this_call;
96
97/* Nonzero means __builtin_saveregs has already been done in this function.
98 The value is the pseudoreg containing the value __builtin_saveregs
99 returned. */
100static rtx saveregs_value;
101
dcf76fff
TW
102/* Similarly for __builtin_apply_args. */
103static rtx apply_args_value;
104
4969d05d
RK
105/* This structure is used by move_by_pieces to describe the move to
106 be performed. */
107
108struct move_by_pieces
109{
110 rtx to;
111 rtx to_addr;
112 int autinc_to;
113 int explicit_inc_to;
114 rtx from;
115 rtx from_addr;
116 int autinc_from;
117 int explicit_inc_from;
118 int len;
119 int offset;
120 int reverse;
121};
122
c02bd5d9
JB
123/* Used to generate bytecodes: keep track of size of local variables,
124 as well as depth of arithmetic stack. (Notice that variables are
125 stored on the machine's stack, not the arithmetic stack.) */
126
186f92ce 127extern int local_vars_size;
c02bd5d9
JB
128extern int stack_depth;
129extern int max_stack_depth;
292b1216 130extern struct obstack permanent_obstack;
c02bd5d9
JB
131
132
4969d05d
RK
133static rtx enqueue_insn PROTO((rtx, rtx));
134static int queued_subexp_p PROTO((rtx));
135static void init_queue PROTO((void));
136static void move_by_pieces PROTO((rtx, rtx, int, int));
137static int move_by_pieces_ninsns PROTO((unsigned int, int));
138static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
139 struct move_by_pieces *));
4969d05d
RK
140static void store_constructor PROTO((tree, rtx));
141static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
142 enum machine_mode, int, int, int));
143static tree save_noncopied_parts PROTO((tree, tree));
144static tree init_noncopied_parts PROTO((tree, tree));
145static int safe_from_p PROTO((rtx, tree));
146static int fixed_type_p PROTO((tree));
147static int get_pointer_alignment PROTO((tree, unsigned));
148static tree string_constant PROTO((tree, tree *));
149static tree c_strlen PROTO((tree));
307b821c
RK
150static rtx expand_builtin PROTO((tree, rtx, rtx,
151 enum machine_mode, int));
0006469d
TW
152static int apply_args_size PROTO((void));
153static int apply_result_size PROTO((void));
154static rtx result_vector PROTO((int, rtx));
155static rtx expand_builtin_apply_args PROTO((void));
156static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
157static void expand_builtin_return PROTO((rtx));
4969d05d 158static rtx expand_increment PROTO((tree, int));
ca695ac9
JB
159rtx bc_expand_increment PROTO((struct increment_operator *, tree));
160tree bc_runtime_type_code PROTO((tree));
161rtx bc_allocate_local PROTO((int, int));
162void bc_store_memory PROTO((tree, tree));
163tree bc_expand_component_address PROTO((tree));
164tree bc_expand_address PROTO((tree));
165void bc_expand_constructor PROTO((tree));
166void bc_adjust_stack PROTO((int));
167tree bc_canonicalize_array_ref PROTO((tree));
168void bc_load_memory PROTO((tree, tree));
169void bc_load_externaddr PROTO((rtx));
170void bc_load_externaddr_id PROTO((tree, int));
171void bc_load_localaddr PROTO((rtx));
172void bc_load_parmaddr PROTO((rtx));
4969d05d
RK
173static void preexpand_calls PROTO((tree));
174static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
f81497d9 175static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
4969d05d
RK
176static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
177static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
178static void do_jump_for_compare PROTO((rtx, rtx, rtx));
179static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
180static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
bbf6f052 181
4fa52007
RK
182/* Record for each mode whether we can move a register directly to or
183 from an object of that mode in memory. If we can't, we won't try
184 to use that mode directly when accessing a field of that mode. */
185
186static char direct_load[NUM_MACHINE_MODES];
187static char direct_store[NUM_MACHINE_MODES];
188
bbf6f052
RK
189/* MOVE_RATIO is the number of move instructions that is better than
190 a block move. */
191
192#ifndef MOVE_RATIO
266007a7 193#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
bbf6f052
RK
194#define MOVE_RATIO 2
195#else
196/* A value of around 6 would minimize code size; infinity would minimize
197 execution time. */
198#define MOVE_RATIO 15
199#endif
200#endif
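/* Illustrative sketch, not part of the original file, of how
   MOVE_RATIO is consumed by emit_block_move further down:

	if (GET_CODE (size) == CONST_INT
	    && move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO)
	  move_by_pieces (x, y, INTVAL (size), align);
	else
	  ... try a movstrM pattern, then fall back to memcpy/bcopy ...

   With the default of 15, e.g., an 8-byte word-aligned copy (2 move
   insns on a 32-bit target) is always expanded inline.  */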
e87b4f3f 201
266007a7 202/* This array records the insn_code of insns to perform block moves. */
e6677db3 203enum insn_code movstr_optab[NUM_MACHINE_MODES];
266007a7 204
e87b4f3f
RS
205/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
206
207#ifndef SLOW_UNALIGNED_ACCESS
208#define SLOW_UNALIGNED_ACCESS 0
209#endif
0006469d
TW
210
211/* Register mappings for target machines without register windows. */
212#ifndef INCOMING_REGNO
213#define INCOMING_REGNO(OUT) (OUT)
214#endif
215#ifndef OUTGOING_REGNO
216#define OUTGOING_REGNO(IN) (IN)
217#endif
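/* Illustrative note, not part of the original file: on a register-window
   target such as SPARC these macros map between a caller's outgoing
   register and the register the callee sees it as, e.g. (assuming the
   usual SPARC numbering where %o0 is hard reg 8 and %i0 is hard reg 24)

	INCOMING_REGNO (8) == 24

   while on targets without windows they are the identity maps above.  */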
bbf6f052 218\f
ca695ac9
JB
219/* Maps used to convert modes to const, load, and store bytecodes. */
220enum bytecode_opcode mode_to_const_map[MAX_MACHINE_MODE];
221enum bytecode_opcode mode_to_load_map[MAX_MACHINE_MODE];
222enum bytecode_opcode mode_to_store_map[MAX_MACHINE_MODE];
223
224/* Initialize maps used to convert modes to const, load, and store
225 bytecodes. */
226void
227bc_init_mode_to_opcode_maps ()
228{
229 int mode;
230
6bd6178d 231 for (mode = 0; mode < (int) MAX_MACHINE_MODE; mode++)
ca695ac9
JB
232 mode_to_const_map[mode] =
233 mode_to_load_map[mode] =
234 mode_to_store_map[mode] = neverneverland;
235
236#define DEF_MODEMAP(SYM, CODE, UCODE, CONST, LOAD, STORE) \
6bd6178d
RK
237 mode_to_const_map[(int) SYM] = CONST; \
238 mode_to_load_map[(int) SYM] = LOAD; \
239 mode_to_store_map[(int) SYM] = STORE;
ca695ac9
JB
240
241#include "modemap.def"
242#undef DEF_MODEMAP
243}
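/* Illustrative sketch; the entry below is hypothetical, not copied
   from modemap.def.  Each line of that file has the shape

	DEF_MODEMAP (SImode, SIcode, SIucode, constSI, loadSI, storeSI)

   which the definition above expands into

	mode_to_const_map[(int) SImode] = constSI;
	mode_to_load_map[(int) SImode] = loadSI;
	mode_to_store_map[(int) SImode] = storeSI;  */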
244\f
4fa52007 245/* This is run once per compilation to set up which modes can be used
266007a7 246 directly in memory and to initialize the block move optab. */
4fa52007
RK
247
248void
249init_expr_once ()
250{
251 rtx insn, pat;
252 enum machine_mode mode;
e2549997
RS
253 /* Try indexing by frame ptr and try by stack ptr.
254 It is known that on the Convex the stack ptr isn't a valid index.
255 With luck, one or the other is valid on any machine. */
4fa52007 256 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
e2549997 257 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
4fa52007
RK
258
259 start_sequence ();
260 insn = emit_insn (gen_rtx (SET, 0, 0));
261 pat = PATTERN (insn);
262
263 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
264 mode = (enum machine_mode) ((int) mode + 1))
265 {
266 int regno;
267 rtx reg;
268 int num_clobbers;
269
270 direct_load[(int) mode] = direct_store[(int) mode] = 0;
271 PUT_MODE (mem, mode);
e2549997 272 PUT_MODE (mem1, mode);
4fa52007 273
e6fe56a4
RK
274 /* See if there is some register that can be used in this mode and
275 directly loaded or stored from memory. */
276
7308a047
RS
277 if (mode != VOIDmode && mode != BLKmode)
278 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
279 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
280 regno++)
281 {
282 if (! HARD_REGNO_MODE_OK (regno, mode))
283 continue;
e6fe56a4 284
7308a047 285 reg = gen_rtx (REG, mode, regno);
e6fe56a4 286
7308a047
RS
287 SET_SRC (pat) = mem;
288 SET_DEST (pat) = reg;
289 if (recog (pat, insn, &num_clobbers) >= 0)
290 direct_load[(int) mode] = 1;
e6fe56a4 291
e2549997
RS
292 SET_SRC (pat) = mem1;
293 SET_DEST (pat) = reg;
294 if (recog (pat, insn, &num_clobbers) >= 0)
295 direct_load[(int) mode] = 1;
296
7308a047
RS
297 SET_SRC (pat) = reg;
298 SET_DEST (pat) = mem;
299 if (recog (pat, insn, &num_clobbers) >= 0)
300 direct_store[(int) mode] = 1;
e2549997
RS
301
302 SET_SRC (pat) = reg;
303 SET_DEST (pat) = mem1;
304 if (recog (pat, insn, &num_clobbers) >= 0)
305 direct_store[(int) mode] = 1;
7308a047 306 }
4fa52007
RK
307 }
308
309 end_sequence ();
310}
311
bbf6f052
RK
312/* This is run at the start of compiling a function. */
313
314void
315init_expr ()
316{
317 init_queue ();
318
319 pending_stack_adjust = 0;
320 inhibit_defer_pop = 0;
321 cleanups_this_call = 0;
322 saveregs_value = 0;
0006469d 323 apply_args_value = 0;
e87b4f3f 324 forced_labels = 0;
bbf6f052
RK
325}
326
327/* Save all variables describing the current status into the structure *P.
328 This is used before starting a nested function. */
329
330void
331save_expr_status (p)
332 struct function *p;
333{
334 /* Instead of saving the postincrement queue, empty it. */
335 emit_queue ();
336
337 p->pending_stack_adjust = pending_stack_adjust;
338 p->inhibit_defer_pop = inhibit_defer_pop;
339 p->cleanups_this_call = cleanups_this_call;
340 p->saveregs_value = saveregs_value;
0006469d 341 p->apply_args_value = apply_args_value;
e87b4f3f 342 p->forced_labels = forced_labels;
bbf6f052
RK
343
344 pending_stack_adjust = 0;
345 inhibit_defer_pop = 0;
346 cleanups_this_call = 0;
347 saveregs_value = 0;
0006469d 348 apply_args_value = 0;
e87b4f3f 349 forced_labels = 0;
bbf6f052
RK
350}
351
352/* Restore all variables describing the current status from the structure *P.
353 This is used after a nested function. */
354
355void
356restore_expr_status (p)
357 struct function *p;
358{
359 pending_stack_adjust = p->pending_stack_adjust;
360 inhibit_defer_pop = p->inhibit_defer_pop;
361 cleanups_this_call = p->cleanups_this_call;
362 saveregs_value = p->saveregs_value;
0006469d 363 apply_args_value = p->apply_args_value;
e87b4f3f 364 forced_labels = p->forced_labels;
bbf6f052
RK
365}
366\f
367/* Manage the queue of increment instructions to be output
368 for POSTINCREMENT_EXPR expressions, etc. */
369
370static rtx pending_chain;
371
372/* Queue up to increment (or change) VAR later. BODY says how:
373 BODY should be the same thing you would pass to emit_insn
374 to increment right away. It will go to emit_insn later on.
375
376 The value is a QUEUED expression to be used in place of VAR
377 where you want to guarantee the pre-incrementation value of VAR. */
378
379static rtx
380enqueue_insn (var, body)
381 rtx var, body;
382{
383 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
906c4e36 384 var, NULL_RTX, NULL_RTX, body, pending_chain);
bbf6f052
RK
385 return pending_chain;
386}
387
388/* Use protect_from_queue to convert a QUEUED expression
389 into something that you can put immediately into an instruction.
390 If the queued incrementation has not happened yet,
391 protect_from_queue returns the variable itself.
392 If the incrementation has happened, protect_from_queue returns a temp
393 that contains a copy of the old value of the variable.
394
395 Any time an rtx which might possibly be a QUEUED is to be put
396 into an instruction, it must be passed through protect_from_queue first.
397 QUEUED expressions are not meaningful in instructions.
398
399 Do not pass a value through protect_from_queue and then hold
400 on to it for a while before putting it in an instruction!
401 If the queue is flushed in between, incorrect code will result. */
402
403rtx
404protect_from_queue (x, modify)
405 register rtx x;
406 int modify;
407{
408 register RTX_CODE code = GET_CODE (x);
409
410#if 0 /* A QUEUED can hang around after the queue is forced out. */
411 /* Shortcut for most common case. */
412 if (pending_chain == 0)
413 return x;
414#endif
415
416 if (code != QUEUED)
417 {
e9baa644
RK
418 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
419 use of autoincrement. Make a copy of the contents of the memory
420 location rather than a copy of the address, but not if the value is
421 of mode BLKmode. Don't modify X in place since it might be
422 shared. */
bbf6f052
RK
423 if (code == MEM && GET_MODE (x) != BLKmode
424 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
425 {
426 register rtx y = XEXP (x, 0);
e9baa644
RK
427 register rtx new = gen_rtx (MEM, GET_MODE (x), QUEUED_VAR (y));
428
429 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
430 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
431 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
432
bbf6f052
RK
433 if (QUEUED_INSN (y))
434 {
e9baa644
RK
435 register rtx temp = gen_reg_rtx (GET_MODE (new));
436 emit_insn_before (gen_move_insn (temp, new),
bbf6f052
RK
437 QUEUED_INSN (y));
438 return temp;
439 }
e9baa644 440 return new;
bbf6f052
RK
441 }
442 /* Otherwise, recursively protect the subexpressions of all
443 the kinds of rtx's that can contain a QUEUED. */
444 if (code == MEM)
3f15938e
RS
445 {
446 rtx tem = protect_from_queue (XEXP (x, 0), 0);
447 if (tem != XEXP (x, 0))
448 {
449 x = copy_rtx (x);
450 XEXP (x, 0) = tem;
451 }
452 }
bbf6f052
RK
453 else if (code == PLUS || code == MULT)
454 {
3f15938e
RS
455 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
456 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
457 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
458 {
459 x = copy_rtx (x);
460 XEXP (x, 0) = new0;
461 XEXP (x, 1) = new1;
462 }
bbf6f052
RK
463 }
464 return x;
465 }
466 /* If the increment has not happened, use the variable itself. */
467 if (QUEUED_INSN (x) == 0)
468 return QUEUED_VAR (x);
469 /* If the increment has happened and a pre-increment copy exists,
470 use that copy. */
471 if (QUEUED_COPY (x) != 0)
472 return QUEUED_COPY (x);
473 /* The increment has happened but we haven't set up a pre-increment copy.
474 Set one up now, and use it. */
475 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
476 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
477 QUEUED_INSN (x));
478 return QUEUED_COPY (x);
479}
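/* Illustrative sketch, not part of the original file, of the queue's
   life cycle when a postincrement such as `i++' is used as an operand
   (the surrounding declarations are hypothetical):

	rtx i_rtx = expand_expr (i, NULL_RTX, VOIDmode, EXPAND_NORMAL);
	rtx q = enqueue_insn (i_rtx, gen_add2_insn (i_rtx, const1_rtx));

	... build insns using protect_from_queue (q, 0), which yields
	the pre-increment value of i_rtx ...

	emit_queue ();

   Before emit_queue runs, protect_from_queue returns QUEUED_VAR itself;
   afterwards it returns (or creates) the QUEUED_COPY made just before
   the queued increment.  */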
480
481/* Return nonzero if X contains a QUEUED expression:
482 if it contains anything that will be altered by a queued increment.
483 We handle only combinations of MEM, PLUS, MINUS and MULT operators
484 since memory addresses generally contain only those. */
485
486static int
487queued_subexp_p (x)
488 rtx x;
489{
490 register enum rtx_code code = GET_CODE (x);
491 switch (code)
492 {
493 case QUEUED:
494 return 1;
495 case MEM:
496 return queued_subexp_p (XEXP (x, 0));
497 case MULT:
498 case PLUS:
499 case MINUS:
500 return queued_subexp_p (XEXP (x, 0))
501 || queued_subexp_p (XEXP (x, 1));
502 }
503 return 0;
504}
505
506/* Perform all the pending incrementations. */
507
508void
509emit_queue ()
510{
511 register rtx p;
512 while (p = pending_chain)
513 {
514 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
515 pending_chain = QUEUED_NEXT (p);
516 }
517}
518
519static void
520init_queue ()
521{
522 if (pending_chain)
523 abort ();
524}
525\f
526/* Copy data from FROM to TO, where the machine modes are not the same.
527 Both modes may be integer, or both may be floating.
528 UNSIGNEDP should be nonzero if FROM is an unsigned type.
529 This causes zero-extension instead of sign-extension. */
530
531void
532convert_move (to, from, unsignedp)
533 register rtx to, from;
534 int unsignedp;
535{
536 enum machine_mode to_mode = GET_MODE (to);
537 enum machine_mode from_mode = GET_MODE (from);
538 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
539 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
540 enum insn_code code;
541 rtx libcall;
542
543 /* rtx code for making an equivalent value. */
544 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
545
546 to = protect_from_queue (to, 1);
547 from = protect_from_queue (from, 0);
548
549 if (to_real != from_real)
550 abort ();
551
1499e0a8
RK
552 /* If FROM is a SUBREG that indicates that we have already done at least
553 the required extension, strip it. We don't handle such SUBREGs as
554 TO here. */
555
556 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
557 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
558 >= GET_MODE_SIZE (to_mode))
559 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
560 from = gen_lowpart (to_mode, from), from_mode = to_mode;
561
562 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
563 abort ();
564
bbf6f052
RK
565 if (to_mode == from_mode
566 || (from_mode == VOIDmode && CONSTANT_P (from)))
567 {
568 emit_move_insn (to, from);
569 return;
570 }
571
572 if (to_real)
573 {
81d79e2c
RS
574 rtx value;
575
b424402e
RS
576#ifdef HAVE_extendqfhf2
577 if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
578 {
579 emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
580 return;
581 }
582#endif
583#ifdef HAVE_extendqfsf2
584 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
585 {
586 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
587 return;
588 }
589#endif
590#ifdef HAVE_extendqfdf2
591 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
592 {
593 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
594 return;
595 }
596#endif
597#ifdef HAVE_extendqfxf2
598 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
599 {
600 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
601 return;
602 }
603#endif
604#ifdef HAVE_extendqftf2
605 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
606 {
607 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
608 return;
609 }
610#endif
611
612#ifdef HAVE_extendhfsf2
613 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
614 {
615 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
616 return;
617 }
618#endif
619#ifdef HAVE_extendhfdf2
620 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
621 {
622 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
623 return;
624 }
625#endif
626#ifdef HAVE_extendhfxf2
627 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
628 {
629 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
630 return;
631 }
632#endif
633#ifdef HAVE_extendhftf2
634 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
635 {
636 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
637 return;
638 }
639#endif
640
bbf6f052
RK
641#ifdef HAVE_extendsfdf2
642 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
643 {
644 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
645 return;
646 }
647#endif
b092b471
JW
648#ifdef HAVE_extendsfxf2
649 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
650 {
651 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
652 return;
653 }
654#endif
bbf6f052
RK
655#ifdef HAVE_extendsftf2
656 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
657 {
658 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
659 return;
660 }
661#endif
b092b471
JW
662#ifdef HAVE_extenddfxf2
663 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
664 {
665 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
666 return;
667 }
668#endif
bbf6f052
RK
669#ifdef HAVE_extenddftf2
670 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
671 {
672 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
673 return;
674 }
675#endif
b424402e
RS
676
677#ifdef HAVE_trunchfqf2
678 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
679 {
680 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
681 return;
682 }
683#endif
684#ifdef HAVE_truncsfqf2
685 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
686 {
687 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
688 return;
689 }
690#endif
691#ifdef HAVE_truncdfqf2
692 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
693 {
694 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
695 return;
696 }
697#endif
698#ifdef HAVE_truncxfqf2
699 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
700 {
701 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
702 return;
703 }
704#endif
705#ifdef HAVE_trunctfqf2
706 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
707 {
708 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
709 return;
710 }
711#endif
712#ifdef HAVE_truncsfhf2
713 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
714 {
715 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
716 return;
717 }
718#endif
719#ifdef HAVE_truncdfhf2
720 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
721 {
722 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
723 return;
724 }
725#endif
726#ifdef HAVE_truncxfhf2
727 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
728 {
729 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
730 return;
731 }
732#endif
733#ifdef HAVE_trunctfhf2
734 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
735 {
736 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
737 return;
738 }
739#endif
bbf6f052
RK
740#ifdef HAVE_truncdfsf2
741 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
742 {
743 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
744 return;
745 }
746#endif
b092b471
JW
747#ifdef HAVE_truncxfsf2
748 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
749 {
750 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
751 return;
752 }
753#endif
bbf6f052
RK
754#ifdef HAVE_trunctfsf2
755 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
756 {
757 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
758 return;
759 }
760#endif
b092b471
JW
761#ifdef HAVE_truncxfdf2
762 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
763 {
764 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
765 return;
766 }
767#endif
bbf6f052
RK
768#ifdef HAVE_trunctfdf2
769 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
770 {
771 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
772 return;
773 }
774#endif
775
b092b471
JW
776 libcall = (rtx) 0;
777 switch (from_mode)
778 {
779 case SFmode:
780 switch (to_mode)
781 {
782 case DFmode:
783 libcall = extendsfdf2_libfunc;
784 break;
785
786 case XFmode:
787 libcall = extendsfxf2_libfunc;
788 break;
789
790 case TFmode:
791 libcall = extendsftf2_libfunc;
792 break;
793 }
794 break;
795
796 case DFmode:
797 switch (to_mode)
798 {
799 case SFmode:
800 libcall = truncdfsf2_libfunc;
801 break;
802
803 case XFmode:
804 libcall = extenddfxf2_libfunc;
805 break;
806
807 case TFmode:
808 libcall = extenddftf2_libfunc;
809 break;
810 }
811 break;
812
813 case XFmode:
814 switch (to_mode)
815 {
816 case SFmode:
817 libcall = truncxfsf2_libfunc;
818 break;
819
820 case DFmode:
821 libcall = truncxfdf2_libfunc;
822 break;
823 }
824 break;
825
826 case TFmode:
827 switch (to_mode)
828 {
829 case SFmode:
830 libcall = trunctfsf2_libfunc;
831 break;
832
833 case DFmode:
834 libcall = trunctfdf2_libfunc;
835 break;
836 }
837 break;
838 }
839
840 if (libcall == (rtx) 0)
841 /* This conversion is not implemented yet. */
bbf6f052
RK
842 abort ();
843
81d79e2c
RS
844 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
845 1, from, from_mode);
846 emit_move_insn (to, value);
bbf6f052
RK
847 return;
848 }
849
850 /* Now both modes are integers. */
851
852 /* Handle expanding beyond a word. */
853 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
854 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
855 {
856 rtx insns;
857 rtx lowpart;
858 rtx fill_value;
859 rtx lowfrom;
860 int i;
861 enum machine_mode lowpart_mode;
862 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
863
864 /* Try converting directly if the insn is supported. */
865 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
866 != CODE_FOR_nothing)
867 {
cd1b4b44
RK
868 /* If FROM is a SUBREG, put it into a register. Do this
869 so that we always generate the same set of insns for
870 better cse'ing; if an intermediate assignment occurred,
871 we won't be doing the operation directly on the SUBREG. */
872 if (optimize > 0 && GET_CODE (from) == SUBREG)
873 from = force_reg (from_mode, from);
bbf6f052
RK
874 emit_unop_insn (code, to, from, equiv_code);
875 return;
876 }
877 /* Next, try converting via full word. */
878 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
879 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
880 != CODE_FOR_nothing))
881 {
a81fee56
RS
882 if (GET_CODE (to) == REG)
883 emit_insn (gen_rtx (CLOBBER, VOIDmode, to));
bbf6f052
RK
884 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
885 emit_unop_insn (code, to,
886 gen_lowpart (word_mode, to), equiv_code);
887 return;
888 }
889
890 /* No special multiword conversion insn; do it by hand. */
891 start_sequence ();
892
893 /* Get a copy of FROM widened to a word, if necessary. */
894 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
895 lowpart_mode = word_mode;
896 else
897 lowpart_mode = from_mode;
898
899 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
900
901 lowpart = gen_lowpart (lowpart_mode, to);
902 emit_move_insn (lowpart, lowfrom);
903
904 /* Compute the value to put in each remaining word. */
905 if (unsignedp)
906 fill_value = const0_rtx;
907 else
908 {
909#ifdef HAVE_slt
910 if (HAVE_slt
911 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
912 && STORE_FLAG_VALUE == -1)
913 {
906c4e36
RK
914 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
915 lowpart_mode, 0, 0);
bbf6f052
RK
916 fill_value = gen_reg_rtx (word_mode);
917 emit_insn (gen_slt (fill_value));
918 }
919 else
920#endif
921 {
922 fill_value
923 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
924 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
906c4e36 925 NULL_RTX, 0);
bbf6f052
RK
926 fill_value = convert_to_mode (word_mode, fill_value, 1);
927 }
928 }
929
930 /* Fill the remaining words. */
931 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
932 {
933 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
934 rtx subword = operand_subword (to, index, 1, to_mode);
935
936 if (subword == 0)
937 abort ();
938
939 if (fill_value != subword)
940 emit_move_insn (subword, fill_value);
941 }
942
943 insns = get_insns ();
944 end_sequence ();
945
906c4e36 946 emit_no_conflict_block (insns, to, from, NULL_RTX,
2abec1b7 947 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
bbf6f052
RK
948 return;
949 }
950
d3c64ee3
RS
951 /* Truncating multi-word to a word or less. */
952 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
953 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
bbf6f052 954 {
431a6eca
JW
955 if (!((GET_CODE (from) == MEM
956 && ! MEM_VOLATILE_P (from)
957 && direct_load[(int) to_mode]
958 && ! mode_dependent_address_p (XEXP (from, 0)))
959 || GET_CODE (from) == REG
960 || GET_CODE (from) == SUBREG))
961 from = force_reg (from_mode, from);
bbf6f052
RK
962 convert_move (to, gen_lowpart (word_mode, from), 0);
963 return;
964 }
965
966 /* Handle pointer conversion */ /* SPEE 900220 */
967 if (to_mode == PSImode)
968 {
969 if (from_mode != SImode)
970 from = convert_to_mode (SImode, from, unsignedp);
971
972#ifdef HAVE_truncsipsi
973 if (HAVE_truncsipsi)
974 {
975 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
976 return;
977 }
978#endif /* HAVE_truncsipsi */
979 abort ();
980 }
981
982 if (from_mode == PSImode)
983 {
984 if (to_mode != SImode)
985 {
986 from = convert_to_mode (SImode, from, unsignedp);
987 from_mode = SImode;
988 }
989 else
990 {
991#ifdef HAVE_extendpsisi
992 if (HAVE_extendpsisi)
993 {
994 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
995 return;
996 }
997#endif /* HAVE_extendpsisi */
998 abort ();
999 }
1000 }
1001
1002 /* Now follow all the conversions between integers
1003 no more than a word long. */
1004
1005 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1006 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1007 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
d3c64ee3 1008 GET_MODE_BITSIZE (from_mode)))
bbf6f052 1009 {
d3c64ee3
RS
1010 if (!((GET_CODE (from) == MEM
1011 && ! MEM_VOLATILE_P (from)
1012 && direct_load[(int) to_mode]
1013 && ! mode_dependent_address_p (XEXP (from, 0)))
1014 || GET_CODE (from) == REG
1015 || GET_CODE (from) == SUBREG))
1016 from = force_reg (from_mode, from);
bbf6f052
RK
1017 emit_move_insn (to, gen_lowpart (to_mode, from));
1018 return;
1019 }
1020
d3c64ee3 1021 /* Handle extension. */
bbf6f052
RK
1022 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1023 {
1024 /* Convert directly if that works. */
1025 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1026 != CODE_FOR_nothing)
1027 {
3dc4195c
RK
1028 /* If FROM is a SUBREG, put it into a register. Do this
1029 so that we always generate the same set of insns for
1030 better cse'ing; if an intermediate assignment occurred,
1031 we won't be doing the operation directly on the SUBREG. */
1032 if (optimize > 0 && GET_CODE (from) == SUBREG)
1033 from = force_reg (from_mode, from);
bbf6f052
RK
1034 emit_unop_insn (code, to, from, equiv_code);
1035 return;
1036 }
1037 else
1038 {
1039 enum machine_mode intermediate;
1040
1041 /* Search for a mode to convert via. */
1042 for (intermediate = from_mode; intermediate != VOIDmode;
1043 intermediate = GET_MODE_WIDER_MODE (intermediate))
1044 if ((can_extend_p (to_mode, intermediate, unsignedp)
1045 != CODE_FOR_nothing)
1046 && (can_extend_p (intermediate, from_mode, unsignedp)
1047 != CODE_FOR_nothing))
1048 {
1049 convert_move (to, convert_to_mode (intermediate, from,
1050 unsignedp), unsignedp);
1051 return;
1052 }
1053
1054 /* No suitable intermediate mode. */
1055 abort ();
1056 }
1057 }
1058
1059 /* Support special truncate insns for certain modes. */
1060
1061 if (from_mode == DImode && to_mode == SImode)
1062 {
1063#ifdef HAVE_truncdisi2
1064 if (HAVE_truncdisi2)
1065 {
1066 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1067 return;
1068 }
1069#endif
1070 convert_move (to, force_reg (from_mode, from), unsignedp);
1071 return;
1072 }
1073
1074 if (from_mode == DImode && to_mode == HImode)
1075 {
1076#ifdef HAVE_truncdihi2
1077 if (HAVE_truncdihi2)
1078 {
1079 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1080 return;
1081 }
1082#endif
1083 convert_move (to, force_reg (from_mode, from), unsignedp);
1084 return;
1085 }
1086
1087 if (from_mode == DImode && to_mode == QImode)
1088 {
1089#ifdef HAVE_truncdiqi2
1090 if (HAVE_truncdiqi2)
1091 {
1092 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1093 return;
1094 }
1095#endif
1096 convert_move (to, force_reg (from_mode, from), unsignedp);
1097 return;
1098 }
1099
1100 if (from_mode == SImode && to_mode == HImode)
1101 {
1102#ifdef HAVE_truncsihi2
1103 if (HAVE_truncsihi2)
1104 {
1105 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1106 return;
1107 }
1108#endif
1109 convert_move (to, force_reg (from_mode, from), unsignedp);
1110 return;
1111 }
1112
1113 if (from_mode == SImode && to_mode == QImode)
1114 {
1115#ifdef HAVE_truncsiqi2
1116 if (HAVE_truncsiqi2)
1117 {
1118 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1119 return;
1120 }
1121#endif
1122 convert_move (to, force_reg (from_mode, from), unsignedp);
1123 return;
1124 }
1125
1126 if (from_mode == HImode && to_mode == QImode)
1127 {
1128#ifdef HAVE_trunchiqi2
1129 if (HAVE_trunchiqi2)
1130 {
1131 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1132 return;
1133 }
1134#endif
1135 convert_move (to, force_reg (from_mode, from), unsignedp);
1136 return;
1137 }
1138
1139 /* Handle truncation of volatile memrefs, and so on;
1140 the things that couldn't be truncated directly,
1141 and for which there was no special instruction. */
1142 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1143 {
1144 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1145 emit_move_insn (to, temp);
1146 return;
1147 }
1148
1149 /* Mode combination is not recognized. */
1150 abort ();
1151}
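/* Illustrative use, not part of the original file: sign-extending an
   SImode pseudo FROM into a fresh DImode register takes one of the
   integer paths above, a direct extendsidi2 when the target provides
   it, otherwise the hand-made multiword expansion that fills the high
   word from the sign bit:

	rtx to = gen_reg_rtx (DImode);
	convert_move (to, from, 0);		(unsignedp == 0)  */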
1152
1153/* Return an rtx for a value that would result
1154 from converting X to mode MODE.
1155 Both X and MODE may be floating, or both integer.
1156 UNSIGNEDP is nonzero if X is an unsigned value.
1157 This can be done by referring to a part of X in place
5d901c31
RS
1158 or by copying to a new temporary with conversion.
1159
1160 This function *must not* call protect_from_queue
1161 except when putting X into an insn (in which case convert_move does it). */
bbf6f052
RK
1162
1163rtx
1164convert_to_mode (mode, x, unsignedp)
1165 enum machine_mode mode;
1166 rtx x;
1167 int unsignedp;
5ffe63ed
RS
1168{
1169 return convert_modes (mode, VOIDmode, x, unsignedp);
1170}
1171
1172/* Return an rtx for a value that would result
1173 from converting X from mode OLDMODE to mode MODE.
1174 Both modes may be floating, or both integer.
1175 UNSIGNEDP is nonzero if X is an unsigned value.
1176
1177 This can be done by referring to a part of X in place
1178 or by copying to a new temporary with conversion.
1179
1180 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1181
1182 This function *must not* call protect_from_queue
1183 except when putting X into an insn (in which case convert_move does it). */
1184
1185rtx
1186convert_modes (mode, oldmode, x, unsignedp)
1187 enum machine_mode mode, oldmode;
1188 rtx x;
1189 int unsignedp;
bbf6f052
RK
1190{
1191 register rtx temp;
5ffe63ed 1192
1499e0a8
RK
1193 /* If FROM is a SUBREG that indicates that we have already done at least
1194 the required extension, strip it. */
1195
1196 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1197 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1198 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1199 x = gen_lowpart (mode, x);
bbf6f052 1200
64791b18
RK
1201 if (GET_MODE (x) != VOIDmode)
1202 oldmode = GET_MODE (x);
1203
5ffe63ed 1204 if (mode == oldmode)
bbf6f052
RK
1205 return x;
1206
1207 /* There is one case that we must handle specially: If we are converting
906c4e36 1208 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
bbf6f052
RK
1209 we are to interpret the constant as unsigned, gen_lowpart will do
1210 the wrong thing if the constant appears negative. What we want to do is
1211 make the high-order word of the constant zero, not all ones. */
1212
1213 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
906c4e36 1214 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
bbf6f052 1215 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
906c4e36 1216 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
bbf6f052
RK
1217
1218 /* We can do this with a gen_lowpart if both desired and current modes
1219 are integer, and this is either a constant integer, a register, or a
ba2e110c
RK
1220 non-volatile MEM. Except for the constant case where MODE is no
1221 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
bbf6f052 1222
ba2e110c
RK
1223 if ((GET_CODE (x) == CONST_INT
1224 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 1225 || (GET_MODE_CLASS (mode) == MODE_INT
5ffe63ed 1226 && GET_MODE_CLASS (oldmode) == MODE_INT
bbf6f052 1227 && (GET_CODE (x) == CONST_DOUBLE
5ffe63ed 1228 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
d57c66da
JW
1229 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1230 && direct_load[(int) mode])
2bf29316
JW
1231 || (GET_CODE (x) == REG
1232 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1233 GET_MODE_BITSIZE (GET_MODE (x)))))))))
ba2e110c
RK
1234 {
1235 /* ?? If we don't know OLDMODE, we have to assume here that
1236 X does not need sign- or zero-extension. This may not be
1237 the case, but it's the best we can do. */
1238 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1239 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1240 {
1241 HOST_WIDE_INT val = INTVAL (x);
1242 int width = GET_MODE_BITSIZE (oldmode);
1243
1244 /* We must sign or zero-extend in this case. Start by
1245 zero-extending, then sign extend if we need to. */
1246 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1247 if (! unsignedp
1248 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1249 val |= (HOST_WIDE_INT) (-1) << width;
1250
1251 return GEN_INT (val);
1252 }
1253
1254 return gen_lowpart (mode, x);
1255 }
bbf6f052
RK
1256
1257 temp = gen_reg_rtx (mode);
1258 convert_move (temp, x, unsignedp);
1259 return temp;
1260}
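/* Worked example, illustration only: convert_modes (SImode, QImode,
   GEN_INT (-1), 1) takes the CONST_INT widening path above with
   val == -1 and width == 8,

	val &= ((HOST_WIDE_INT) 1 << 8) - 1;	leaves 0xff
	unsignedp is nonzero, so no sign bit is copied back in

   and returns (const_int 255), where a bare gen_lowpart would have
   kept (const_int -1).  */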
1261\f
1262/* Generate several move instructions to copy LEN bytes
1263 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1264 The caller must pass FROM and TO
1265 through protect_from_queue before calling.
1266 ALIGN (in bytes) is maximum alignment we can assume. */
1267
bbf6f052
RK
1268static void
1269move_by_pieces (to, from, len, align)
1270 rtx to, from;
1271 int len, align;
1272{
1273 struct move_by_pieces data;
1274 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
e87b4f3f 1275 int max_size = MOVE_MAX + 1;
bbf6f052
RK
1276
1277 data.offset = 0;
1278 data.to_addr = to_addr;
1279 data.from_addr = from_addr;
1280 data.to = to;
1281 data.from = from;
1282 data.autinc_to
1283 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1284 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1285 data.autinc_from
1286 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1287 || GET_CODE (from_addr) == POST_INC
1288 || GET_CODE (from_addr) == POST_DEC);
1289
1290 data.explicit_inc_from = 0;
1291 data.explicit_inc_to = 0;
1292 data.reverse
1293 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1294 if (data.reverse) data.offset = len;
1295 data.len = len;
1296
1297 /* If copying requires more than two move insns,
1298 copy addresses to registers (to make displacements shorter)
1299 and use post-increment if available. */
1300 if (!(data.autinc_from && data.autinc_to)
1301 && move_by_pieces_ninsns (len, align) > 2)
1302 {
1303#ifdef HAVE_PRE_DECREMENT
1304 if (data.reverse && ! data.autinc_from)
1305 {
1306 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1307 data.autinc_from = 1;
1308 data.explicit_inc_from = -1;
1309 }
1310#endif
1311#ifdef HAVE_POST_INCREMENT
1312 if (! data.autinc_from)
1313 {
1314 data.from_addr = copy_addr_to_reg (from_addr);
1315 data.autinc_from = 1;
1316 data.explicit_inc_from = 1;
1317 }
1318#endif
1319 if (!data.autinc_from && CONSTANT_P (from_addr))
1320 data.from_addr = copy_addr_to_reg (from_addr);
1321#ifdef HAVE_PRE_DECREMENT
1322 if (data.reverse && ! data.autinc_to)
1323 {
1324 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1325 data.autinc_to = 1;
1326 data.explicit_inc_to = -1;
1327 }
1328#endif
1329#ifdef HAVE_POST_INCREMENT
1330 if (! data.reverse && ! data.autinc_to)
1331 {
1332 data.to_addr = copy_addr_to_reg (to_addr);
1333 data.autinc_to = 1;
1334 data.explicit_inc_to = 1;
1335 }
1336#endif
1337 if (!data.autinc_to && CONSTANT_P (to_addr))
1338 data.to_addr = copy_addr_to_reg (to_addr);
1339 }
1340
e87b4f3f
RS
1341 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1342 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
bbf6f052 1343 align = MOVE_MAX;
bbf6f052
RK
1344
1345 /* First move what we can in the largest integer mode, then go to
1346 successively smaller modes. */
1347
1348 while (max_size > 1)
1349 {
1350 enum machine_mode mode = VOIDmode, tmode;
1351 enum insn_code icode;
1352
e7c33f54
RK
1353 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1354 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1355 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1356 mode = tmode;
1357
1358 if (mode == VOIDmode)
1359 break;
1360
1361 icode = mov_optab->handlers[(int) mode].insn_code;
1362 if (icode != CODE_FOR_nothing
1363 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1364 GET_MODE_SIZE (mode)))
1365 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1366
1367 max_size = GET_MODE_SIZE (mode);
1368 }
1369
1370 /* The code above should have handled everything. */
1371 if (data.len != 0)
1372 abort ();
1373}
1374
1375/* Return number of insns required to move L bytes by pieces.
1376 ALIGN (in bytes) is maximum alignment we can assume. */
1377
1378static int
1379move_by_pieces_ninsns (l, align)
1380 unsigned int l;
1381 int align;
1382{
1383 register int n_insns = 0;
e87b4f3f 1384 int max_size = MOVE_MAX + 1;
bbf6f052 1385
e87b4f3f
RS
1386 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1387 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
bbf6f052 1388 align = MOVE_MAX;
bbf6f052
RK
1389
1390 while (max_size > 1)
1391 {
1392 enum machine_mode mode = VOIDmode, tmode;
1393 enum insn_code icode;
1394
e7c33f54
RK
1395 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1396 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1397 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1398 mode = tmode;
1399
1400 if (mode == VOIDmode)
1401 break;
1402
1403 icode = mov_optab->handlers[(int) mode].insn_code;
1404 if (icode != CODE_FOR_nothing
1405 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1406 GET_MODE_SIZE (mode)))
1407 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1408
1409 max_size = GET_MODE_SIZE (mode);
1410 }
1411
1412 return n_insns;
1413}
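/* Worked example, illustration only: on a 32-bit target with
   MOVE_MAX == 4, move_by_pieces_ninsns (7, 4) walks SImode, HImode,
   QImode in turn:

	SImode: 7 / 4 = 1 insn, 3 bytes remain
	HImode: 3 / 2 = 1 insn, 1 byte remains
	QImode: 1 / 1 = 1 insn, 0 bytes remain

   so the 7-byte copy costs 3 move insns, well under the default
   MOVE_RATIO of 15, and emit_block_move below will expand it inline.  */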
1414
1415/* Subroutine of move_by_pieces. Move as many bytes as appropriate
1416 with move instructions for mode MODE. GENFUN is the gen_... function
1417 to make a move insn for that mode. DATA has all the other info. */
1418
1419static void
1420move_by_pieces_1 (genfun, mode, data)
1421 rtx (*genfun) ();
1422 enum machine_mode mode;
1423 struct move_by_pieces *data;
1424{
1425 register int size = GET_MODE_SIZE (mode);
1426 register rtx to1, from1;
1427
1428 while (data->len >= size)
1429 {
1430 if (data->reverse) data->offset -= size;
1431
1432 to1 = (data->autinc_to
1433 ? gen_rtx (MEM, mode, data->to_addr)
1434 : change_address (data->to, mode,
1435 plus_constant (data->to_addr, data->offset)));
1436 from1 =
1437 (data->autinc_from
1438 ? gen_rtx (MEM, mode, data->from_addr)
1439 : change_address (data->from, mode,
1440 plus_constant (data->from_addr, data->offset)));
1441
1442#ifdef HAVE_PRE_DECREMENT
1443 if (data->explicit_inc_to < 0)
906c4e36 1444 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
bbf6f052 1445 if (data->explicit_inc_from < 0)
906c4e36 1446 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
bbf6f052
RK
1447#endif
1448
1449 emit_insn ((*genfun) (to1, from1));
1450#ifdef HAVE_POST_INCREMENT
1451 if (data->explicit_inc_to > 0)
906c4e36 1452 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
bbf6f052 1453 if (data->explicit_inc_from > 0)
906c4e36 1454 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
bbf6f052
RK
1455#endif
1456
1457 if (! data->reverse) data->offset += size;
1458
1459 data->len -= size;
1460 }
1461}
1462\f
1463/* Emit code to move a block Y to a block X.
1464 This may be done with string-move instructions,
1465 with multiple scalar move instructions, or with a library call.
1466
1467 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1468 with mode BLKmode.
1469 SIZE is an rtx that says how long they are.
1470 ALIGN is the maximum alignment we can assume they have,
1471 measured in bytes. */
1472
1473void
1474emit_block_move (x, y, size, align)
1475 rtx x, y;
1476 rtx size;
1477 int align;
1478{
1479 if (GET_MODE (x) != BLKmode)
1480 abort ();
1481
1482 if (GET_MODE (y) != BLKmode)
1483 abort ();
1484
1485 x = protect_from_queue (x, 1);
1486 y = protect_from_queue (y, 0);
5d901c31 1487 size = protect_from_queue (size, 0);
bbf6f052
RK
1488
1489 if (GET_CODE (x) != MEM)
1490 abort ();
1491 if (GET_CODE (y) != MEM)
1492 abort ();
1493 if (size == 0)
1494 abort ();
1495
1496 if (GET_CODE (size) == CONST_INT
906c4e36 1497 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
bbf6f052
RK
1498 move_by_pieces (x, y, INTVAL (size), align);
1499 else
1500 {
1501 /* Try the most limited insn first, because there's no point
1502 including more than one in the machine description unless
1503 the more limited one has some advantage. */
266007a7 1504
0bba3f6f 1505 rtx opalign = GEN_INT (align);
266007a7
RK
1506 enum machine_mode mode;
1507
1508 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1509 mode = GET_MODE_WIDER_MODE (mode))
bbf6f052 1510 {
266007a7 1511 enum insn_code code = movstr_optab[(int) mode];
266007a7
RK
1512
1513 if (code != CODE_FOR_nothing
803090c4
RK
1514 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1515 here because if SIZE is less than the mode mask, as it is
8008b228 1516 returned by the macro, it will definitely be less than the
803090c4 1517 actual mode mask. */
f85b95d1 1518 && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
0bba3f6f
RK
1519 && (insn_operand_predicate[(int) code][0] == 0
1520 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1521 && (insn_operand_predicate[(int) code][1] == 0
1522 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1523 && (insn_operand_predicate[(int) code][3] == 0
1524 || (*insn_operand_predicate[(int) code][3]) (opalign,
1525 VOIDmode)))
bbf6f052 1526 {
1ba1e2a8 1527 rtx op2;
266007a7
RK
1528 rtx last = get_last_insn ();
1529 rtx pat;
1530
1ba1e2a8 1531 op2 = convert_to_mode (mode, size, 1);
0bba3f6f
RK
1532 if (insn_operand_predicate[(int) code][2] != 0
1533 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
266007a7
RK
1534 op2 = copy_to_mode_reg (mode, op2);
1535
1536 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1537 if (pat)
1538 {
1539 emit_insn (pat);
1540 return;
1541 }
1542 else
1543 delete_insns_since (last);
bbf6f052
RK
1544 }
1545 }
bbf6f052
RK
1546
1547#ifdef TARGET_MEM_FUNCTIONS
d562e42e 1548 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
1549 VOIDmode, 3, XEXP (x, 0), Pmode,
1550 XEXP (y, 0), Pmode,
0fa83258
RK
1551 convert_to_mode (TYPE_MODE (sizetype), size,
1552 TREE_UNSIGNED (sizetype)),
1553 TYPE_MODE (sizetype));
bbf6f052 1554#else
d562e42e 1555 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
1556 VOIDmode, 3, XEXP (y, 0), Pmode,
1557 XEXP (x, 0), Pmode,
0fa83258
RK
1558 convert_to_mode (TYPE_MODE (sizetype), size,
1559 TREE_UNSIGNED (sizetype)),
1560 TYPE_MODE (sizetype));
bbf6f052
RK
1561#endif
1562 }
1563}
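/* Illustrative call, not part of the original file: a structure
   assignment `*p = *q' of a 12-byte type with 4-byte alignment reaches
   this routine roughly as

	emit_block_move (x, y, GEN_INT (12), 4);

   and, move_by_pieces_ninsns (12, 4) being 3 < MOVE_RATIO, is expanded
   into three SImode moves rather than a movstrsi pattern or a library
   call.  */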
1564\f
1565/* Copy all or part of a value X into registers starting at REGNO.
1566 The number of registers to be filled is NREGS. */
1567
1568void
1569move_block_to_reg (regno, x, nregs, mode)
1570 int regno;
1571 rtx x;
1572 int nregs;
1573 enum machine_mode mode;
1574{
1575 int i;
1576 rtx pat, last;
1577
1578 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1579 x = validize_mem (force_const_mem (mode, x));
1580
1581 /* See if the machine can do this with a load multiple insn. */
1582#ifdef HAVE_load_multiple
c3a02afe 1583 if (HAVE_load_multiple)
bbf6f052 1584 {
c3a02afe
RK
1585 last = get_last_insn ();
1586 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1587 GEN_INT (nregs));
1588 if (pat)
1589 {
1590 emit_insn (pat);
1591 return;
1592 }
1593 else
1594 delete_insns_since (last);
bbf6f052 1595 }
bbf6f052
RK
1596#endif
1597
1598 for (i = 0; i < nregs; i++)
1599 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1600 operand_subword_force (x, i, mode));
1601}
1602
1603/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
0040593d
JW
1604 The number of registers to be filled is NREGS. SIZE indicates the number
1605 of bytes in the object X. */
1606
bbf6f052
RK
1607
1608void
0040593d 1609move_block_from_reg (regno, x, nregs, size)
bbf6f052
RK
1610 int regno;
1611 rtx x;
1612 int nregs;
0040593d 1613 int size;
bbf6f052
RK
1614{
1615 int i;
1616 rtx pat, last;
1617
0040593d
JW
1618 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1619 to the left before storing to memory. */
1620 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1621 {
1622 rtx tem = operand_subword (x, 0, 1, BLKmode);
1623 rtx shift;
1624
1625 if (tem == 0)
1626 abort ();
1627
1628 shift = expand_shift (LSHIFT_EXPR, word_mode,
1629 gen_rtx (REG, word_mode, regno),
1630 build_int_2 ((UNITS_PER_WORD - size)
1631 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1632 emit_move_insn (tem, shift);
1633 return;
1634 }
1635
bbf6f052
RK
1636 /* See if the machine can do this with a store multiple insn. */
1637#ifdef HAVE_store_multiple
c3a02afe 1638 if (HAVE_store_multiple)
bbf6f052 1639 {
c3a02afe
RK
1640 last = get_last_insn ();
1641 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1642 GEN_INT (nregs));
1643 if (pat)
1644 {
1645 emit_insn (pat);
1646 return;
1647 }
1648 else
1649 delete_insns_since (last);
bbf6f052 1650 }
bbf6f052
RK
1651#endif
1652
1653 for (i = 0; i < nregs; i++)
1654 {
1655 rtx tem = operand_subword (x, i, 1, BLKmode);
1656
1657 if (tem == 0)
1658 abort ();
1659
1660 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1661 }
1662}
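/* Worked example of the big-endian case above, illustration only:
   a 3-byte BLKmode value held in a 4-byte word register on a
   BYTES_BIG_ENDIAN machine is first shifted left by

	(UNITS_PER_WORD - size) * BITS_PER_UNIT = (4 - 3) * 8 = 8 bits

   so that its 3 meaningful bytes occupy the low addresses once the
   word is stored.  */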
1663
1664/* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1665
1666void
1667use_regs (regno, nregs)
1668 int regno;
1669 int nregs;
1670{
1671 int i;
1672
1673 for (i = 0; i < nregs; i++)
1674 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1675}
1676\f
1677/* Write zeros through the storage of OBJECT.
1678 If OBJECT has BLKmode, SIZE is its length in bytes. */
1679
1680void
1681clear_storage (object, size)
1682 rtx object;
1683 int size;
1684{
1685 if (GET_MODE (object) == BLKmode)
1686 {
1687#ifdef TARGET_MEM_FUNCTIONS
d562e42e 1688 emit_library_call (memset_libfunc, 0,
bbf6f052
RK
1689 VOIDmode, 3,
1690 XEXP (object, 0), Pmode, const0_rtx, Pmode,
906c4e36 1691 GEN_INT (size), Pmode);
bbf6f052 1692#else
d562e42e 1693 emit_library_call (bzero_libfunc, 0,
bbf6f052
RK
1694 VOIDmode, 2,
1695 XEXP (object, 0), Pmode,
906c4e36 1696 GEN_INT (size), Pmode);
bbf6f052
RK
1697#endif
1698 }
1699 else
1700 emit_move_insn (object, const0_rtx);
1701}
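/* Illustrative call, not part of the original file: zeroing a 32-byte
   BLKmode temporary,

	clear_storage (object, 32);

   becomes the memset (or bzero) library call above, while an OBJECT
   with a scalar mode is simply assigned const0_rtx.  */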
1702
1703/* Generate code to copy Y into X.
1704 Both Y and X must have the same mode, except that
1705 Y can be a constant with VOIDmode.
1706 This mode cannot be BLKmode; use emit_block_move for that.
1707
1708 Return the last instruction emitted. */
1709
1710rtx
1711emit_move_insn (x, y)
1712 rtx x, y;
1713{
1714 enum machine_mode mode = GET_MODE (x);
7308a047
RS
1715 enum machine_mode submode;
1716 enum mode_class class = GET_MODE_CLASS (mode);
bbf6f052
RK
1717 int i;
1718
1719 x = protect_from_queue (x, 1);
1720 y = protect_from_queue (y, 0);
1721
1722 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1723 abort ();
1724
1725 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1726 y = force_const_mem (mode, y);
1727
1728 /* If X or Y are memory references, verify that their addresses are valid
1729 for the machine. */
1730 if (GET_CODE (x) == MEM
1731 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1732 && ! push_operand (x, GET_MODE (x)))
1733 || (flag_force_addr
1734 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1735 x = change_address (x, VOIDmode, XEXP (x, 0));
1736
1737 if (GET_CODE (y) == MEM
1738 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1739 || (flag_force_addr
1740 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1741 y = change_address (y, VOIDmode, XEXP (y, 0));
1742
1743 if (mode == BLKmode)
1744 abort ();
1745
261c4230
RS
1746 return emit_move_insn_1 (x, y);
1747}

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
    submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
                             (class == MODE_COMPLEX_INT
                              ? MODE_INT : MODE_FLOAT),
                             0);

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
           && submode != BLKmode
           && (mov_optab->handlers[(int) submode].insn_code
               != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx insns;

      start_sequence ();

      /* If this is a stack push, push the highpart first, so that it
         ends up in the proper argument order.

         In that case, change_address is used only to convert
         the mode, not to change the address.  */
      if (stack)
        {
          /* Note that the real part always precedes the imag part in memory
             regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx (MEM, submode, (XEXP (x, 0))),
                      gen_imagpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx (MEM, submode, (XEXP (x, 0))),
                      gen_realpart (submode, y)));
#else
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx (MEM, submode, (XEXP (x, 0))),
                      gen_realpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx (MEM, submode, (XEXP (x, 0))),
                      gen_imagpart (submode, y)));
#endif
        }
      else
        {
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_realpart (submode, x), gen_realpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
        }

      insns = get_insns ();
      end_sequence ();

      /* If X is a CONCAT, we got insns like RD = RS, ID = IS,
         each with a separate pseudo as destination.
         It's not correct for flow to treat them as a unit.  */
      if (GET_CODE (x) != CONCAT)
        emit_no_conflict_block (insns, x, y, NULL_RTX, NULL_RTX);
      else
        emit_insns (insns);

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx insns;

      start_sequence ();

      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          last_insn = emit_move_insn (xpart, ypart);
        }

      insns = get_insns ();
      end_sequence ();
      emit_no_conflict_block (insns, x, y, NULL_RTX, NULL_RTX);

      return last_insn;
    }
  else
    abort ();
}
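
/* Illustrative sketch, not part of the compiler: a minimal standalone
   version of the word-by-word decomposition above, assuming X and Y are
   register or memory rtx's of the same multi-word MODE.  It omits the
   start_sequence/emit_no_conflict_block bookkeeping the real code does.  */
#if 0
static void
example_multiword_move (x, y, mode)
     rtx x, y;
     enum machine_mode mode;
{
  int nwords = (GET_MODE_SIZE (mode) + UNITS_PER_WORD - 1) / UNITS_PER_WORD;
  int i;

  for (i = 0; i < nwords; i++)
    {
      /* The destination word must be directly addressable;
         the source word may be forced into a register.  */
      rtx xpart = operand_subword (x, i, 1, mode);
      rtx ypart = operand_subword_force (y, i, mode);

      if (xpart == 0)
        abort ();
      emit_move_insn (xpart, ypart);
    }
}
#endif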

/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
                          - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
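
/* Illustrative sketch, not part of the compiler: pushing a fixed-size
   block and copying data into it.  The function name, the 64-byte size,
   and the byte alignment are hypothetical example choices.  */
#if 0
static void
example_push_block (src)
     rtx src;			/* A BLKmode MEM holding at least 64 bytes.  */
{
  /* Reserve 64 bytes of stack; ADDR addresses the start of the block.  */
  rtx addr = push_block (GEN_INT (64), 0, 0);
  rtx dest = gen_rtx (MEM, BLKmode, addr);

  /* Copy the data there, assuming only byte alignment.  */
  emit_block_move (dest, src, GEN_INT (64), 1);
}
#endif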

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = change_address (xinner, BLKmode,
                                 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
              < MOVE_RATIO)
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
              || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
              || PUSH_ROUNDING (align) == align)
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
                          INTVAL (size) - used, align);
        }
      else
#endif /* PUSH_ROUNDING */
        {
          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx (PLUS, Pmode,
                                                           args_addr, args_so_far),
                                                  skip));

          /* TEMP is the address of the block.  Copy the data there.  */
          if (GET_CODE (size) == CONST_INT
              && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
                  < MOVE_RATIO))
            {
              move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
                              INTVAL (size), align);
              goto ret;
            }
          /* Try the most limited insn first, because there's no point
             including more than one in the machine description unless
             the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
          if (HAVE_movstrqi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
            {
              rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
                                      xinner, size, GEN_INT (align));
              if (pat != 0)
                {
                  emit_insn (pat);
                  goto ret;
                }
            }
#endif
#ifdef HAVE_movstrhi
          if (HAVE_movstrhi
              && GET_CODE (size) == CONST_INT
              && ((unsigned) INTVAL (size)
                  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
            {
              rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
                                      xinner, size, GEN_INT (align));
              if (pat != 0)
                {
                  emit_insn (pat);
                  goto ret;
                }
            }
#endif
#ifdef HAVE_movstrsi
          if (HAVE_movstrsi)
            {
              rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
                                      xinner, size, GEN_INT (align));
              if (pat != 0)
                {
                  emit_insn (pat);
                  goto ret;
                }
            }
#endif
#ifdef HAVE_movstrdi
          if (HAVE_movstrdi)
            {
              rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
                                      xinner, size, GEN_INT (align));
              if (pat != 0)
                {
                  emit_insn (pat);
                  goto ret;
                }
            }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
          /* If the source is referenced relative to the stack pointer,
             copy it to another register to stabilize it.  We do not need
             to do this if we know that we won't be changing sp.  */

          if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
              || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
            temp = copy_to_reg (temp);
#endif

          /* Make inhibit_defer_pop nonzero around the library call
             to force it to pop the bcopy-arguments right away.  */
          NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
          emit_library_call (memcpy_libfunc, 0,
                             VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
                             convert_to_mode (TYPE_MODE (sizetype),
                                              size, TREE_UNSIGNED (sizetype)),
                             TYPE_MODE (sizetype));
#else
          emit_library_call (bcopy_libfunc, 0,
                             VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
                             convert_to_mode (TYPE_MODE (sizetype),
                                              size, TREE_UNSIGNED (sizetype)),
                             TYPE_MODE (sizetype));
#endif
          OK_DEFER_POP;
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
        addr = gen_push_operand ();
      else
#endif
        if (GET_CODE (args_so_far) == CONST_INT)
          addr
            = memory_address (mode,
                              plus_constant (args_addr, INTVAL (args_so_far)));
        else
          addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
                                                args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
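
/* Illustrative sketch, not part of the compiler: pushing one SImode
   scalar argument on a machine with push insns and no preallocated
   argument block.  The function name, the alignment, and the absence
   of padding and partial registers are hypothetical example choices.  */
#if 0
static void
example_push_arg (x)
     rtx x;				/* An SImode value to pass.  */
{
  emit_push_insn (x, SImode, NULL_TREE, NULL_RTX,
                  GET_MODE_SIZE (SImode),	/* align, in bytes */
                  0, NULL_RTX, 0,		/* no partial, no reg, no extra */
                  NULL_RTX, const0_rtx);	/* no args_addr; nothing pushed yet */
}
#endif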

/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  if (output_bytecode)
    {
      tree dest_innermost;

      bc_expand_expr (from);
      bc_emit_instruction (duplicate);

      dest_innermost = bc_expand_address (to);

      /* Can't deduce from TYPE that we're dealing with a bitfield, so
         take care of it here.  */

      bc_store_memory (TREE_TYPE (to), dest_innermost);
      return NULL;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index
     has the same problem.  */

  if (TREE_CODE (to) == COMPONENT_REF
      || TREE_CODE (to) == BIT_FIELD_REF
      || (TREE_CODE (to) == ARRAY_REF
          && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;
      int alignment;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
                                 &mode1, &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
        tem = stabilize_reference (tem);

      alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
      if (offset != 0)
        {
          rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

          if (GET_CODE (to_rtx) != MEM)
            abort ();
          to_rtx = change_address (to_rtx, VOIDmode,
                                   gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
                                            force_reg (Pmode, offset_rtx)));
          /* If we have a variable offset, the known alignment
             is only that of the innermost structure containing the field.
             (Actually, we could sometimes do better by using the
             align of an element of the innermost array, but no need.)  */
          if (TREE_CODE (to) == COMPONENT_REF
              || TREE_CODE (to) == BIT_FIELD_REF)
            alignment
              = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (to, 0))) / BITS_PER_UNIT;
        }
      if (volatilep)
        {
          if (GET_CODE (to_rtx) == MEM)
            MEM_VOLATILE_P (to_rtx) = 1;
#if 0 /* This was turned off because, when a field is volatile
         in an object which is not volatile, the object may be in a register,
         and then we would abort over here.  */
          else
            abort ();
#endif
        }

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                            (want_value
                             /* Spurious cast makes HPUX compiler happy.  */
                             ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
                             : VOIDmode),
                            unsignedp,
                            /* Required alignment of containing datum.  */
                            alignment,
                            int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
         Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
                                          TYPE_MODE (TREE_TYPE (from)),
                                          result,
                                          TREE_UNSIGNED (TREE_TYPE (to)))
              : NULL_RTX);
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
     a promoted variable where the zero- or sign- extension needs to be done.
     Handling this in the normal way is safe because no computation is done
     before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
      emit_move_insn (to_rtx, value);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
      emit_move_insn (to_rtx, temp);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
                         VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                         XEXP (from_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype),
                                          size, TREE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
                         XEXP (to_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype),
                                          size, TREE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
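
/* Illustrative sketch, not part of the compiler: a front end expanding
   the statement `v = e;' for side effects only would call
   expand_assignment with WANT_VALUE == 0 and ignore the result.
   The function name is hypothetical.  */
#if 0
static void
example_expand_modify_stmt (lhs, rhs)
     tree lhs, rhs;
{
  expand_assignment (lhs, rhs, 0, 0);	/* value not wanted */
  emit_queue ();			/* flush pending postincrements */
}
#endif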

/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode this doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */

rtx
store_expr (exp, target, want_value)
     register tree exp;
     register rtx target;
     int want_value;
{
  register rtx temp;
  int dont_return_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;
      return want_value ? target : NULL_RTX;
    }
  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
           && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
                          GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
        temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
        {
          /* Expand EXP into a new pseudo.  */
          temp = gen_reg_rtx (GET_MODE (target));
          temp = expand_expr (exp, temp, GET_MODE (target), 0);
        }
      else
        temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);

      /* If target is volatile, ANSI requires accessing the value
         *from* the target, if it is accessed.  So make that happen.
         In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && want_value)
        dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
         sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
        temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                              TYPE_MODE (TREE_TYPE (exp)), temp,
                              SUBREG_PROMOTED_UNSIGNED_P (target));

      convert_move (SUBREG_REG (target), temp,
                    SUBREG_PROMOTED_UNSIGNED_P (target));
      return want_value ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* DO return TARGET if it's a specified hardware register.
         expand_return relies on this.
         If TARGET is a volatile mem ref, either return TARGET
         or return a reg copied *from* TARGET; ANSI requires this.

         Otherwise, if TEMP is not TARGET, return TEMP
         if it is constant (for efficiency),
         or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
            && REGNO (target) < FIRST_PSEUDO_REGISTER)
          && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          && temp != target
          && (CONSTANT_P (temp) || want_value))
        dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                          temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */

  if (temp != target && TREE_CODE (exp) != ERROR_MARK)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
          && GET_MODE (temp) != VOIDmode)
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
          if (dont_return_target)
            {
              /* In this case, we will return TEMP,
                 so make sure it has the proper mode.
                 But don't forget to store the value into TARGET.  */
              temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
              emit_move_insn (target, temp);
            }
          else
            convert_move (target, temp, unsignedp);
        }

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
        {
          /* Handle copying a string constant into an array.
             The string constant may be shorter than the array.
             So copy just the string's actual length, and clear the rest.  */
          rtx size;

          /* Get the size of the data type of the string,
             which is actually the size of the target.  */
          size = expr_size (exp);
          if (GET_CODE (size) == CONST_INT
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
            emit_block_move (target, temp, size,
                             TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
          else
            {
              /* Compute the size of the data to copy from the string.  */
              tree copy_size
                = size_binop (MIN_EXPR,
                              make_tree (sizetype, size),
                              convert (sizetype,
                                       build_int_2 (TREE_STRING_LENGTH (exp), 0)));
              rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
                                               VOIDmode, 0);
              rtx label = 0;

              /* Copy that much.  */
              emit_block_move (target, temp, copy_size_rtx,
                               TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

              /* Figure out how much is left in TARGET
                 that we have to clear.  */
              if (GET_CODE (copy_size_rtx) == CONST_INT)
                {
                  temp = plus_constant (XEXP (target, 0),
                                        TREE_STRING_LENGTH (exp));
                  size = plus_constant (size,
                                        - TREE_STRING_LENGTH (exp));
                }
              else
                {
                  enum machine_mode size_mode = Pmode;

                  temp = force_reg (Pmode, XEXP (target, 0));
                  temp = expand_binop (size_mode, add_optab, temp,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

                  size = expand_binop (size_mode, sub_optab, size,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

                  emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
                                 GET_MODE (size), 0, 0);
                  label = gen_label_rtx ();
                  emit_jump_insn (gen_blt (label));
                }

              if (size != const0_rtx)
                {
#ifdef TARGET_MEM_FUNCTIONS
                  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
                                     temp, Pmode, const0_rtx, Pmode, size, Pmode);
#else
                  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
                                     temp, Pmode, size, Pmode);
#endif
                }
              if (label)
                emit_label (label);
            }
        }
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp),
                         TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else
        emit_move_insn (target, temp);
    }

  if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;
  if (want_value && GET_MODE (target) != BLKmode)
    return copy_to_reg (target);
  if (want_value)
    return target;
  return NULL_RTX;
}
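
/* Illustrative sketch, not part of the compiler: the string-constant case
   above implements, at run time, roughly the following C-level semantics
   for initializing a char array DEST of SIZE bytes from a string of LEN
   bytes.  The names are hypothetical, and memcpy/memset stand in for the
   emit_block_move and memset/bzero library calls emitted above.  */
#if 0
static void
example_string_store_semantics (dest, src, size, len)
     char *dest, *src;
     int size, len;
{
  int copy = len < size ? len : size;	/* the MIN_EXPR above */

  memcpy (dest, src, copy);		/* emit_block_move */
  if (size - copy > 0)			/* the size > 0 test above */
    memset (dest + copy, 0, size - copy);
}
#endif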

/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM.  */

static void
store_constructor (exp, target)
     tree exp;
     rtx target;
{
  tree type = TREE_TYPE (exp);

  /* We know our target cannot conflict, since safe_from_p has been called.  */
#if 0
  /* Don't try copying piece by piece into a hard register
     since that is vulnerable to being clobbered by EXP.
     Instead, construct in a pseudo register and then copy it all.  */
  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      rtx temp = gen_reg_rtx (GET_MODE (target));
      store_constructor (exp, temp);
      emit_move_insn (target, temp);
      return;
    }
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      register tree elt;

      /* Inform later passes that the whole union value is dead.  */
      if (TREE_CODE (type) == UNION_TYPE
          || TREE_CODE (type) == QUAL_UNION_TYPE)
        emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* If we are building a static constructor into a register,
         set the initial value as zero so we can fold the value into
         a constant.  */
      else if (GET_CODE (target) == REG && TREE_STATIC (exp))
        emit_move_insn (target, const0_rtx);

      /* If the constructor has fewer fields than the structure,
         clear the whole structure first.  */
      else if (list_length (CONSTRUCTOR_ELTS (exp))
               != list_length (TYPE_FIELDS (type)))
        clear_storage (target, int_size_in_bytes (type));
      else
        /* Inform later passes that the old value is dead.  */
        emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* Store each element of the constructor into
         the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
        {
          register tree field = TREE_PURPOSE (elt);
          register enum machine_mode mode;
          int bitsize;
          int bitpos = 0;
          int unsignedp;
          tree pos, constant = 0, offset = 0;
          rtx to_rtx = target;

          /* Just ignore missing fields.
             We cleared the whole structure, above,
             if any fields are missing.  */
          if (field == 0)
            continue;

          bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
          unsignedp = TREE_UNSIGNED (field);
          mode = DECL_MODE (field);
          if (DECL_BIT_FIELD (field))
            mode = VOIDmode;

          pos = DECL_FIELD_BITPOS (field);
          if (TREE_CODE (pos) == INTEGER_CST)
            constant = pos;
          else if (TREE_CODE (pos) == PLUS_EXPR
                   && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
            constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
          else
            offset = pos;

          if (constant)
            bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

          if (offset)
            {
              rtx offset_rtx;

              if (contains_placeholder_p (offset))
                offset = build (WITH_RECORD_EXPR, sizetype,
                                offset, exp);

              offset = size_binop (FLOOR_DIV_EXPR, offset,
                                   size_int (BITS_PER_UNIT));

              offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
              if (GET_CODE (to_rtx) != MEM)
                abort ();

              to_rtx
                = change_address (to_rtx, VOIDmode,
                                  gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
                                           force_reg (Pmode, offset_rtx)));
            }

          store_field (to_rtx, bitsize, bitpos, mode, TREE_VALUE (elt),
                       /* The alignment of TARGET is
                          at least what its type requires.  */
                       VOIDmode, 0,
                       TYPE_ALIGN (type) / BITS_PER_UNIT,
                       int_size_in_bytes (type));
        }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      tree domain = TYPE_DOMAIN (type);
      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (type);

      /* If the constructor has fewer elements than the array,
         clear the whole array first.  Similarly if this is a
         static constructor of a non-BLKmode object.  */

      if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
          || (GET_CODE (target) == REG && TREE_STATIC (exp)))
        clear_storage (target, int_size_in_bytes (type));
      else
        /* Inform later passes that the old value is dead.  */
        emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* Store each element of the constructor into
         the corresponding element of TARGET, determined
         by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
           elt;
           elt = TREE_CHAIN (elt), i++)
        {
          register enum machine_mode mode;
          int bitsize;
          int bitpos;
          int unsignedp;
          tree index = TREE_PURPOSE (elt);
          rtx xtarget = target;

          mode = TYPE_MODE (elttype);
          bitsize = GET_MODE_BITSIZE (mode);
          unsignedp = TREE_UNSIGNED (elttype);

          if (index != 0 && TREE_CODE (index) != INTEGER_CST)
            {
              /* We don't currently allow variable indices in a
                 C initializer, but let's try here to support them.  */
              rtx pos_rtx, addr, xtarget;
              tree position;

              position = size_binop (MULT_EXPR, index, TYPE_SIZE (elttype));
              pos_rtx = expand_expr (position, 0, VOIDmode, 0);
              addr = gen_rtx (PLUS, Pmode, XEXP (target, 0), pos_rtx);
              xtarget = change_address (target, mode, addr);
              store_expr (TREE_VALUE (elt), xtarget, 0);
            }
          else
            {
              if (index != 0)
                bitpos = ((TREE_INT_CST_LOW (index) - minelt)
                          * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
              else
                bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));

              store_field (xtarget, bitsize, bitpos, mode, TREE_VALUE (elt),
                           /* The alignment of TARGET is
                              at least what its type requires.  */
                           VOIDmode, 0,
                           TYPE_ALIGN (type) / BITS_PER_UNIT,
                           int_size_in_bytes (type));
            }
        }
    }

  else
    abort ();
}
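
/* Illustrative sketch, not part of the compiler: the bit position computed
   for an array element above is just the zero-based element number scaled
   by the element size in bits; e.g. with 32-bit elements and a domain
   starting at 0, element 2 starts at bit 64.  The function name and
   parameters are hypothetical.  */
#if 0
static int
example_element_bitpos (index, minelt, elt_bits)
     int index, minelt, elt_bits;
{
  /* Mirrors (TREE_INT_CST_LOW (index) - minelt)
     * TREE_INT_CST_LOW (TYPE_SIZE (elttype)) above.  */
  return (index - minelt) * elt_bits;
}
#endif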

/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.  */

static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
             unsignedp, align, total_size)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  HOST_WIDE_INT width_mask = 0;

  if (bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_stack_temp (GET_MODE (target),
                                      GET_MODE_SIZE (GET_MODE (target)), 0);
      rtx blk_object = copy_rtx (object);

      PUT_MODE (blk_object, BLKmode);

      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
        emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
                   align, total_size);

      /* Even though we aren't returning target, we need to
         give it the updated value.  */
      emit_move_insn (target, object);

      return blk_object;
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode])
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
         store it as a bit field.  */
      || (STRICT_ALIGNMENT
          && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
      || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
         MODE.  */
      if (mode != VOIDmode && mode != BLKmode
          && mode != TYPE_MODE (TREE_TYPE (exp)))
        temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
      if (value_mode != VOIDmode)
        {
          /* The caller wants an rtx for the value.  */
          /* If possible, avoid refetching from the bitfield itself.  */
          if (width_mask != 0
              && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
            {
              tree count;
              enum machine_mode tmode;

              if (unsignedp)
                return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
              tmode = GET_MODE (temp);
              if (tmode == VOIDmode)
                tmode = value_mode;
              count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
              temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
              return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
            }
          return extract_bit_field (target, bitsize, bitpos, unsignedp,
                                    NULL_RTX, value_mode, 0, align,
                                    total_size);
        }
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx;

      /* If a value is wanted, it must be the lhs;
         so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
          && ! CONSTANT_ADDRESS_P (addr)
          /* A frame-pointer reference is already stable.  */
          && ! (GET_CODE (addr) == PLUS
                && GET_CODE (XEXP (addr, 1)) == CONST_INT
                && (XEXP (addr, 0) == virtual_incoming_args_rtx
                    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
        addr = copy_to_reg (addr);

      /* Now build a reference to just the desired component.  */

      to_rtx = change_address (target, mode,
                               plus_constant (addr, (bitpos / BITS_PER_UNIT)));
      MEM_IN_STRUCT_P (to_rtx) = 1;

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
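
/* Illustrative sketch, not part of the compiler: the shift pair above
   sign-extends a BITSIZE-bit value held in the low bits of a word, which
   is what a signed bit-field reads back as without refetching it.  A
   plain C version, assuming (as the rtl above guarantees by asking for
   an arithmetic shift) that right shift of a negative value smears the
   sign bit.  The function name is hypothetical.  */
#if 0
static HOST_WIDE_INT
example_sign_extend (val, bitsize)
     HOST_WIDE_INT val;
     int bitsize;
{
  int count = HOST_BITS_PER_WIDE_INT - bitsize;

  /* Left shift pushes the field's sign bit into the word's sign bit;
     the right shift then propagates it back down.  */
  return (val << count) >> count;
}
#endif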

/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
                     punsignedp, pvolatilep)
     tree exp;
     int *pbitsize;
     int *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
{
  tree orig_exp = exp;
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = integer_zero_node;

  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
        mode = DECL_MODE (TREE_OPERAND (exp, 1));
      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *pbitsize = GET_MODE_BITSIZE (mode);
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
    }

  if (size_tree)
    {
      if (TREE_CODE (size_tree) != INTEGER_CST)
        mode = BLKmode, *pbitsize = -1;
      else
        *pbitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */

  *pbitpos = 0;

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
        {
          tree pos = (TREE_CODE (exp) == COMPONENT_REF
                      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
                      : TREE_OPERAND (exp, 2));

          /* If this field hasn't been filled in yet, don't go
             past it.  This should only happen when folding expressions
             made during type construction.  */
          if (pos == 0)
            break;

          if (TREE_CODE (pos) == PLUS_EXPR)
            {
              tree constant, var;
              if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
                {
                  constant = TREE_OPERAND (pos, 0);
                  var = TREE_OPERAND (pos, 1);
                }
              else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
                {
                  constant = TREE_OPERAND (pos, 1);
                  var = TREE_OPERAND (pos, 0);
                }
              else
                abort ();

              *pbitpos += TREE_INT_CST_LOW (constant);
              offset = size_binop (PLUS_EXPR, offset,
                                   size_binop (FLOOR_DIV_EXPR, var,
                                               size_int (BITS_PER_UNIT)));
            }
          else if (TREE_CODE (pos) == INTEGER_CST)
            *pbitpos += TREE_INT_CST_LOW (pos);
          else
            {
              /* Assume here that the offset is a multiple of a unit.
                 If not, there should be an explicitly added constant.  */
              offset = size_binop (PLUS_EXPR, offset,
                                   size_binop (FLOOR_DIV_EXPR, pos,
                                               size_int (BITS_PER_UNIT)));
            }
        }

      else if (TREE_CODE (exp) == ARRAY_REF)
        {
          /* This code is based on the code in case ARRAY_REF in expand_expr
             below.  We assume here that the size of an array element is
             always an integral multiple of BITS_PER_UNIT.  */

          tree index = TREE_OPERAND (exp, 1);
          tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
          tree low_bound
            = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
          tree index_type = TREE_TYPE (index);

          if (! integer_zerop (low_bound))
            index = fold (build (MINUS_EXPR, index_type, index, low_bound));

          if (TYPE_PRECISION (index_type) != POINTER_SIZE)
            {
              index = convert (type_for_size (POINTER_SIZE, 0), index);
              index_type = TREE_TYPE (index);
            }

          index = fold (build (MULT_EXPR, index_type, index,
                               TYPE_SIZE (TREE_TYPE (exp))));

          if (TREE_CODE (index) == INTEGER_CST
              && TREE_INT_CST_HIGH (index) == 0)
            *pbitpos += TREE_INT_CST_LOW (index);
          else
            offset = size_binop (PLUS_EXPR, offset,
                                 size_binop (FLOOR_DIV_EXPR, index,
                                             size_int (BITS_PER_UNIT)));
        }
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
               && ! ((TREE_CODE (exp) == NOP_EXPR
                      || TREE_CODE (exp) == CONVERT_EXPR)
                     && (TYPE_MODE (TREE_TYPE (exp))
                         == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
        break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
        *pvolatilep = 1;
      exp = TREE_OPERAND (exp, 0);
    }

  /* If this was a bit-field, see if there is a mode that allows direct
     access in case EXP is in memory.  */
  if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
    {
      mode = mode_for_size (*pbitsize, MODE_INT, 0);
      if (mode == BLKmode)
        mode = VOIDmode;
    }

  if (integer_zerop (offset))
    offset = 0;

  if (offset != 0 && contains_placeholder_p (offset))
    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);

  *pmode = mode;
  *poffset = offset;
  return exp;
}
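
/* Illustrative sketch, not part of the compiler: a typical caller,
   modeled on the COMPONENT_REF case of expand_assignment above.
   The function name is hypothetical.  */
#if 0
static tree
example_decompose_reference (ref)
     tree ref;			/* e.g. a COMPONENT_REF for `s.f' */
{
  int bitsize, bitpos, unsignedp;
  int volatilep = 0;
  tree offset;
  enum machine_mode mode1;

  /* On return, the result is the containing object (here `s');
     BITPOS and BITSIZE locate the field `f' within it, and OFFSET
     carries any variable part of the position.  */
  return get_inner_reference (ref, &bitsize, &bitpos, &offset,
                              &mode1, &unsignedp, &volatilep);
}
#endif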

/* Given an rtx VALUE that may contain additions and multiplications,
   return an equivalent value that just refers to a register or memory.
   This is done by generating instructions to perform the arithmetic
   and returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
          && !(GET_CODE (op2) == REG && op2 != subtarget))
        subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
                          force_operand (op2, NULL_RTX),
                          target, 0);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
          && !(GET_CODE (op2) == REG && op2 != subtarget))
        subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
        {
          binoptab = add_optab;
          op2 = negate_rtx (GET_MODE (value), op2);
        }

      /* Check for an addition with OP2 a constant integer and our first
         operand a PLUS of a virtual register and something else.  In that
         case, we want to emit the sum of the virtual register and the
         constant first and then add the other value.  This allows virtual
         register instantiation to simply modify the constant rather than
         creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
          && GET_CODE (XEXP (value, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
          && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
          && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
        {
          rtx temp = expand_binop (GET_MODE (value), binoptab,
                                   XEXP (XEXP (value, 0), 0), op2,
                                   subtarget, 0, OPTAB_LIB_WIDEN);
          return expand_binop (GET_MODE (value), binoptab, temp,
                               force_operand (XEXP (XEXP (value, 0), 1), 0),
                               target, 0, OPTAB_LIB_WIDEN);
        }

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
                           force_operand (op2, NULL_RTX),
                           target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
         because the only operations we are expanding here are signed ones.  */
    }
  return value;
}
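
/* Illustrative sketch, not part of the compiler: forcing a symbolic
   computation like (plus (reg) (const_int 4)) into a single register
   so it can serve as an instruction operand.  The function name and
   the constant 4 are hypothetical example choices.  */
#if 0
static rtx
example_force_address (base)
     rtx base;			/* A Pmode register.  */
{
  rtx sum = gen_rtx (PLUS, Pmode, base, GEN_INT (4));

  /* Emits an add insn if needed; returns a register (or simpler rtx)
     holding BASE + 4.  */
  return force_operand (sum, NULL_RTX);
}
#endif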

/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
        tree part = TREE_VALUE (tail);
        tree part_type = TREE_TYPE (part);
        tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
        rtx target = assign_stack_temp (TYPE_MODE (part_type),
                                        int_size_in_bytes (part_type), 0);
        if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
          target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
        parts = tree_cons (to_be_saved,
                           build (RTL_EXPR, part_type, NULL_TREE,
                                  (tree) target),
                           parts);
        store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}

/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */

static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
        tree part = TREE_VALUE (tail);
        tree part_type = TREE_TYPE (part);
        tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
        parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}

/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  */

static int
safe_from_p (x, exp)
     rtx x;
     tree exp;
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0)
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe; otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
        return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
          || (GET_CODE (XEXP (x, 0)) == PLUS
              && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
        return ((TREE_VALUE (exp) == 0
                 || safe_from_p (x, TREE_VALUE (exp)))
                && (TREE_CHAIN (exp) == 0
                    || safe_from_p (x, TREE_CHAIN (exp))));
      else
        return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0));

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0))
              && safe_from_p (x, TREE_OPERAND (exp, 1)));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
         the expression.  If it is set, we conflict iff we are that rtx or
         both are in memory.  Otherwise, we check all operands of the
         expression recursively.  */

      switch (TREE_CODE (exp))
        {
        case ADDR_EXPR:
          return (staticp (TREE_OPERAND (exp, 0))
                  || safe_from_p (x, TREE_OPERAND (exp, 0)));

        case INDIRECT_REF:
          if (GET_CODE (x) == MEM)
            return 0;
          break;

        case CALL_EXPR:
          exp_rtl = CALL_EXPR_RTL (exp);
          if (exp_rtl == 0)
            {
              /* Assume that the call will clobber all hard registers and
                 all of memory.  */
              if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
                  || GET_CODE (x) == MEM)
                return 0;
            }

          break;

        case RTL_EXPR:
          exp_rtl = RTL_EXPR_RTL (exp);
          if (exp_rtl == 0)
            /* We don't know what this can modify.  */
            return 0;

          break;

        case WITH_CLEANUP_EXPR:
          exp_rtl = RTL_EXPR_RTL (exp);
          break;

        case SAVE_EXPR:
          exp_rtl = SAVE_EXPR_RTL (exp);
          break;

        case BIND_EXPR:
          /* The only operand we look at is operand 1.  The rest aren't
             part of the expression.  */
          return safe_from_p (x, TREE_OPERAND (exp, 1));

        case METHOD_CALL_EXPR:
          /* This takes a rtx argument, but shouldn't appear here.  */
          abort ();
        }

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
        break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
        if (TREE_OPERAND (exp, i) != 0
            && ! safe_from_p (x, TREE_OPERAND (exp, i)))
          return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
        {
          exp_rtl = SUBREG_REG (exp_rtl);
          if (GET_CODE (exp_rtl) == REG
              && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
            return 0;
        }

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
         are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
                || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
                    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}

/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}
\f
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.
   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.  */

rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
     This is static so it will be accessible to our recursive callees.  */
  static tree placeholder_list = 0;
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode = TYPE_MODE (type);
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  rtx original_target = target;
  /* Maybe defer this until sure not doing bytecode?  */
  int ignore = (target == const0_rtx
                || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
                     || code == CONVERT_EXPR || code == REFERENCE_EXPR
                     || code == COND_EXPR)
                    && TREE_CODE (type) == VOID_TYPE));
  tree context;

  if (output_bytecode)
    {
      bc_expand_expr (exp);
      return NULL;
    }

  /* Don't use hard regs as subtargets, because the combiner
     can only handle pseudo regs.  */
  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
    subtarget = 0;
  /* Avoid subtargets inside loops,
     since they hide some invariant expressions.  */
  if (preserve_subexpressions_p ())
    subtarget = 0;

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
        return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored.  */
      if (TREE_THIS_VOLATILE (exp)
          && TREE_CODE (exp) != FUNCTION_DECL
          && mode != VOIDmode && mode != BLKmode)
        {
          temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
          if (GET_CODE (temp) == MEM)
            temp = copy_to_reg (temp);
          return const0_rtx;
        }

      if (TREE_CODE_CLASS (code) == '1')
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
                            VOIDmode, modifier);
      else if (TREE_CODE_CLASS (code) == '2'
               || TREE_CODE_CLASS (code) == '<')
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
          return const0_rtx;
        }
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
               && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        /* If the second operand has no side effects, just evaluate
           the first.  */
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
                            VOIDmode, modifier);

      target = 0;
    }

  /* If we will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);
        /* Handle using a label in a containing function.  */
        if (function != current_function_decl && function != 0)
          {
            struct function *p = find_function_data (function);
            /* Allocate in the memory associated with the function
               that the label is in.  */
            push_obstacks (p->function_obstack,
                           p->function_maybepermanent_obstack);

            p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
                                        label_rtx (exp), p->forced_labels);
            pop_obstacks ();
          }
        else if (modifier == EXPAND_INITIALIZER)
          forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
                                   label_rtx (exp), forced_labels);
        temp = gen_rtx (MEM, FUNCTION_MODE,
                        gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
        if (function != current_function_decl && function != 0)
          LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
        return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
        {
          error_with_decl (exp, "prior parameter's size depends on `%s'");
          return CONST0_RTX (mode);
        }

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        {
          push_obstacks_nochange ();
          end_temporary_allocation ();
          layout_decl (exp, 0);
          PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
          pop_obstacks ();
        }
    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
        abort ();
      /* Ensure the variable is marked as used even if it doesn't go
         through a parser.  If it hasn't been used yet, write out an
         external definition.  */
      if (! TREE_USED (exp))
        {
          assemble_external (exp);
          TREE_USED (exp) = 1;
        }

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */
      if (context != 0 && context != current_function_decl
          && context != inline_function_decl
          /* If var is static, we don't need a static chain to access it.  */
          && ! (GET_CODE (DECL_RTL (exp)) == MEM
                && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
        {
          rtx addr;

          /* Mark as non-local and addressable.  */
          DECL_NONLOCAL (exp) = 1;
          mark_addressable (exp);
          if (GET_CODE (DECL_RTL (exp)) != MEM)
            abort ();
          addr = XEXP (DECL_RTL (exp), 0);
          if (GET_CODE (addr) == MEM)
            addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
          else
            addr = fix_lexical_addr (addr, exp);
          return change_address (DECL_RTL (exp), mode, addr);
        }

      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         See expand_decl.  */
      if (GET_CODE (DECL_RTL (exp)) == MEM
          && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
        return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
                               XEXP (DECL_RTL (exp), 0));
      if (GET_CODE (DECL_RTL (exp)) == MEM
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_SUM
          && modifier != EXPAND_INITIALIZER)
        {
          /* DECL_RTL probably contains a constant address.
             On RISC machines where a constant address isn't valid,
             make some insns to get that address into a register.  */
          if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
              || (flag_force_addr
                  && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
            return change_address (DECL_RTL (exp), VOIDmode,
                                   copy_rtx (XEXP (DECL_RTL (exp), 0)));
        }

      /* If the mode of DECL_RTL does not match that of the decl, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
          && GET_MODE (DECL_RTL (exp)) != mode)
        {
          enum machine_mode decl_mode = DECL_MODE (exp);

          /* Get the signedness used for this variable.  Ensure we get the
             same mode we got when the variable was declared.  */

          PROMOTE_MODE (decl_mode, unsignedp, type);

          if (decl_mode != GET_MODE (DECL_RTL (exp)))
            abort ();

          temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return DECL_RTL (exp);

    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp),
                                 mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
        output_constant_def (exp);

      /* TREE_CST_RTL probably contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
        return change_address (TREE_CST_RTL (exp), VOIDmode,
                               copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);

    case SAVE_EXPR:
      context = decl_function_context (exp);
      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
        context = 0;

      /* If this is non-local, handle it.  */
      if (context)
        {
          temp = SAVE_EXPR_RTL (exp);
          if (temp && GET_CODE (temp) == REG)
            {
              put_var_into_stack (exp);
              temp = SAVE_EXPR_RTL (exp);
            }
          if (temp == 0 || GET_CODE (temp) != MEM)
            abort ();
          return change_address (temp, mode,
                                 fix_lexical_addr (XEXP (temp, 0), exp));
        }
      if (SAVE_EXPR_RTL (exp) == 0)
        {
          if (mode == BLKmode)
            {
              temp
                = assign_stack_temp (mode, int_size_in_bytes (type), 0);
              MEM_IN_STRUCT_P (temp)
                = (TREE_CODE (type) == RECORD_TYPE
                   || TREE_CODE (type) == UNION_TYPE
                   || TREE_CODE (type) == QUAL_UNION_TYPE
                   || TREE_CODE (type) == ARRAY_TYPE);
            }
          else
            {
              enum machine_mode var_mode = mode;

              if (TREE_CODE (type) == INTEGER_TYPE
                  || TREE_CODE (type) == ENUMERAL_TYPE
                  || TREE_CODE (type) == BOOLEAN_TYPE
                  || TREE_CODE (type) == CHAR_TYPE
                  || TREE_CODE (type) == REAL_TYPE
                  || TREE_CODE (type) == POINTER_TYPE
                  || TREE_CODE (type) == OFFSET_TYPE)
                {
                  PROMOTE_MODE (var_mode, unsignedp, type);
                }

              temp = gen_reg_rtx (var_mode);
            }

          SAVE_EXPR_RTL (exp) = temp;
          if (!optimize && GET_CODE (temp) == REG)
            save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
                                      save_expr_regs);

          /* If the mode of TEMP does not match that of the expression, it
             must be a promoted value.  We pass store_expr a SUBREG of the
             wanted mode but mark it so that we know that it was already
             extended.  Note that `unsignedp' was modified above in
             this case.  */

          if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
            {
              temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
            }

          store_expr (TREE_OPERAND (exp, 0), temp, 0);
        }

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
          && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
        {
          enum machine_mode var_mode = mode;

          if (TREE_CODE (type) == INTEGER_TYPE
              || TREE_CODE (type) == ENUMERAL_TYPE
              || TREE_CODE (type) == BOOLEAN_TYPE
              || TREE_CODE (type) == CHAR_TYPE
              || TREE_CODE (type) == REAL_TYPE
              || TREE_CODE (type) == POINTER_TYPE
              || TREE_CODE (type) == OFFSET_TYPE)
            {
              PROMOTE_MODE (var_mode, unsignedp, type);
            }

          temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return SAVE_EXPR_RTL (exp);

    case PLACEHOLDER_EXPR:
      /* If there is an object on the head of the placeholder list,
         see if some object in its references is of type TYPE.  For
         further information, see tree.def.  */
      if (placeholder_list)
        {
          tree object;
          tree old_list = placeholder_list;

          for (object = TREE_PURPOSE (placeholder_list);
               TREE_TYPE (object) != type
               && (TREE_CODE_CLASS (TREE_CODE (object)) == 'r'
                   || TREE_CODE_CLASS (TREE_CODE (object)) == '1'
                   || TREE_CODE_CLASS (TREE_CODE (object)) == '2'
                   || TREE_CODE_CLASS (TREE_CODE (object)) == 'e');
               object = TREE_OPERAND (object, 0))
            ;

          if (object && TREE_TYPE (object) == type)
            {
              /* Expand this object skipping the list entries before
                 it was found in case it is also a PLACEHOLDER_EXPR.
                 In that case, we want to translate it using subsequent
                 entries.  */
              placeholder_list = TREE_CHAIN (placeholder_list);
              temp = expand_expr (object, original_target, tmode, modifier);
              placeholder_list = old_list;
              return temp;
            }
        }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
         and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
                            tmode, modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL_PTR,
                                 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;

    case BIND_EXPR:
      {
        tree vars = TREE_OPERAND (exp, 0);
        int vars_need_expansion = 0;

        /* Need to open a binding contour here because
           if there are any cleanups they must be contained here.  */
        expand_start_bindings (0);

        /* Mark the corresponding BLOCK for output in its proper place.  */
        if (TREE_OPERAND (exp, 2) != 0
            && ! TREE_USED (TREE_OPERAND (exp, 2)))
          insert_block (TREE_OPERAND (exp, 2));

        /* If VARS have not yet been expanded, expand them now.  */
        while (vars)
          {
            if (DECL_RTL (vars) == 0)
              {
                vars_need_expansion = 1;
                expand_decl (vars);
              }
            expand_decl_init (vars);
            vars = TREE_CHAIN (vars);
          }

        temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);

        expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

        return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
        abort ();
      emit_insns (RTL_EXPR_SEQUENCE (exp));
      RTL_EXPR_SEQUENCE (exp) = const0_rtx;
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
      if (ignore)
        {
          tree elt;
          for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
            expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
          return const0_rtx;
        }

      /* All elts simple constants => refer to a constant in memory.  But
         if this is a non-BLKmode mode, let it store a field at a time
         since that should make a CONST_INT or CONST_DOUBLE when we
         fold.  Likewise, if we have a target we can use, it is best to
         store directly into the target.  If we are making an initializer and
         all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
                && ((mode == BLKmode
                     && ! (target != 0 && safe_from_p (target, exp)))
                    || TREE_ADDRESSABLE (exp)))
               || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
        {
          rtx constructor = output_constant_def (exp);
          if (modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_SUM
              && !memory_address_p (GET_MODE (constructor),
                                    XEXP (constructor, 0)))
            constructor = change_address (constructor, VOIDmode,
                                          XEXP (constructor, 0));
          return constructor;
        }

      else
        {
          if (target == 0 || ! safe_from_p (target, exp))
            {
              if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
                target = gen_reg_rtx (mode);
              else
                {
                  enum tree_code c = TREE_CODE (type);
                  target
                    = assign_stack_temp (mode, int_size_in_bytes (type), 0);
                  if (c == RECORD_TYPE || c == UNION_TYPE
                      || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
                    MEM_IN_STRUCT_P (target) = 1;
                }
            }
          store_constructor (exp, target);
          return target;
        }

    case INDIRECT_REF:
      {
        tree exp1 = TREE_OPERAND (exp, 0);
        tree exp2;

        /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
           for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
           This code has the same general effect as simply doing
           expand_expr on the save expr, except that the expression PTR
           is computed for use as a memory address.  This means different
           code, suitable for indexing, may be generated.  */
        if (TREE_CODE (exp1) == SAVE_EXPR
            && SAVE_EXPR_RTL (exp1) == 0
            && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
            && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
            && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
          {
            temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
                                VOIDmode, EXPAND_SUM);
            op0 = memory_address (mode, temp);
            op0 = copy_all_regs (op0);
            SAVE_EXPR_RTL (exp1) = op0;
          }
        else
          {
            op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
            op0 = memory_address (mode, op0);
          }

        temp = gen_rtx (MEM, mode, op0);
        /* If address was computed by addition,
           mark this as an element of an aggregate.  */
        if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
            || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
                && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
            || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
            || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
            || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
            || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
            || (TREE_CODE (exp1) == ADDR_EXPR
                && (exp2 = TREE_OPERAND (exp1, 0))
                && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
                    || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
                    || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
                    || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
          MEM_IN_STRUCT_P (temp) = 1;
        MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
         a location is accessed through a pointer to const does not mean
         that the value there can never change.  */
        RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
#endif
        return temp;
      }

    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
        abort ();

      {
        tree array = TREE_OPERAND (exp, 0);
        tree domain = TYPE_DOMAIN (TREE_TYPE (array));
        tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
        tree index = TREE_OPERAND (exp, 1);
        tree index_type = TREE_TYPE (index);
        int i;

        if (TREE_CODE (low_bound) != INTEGER_CST
            && contains_placeholder_p (low_bound))
          low_bound = build (WITH_RECORD_EXPR, sizetype, low_bound, exp);

        /* Optimize the special-case of a zero lower bound.

           We convert the low_bound to sizetype to avoid some problems
           with constant folding.  (E.g. suppose the lower bound is 1,
           and its mode is QI.  Without the conversion, (ARRAY
           +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
           +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

           But sizetype isn't quite right either (especially if
           the lowbound is negative).  FIXME */

        if (! integer_zerop (low_bound))
          index = fold (build (MINUS_EXPR, index_type, index,
                               convert (sizetype, low_bound)));

        if (TREE_CODE (index) != INTEGER_CST
            || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
          {
            /* Nonconstant array index or nonconstant element size.
               Generate the tree for *(&array+index) and expand that,
               except do it in a language-independent way
               and don't complain about non-lvalue arrays.
               `mark_addressable' should already have been called
               for any array for which this case will be reached.  */

            /* Don't forget the const or volatile flag from the array
               element.  */
            tree variant_type = build_type_variant (type,
                                                    TREE_READONLY (exp),
                                                    TREE_THIS_VOLATILE (exp));
            tree array_adr = build1 (ADDR_EXPR,
                                     build_pointer_type (variant_type), array);
            tree elt;
            tree size = size_in_bytes (type);

            /* Convert the integer argument to a type the same size as a
               pointer so the multiply won't overflow spuriously.  */
            if (TYPE_PRECISION (index_type) != POINTER_SIZE)
              index = convert (type_for_size (POINTER_SIZE, 0), index);

            if (TREE_CODE (size) != INTEGER_CST
                && contains_placeholder_p (size))
              size = build (WITH_RECORD_EXPR, sizetype, size, exp);

            /* Don't think the address has side effects
               just because the array does.
               (In some cases the address might have side effects,
               and we fail to record that fact here.  However, it should not
               matter, since expand_expr should not care.)  */
            TREE_SIDE_EFFECTS (array_adr) = 0;

            elt = build1 (INDIRECT_REF, type,
                          fold (build (PLUS_EXPR,
                                       TYPE_POINTER_TO (variant_type),
                                       array_adr,
                                       fold (build (MULT_EXPR,
                                                    TYPE_POINTER_TO (variant_type),
                                                    index, size)))));

            /* Volatility, etc., of new expression is same as old
               expression.  */
            TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
            TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
            TREE_READONLY (elt) = TREE_READONLY (exp);

            return expand_expr (elt, target, tmode, modifier);
          }

        /* Fold an expression like: "foo"[2].
           This is not done in fold so it won't happen inside &.  */

        if (TREE_CODE (array) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && !TREE_INT_CST_HIGH (index)
            && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
            && GET_MODE_CLASS (mode) == MODE_INT)
          return GEN_INT (TREE_STRING_POINTER (array)[i]);

        /* If this is a constant index into a constant array,
           just get the value from the array.  Handle both the cases when
           we have an explicit constructor and when our operand is a variable
           that was declared const.  */

        if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
          {
            if (TREE_CODE (index) == INTEGER_CST
                && TREE_INT_CST_HIGH (index) == 0)
              {
                tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));

                i = TREE_INT_CST_LOW (index);
                while (elem && i--)
                  elem = TREE_CHAIN (elem);
                if (elem)
                  return expand_expr (fold (TREE_VALUE (elem)), target,
                                      tmode, modifier);
              }
          }

        else if (optimize >= 1
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
                 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
                 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
          {
            if (TREE_CODE (index) == INTEGER_CST
                && TREE_INT_CST_HIGH (index) == 0)
              {
                tree init = DECL_INITIAL (array);

                i = TREE_INT_CST_LOW (index);
                if (TREE_CODE (init) == CONSTRUCTOR)
                  {
                    tree elem = CONSTRUCTOR_ELTS (init);

                    while (elem
                           && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
                      elem = TREE_CHAIN (elem);
                    if (elem)
                      return expand_expr (fold (TREE_VALUE (elem)), target,
                                          tmode, modifier);
                  }
                else if (TREE_CODE (init) == STRING_CST
                         && i < TREE_STRING_LENGTH (init))
                  return GEN_INT (TREE_STRING_POINTER (init)[i]);
              }
          }
      }

      /* Treat array-ref with constant index as a component-ref.  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
         appropriate field if it is present.  */
      if (code != ARRAY_REF
          && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
        {
          tree elt;

          for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
               elt = TREE_CHAIN (elt))
            if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
              return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
        }

      {
        enum machine_mode mode1;
        int bitsize;
        int bitpos;
        tree offset;
        int volatilep = 0;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                        &mode1, &unsignedp, &volatilep);
        int alignment;

        /* If we got back the original object, something is wrong.  Perhaps
           we are evaluating an expression too early.  In any event, don't
           infinitely recurse.  */
        if (tem == exp)
          abort ();

        /* In some cases, we will be offsetting OP0's address by a constant.
           So get it as a sum, if possible.  If we will be using it
           directly in an insn, we validate it.  */
        op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);

        /* If this is a constant, put it into a register if it is a
           legitimate constant and memory if it isn't.  */
        if (CONSTANT_P (op0))
          {
            enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
            if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
              op0 = force_reg (mode, op0);
            else
              op0 = validize_mem (force_const_mem (mode, op0));
          }

        alignment = TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT;
        if (offset != 0)
          {
            rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

            if (GET_CODE (op0) != MEM)
              abort ();
            op0 = change_address (op0, VOIDmode,
                                  gen_rtx (PLUS, Pmode, XEXP (op0, 0),
                                           force_reg (Pmode, offset_rtx)));
            /* If we have a variable offset, the known alignment
               is only that of the innermost structure containing the field.
               (Actually, we could sometimes do better by using the
               size of an element of the innermost array, but no need.)  */
            if (TREE_CODE (exp) == COMPONENT_REF
                || TREE_CODE (exp) == BIT_FIELD_REF)
              alignment = (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
                           / BITS_PER_UNIT);
          }

        /* Don't forget about volatility even if this is a bitfield.  */
        if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
          {
            op0 = copy_rtx (op0);
            MEM_VOLATILE_P (op0) = 1;
          }

        /* In cases where an aligned union has an unaligned object
           as a field, we might be extracting a BLKmode value from
           an integer-mode (e.g., SImode) object.  Handle this case
           by doing the extract into an object as wide as the field
           (which we know to be the width of a basic mode), then
           storing into memory, and changing the mode to BLKmode.  */
        if (mode1 == VOIDmode
            || (mode1 != BLKmode && ! direct_load[(int) mode1]
                && modifier != EXPAND_CONST_ADDRESS
                && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
            || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
            /* If the field isn't aligned enough to fetch as a memref,
               fetch it as a bit field.  */
            || (STRICT_ALIGNMENT
                && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
            || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
          {
            enum machine_mode ext_mode = mode;

            if (ext_mode == BLKmode)
              ext_mode = mode_for_size (bitsize, MODE_INT, 1);

            if (ext_mode == BLKmode)
              abort ();

            op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
                                     unsignedp, target, ext_mode, ext_mode,
                                     alignment,
                                     int_size_in_bytes (TREE_TYPE (tem)));
            if (mode == BLKmode)
              {
                rtx new = assign_stack_temp (ext_mode,
                                             bitsize / BITS_PER_UNIT, 0);

                emit_move_insn (new, op0);
                op0 = copy_rtx (new);
                PUT_MODE (op0, BLKmode);
                MEM_IN_STRUCT_P (op0) = 1;
              }

            return op0;
          }

        /* Get a reference to just this component.  */
        if (modifier == EXPAND_CONST_ADDRESS
            || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
                                                    (bitpos / BITS_PER_UNIT)));
        else
          op0 = change_address (op0, mode1,
                                plus_constant (XEXP (op0, 0),
                                               (bitpos / BITS_PER_UNIT)));
        MEM_IN_STRUCT_P (op0) = 1;
        MEM_VOLATILE_P (op0) |= volatilep;
        if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
          return op0;
        if (target == 0)
          target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
        convert_move (target, op0, unsignedp);
        return target;
      }

    case OFFSET_REF:
      {
        tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
        tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
        op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
        temp = gen_rtx (MEM, mode, memory_address (mode, op0));
        MEM_IN_STRUCT_P (temp) = 1;
        MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
         a location is accessed through a pointer to const does not mean
         that the value there can never change.  */
        RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
#endif
        return temp;
      }

    /* Intended for a reference to a buffer of a file-object in Pascal.
       But it's not certain that a special tree code will really be
       necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    /* IN_EXPR: Inlined pascal set IN expression.

       Algorithm:
         rlo       = set_low - (set_low % bits_per_word);
         the_word  = set [(index - rlo) / bits_per_word];
         bit_index = index % bits_per_word;
         bitmask   = 1 << bit_index;
         return !!(the_word & bitmask);  */
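    /* Worked example of the algorithm above (illustrative numbers only):
       with bits_per_word == 32, set_low == 5 and index == 70, rlo is
       5 - 5 % 32 == 0, so we test bit 70 % 32 == 6 of word
       (70 - 0) / 32 == 2 of the set.  */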
    case IN_EXPR:
      preexpand_calls (exp);
      {
        tree set = TREE_OPERAND (exp, 0);
        tree index = TREE_OPERAND (exp, 1);
        tree set_type = TREE_TYPE (set);

        tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
        tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));

        rtx index_val;
        rtx lo_r;
        rtx hi_r;
        rtx rlow;
        rtx diff, quo, rem, addr, bit, result;
        rtx setval, setaddr;
        enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));

        if (target == 0)
          target = gen_reg_rtx (mode);

        /* If domain is empty, answer is no.  */
        if (tree_int_cst_lt (set_high_bound, set_low_bound))
          return const0_rtx;

        index_val = expand_expr (index, 0, VOIDmode, 0);
        lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
        hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
        setval = expand_expr (set, 0, VOIDmode, 0);
        setaddr = XEXP (setval, 0);

        /* Compare index against bounds, if they are constant.  */
        if (GET_CODE (index_val) == CONST_INT
            && GET_CODE (lo_r) == CONST_INT
            && INTVAL (index_val) < INTVAL (lo_r))
          return const0_rtx;

        if (GET_CODE (index_val) == CONST_INT
            && GET_CODE (hi_r) == CONST_INT
            && INTVAL (hi_r) < INTVAL (index_val))
          return const0_rtx;

        /* If we get here, we have to generate the code for both cases
           (in range and out of range).  */

        op0 = gen_label_rtx ();
        op1 = gen_label_rtx ();

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (lo_r) == CONST_INT))
          {
            emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
                           GET_MODE (index_val), 0, 0);
            emit_jump_insn (gen_blt (op1));
          }

        if (! (GET_CODE (index_val) == CONST_INT
               && GET_CODE (hi_r) == CONST_INT))
          {
            emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
                           GET_MODE (index_val), 0, 0);
            emit_jump_insn (gen_bgt (op1));
          }

        /* Calculate the element number of bit zero in the first word
           of the set.  */
        if (GET_CODE (lo_r) == CONST_INT)
          rlow = GEN_INT (INTVAL (lo_r)
                          & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
        else
          rlow = expand_binop (index_mode, and_optab, lo_r,
                               GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
                               NULL_RTX, 0, OPTAB_LIB_WIDEN);

        diff = expand_binop (index_mode, sub_optab,
                             index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);

        quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
        rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
                             GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
        addr = memory_address (byte_mode,
                               expand_binop (index_mode, add_optab,
                                             diff, setaddr, NULL_RTX, 0,
                                             OPTAB_LIB_WIDEN));
        /* Extract the bit we want to examine.  */
        bit = expand_shift (RSHIFT_EXPR, byte_mode,
                            gen_rtx (MEM, byte_mode, addr),
                            make_tree (TREE_TYPE (index), rem),
                            NULL_RTX, 1);
        result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
                               GET_MODE (target) == byte_mode ? target : 0,
                               1, OPTAB_LIB_WIDEN);

        if (result != target)
          convert_move (target, result, 1);

        /* Output the code to handle the out-of-range case.  */
        emit_jump (op0);
        emit_label (op1);
        emit_move_insn (target, const0_rtx);
        emit_label (op0);
        return target;
      }

    case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
        {
          RTL_EXPR_RTL (exp)
            = expand_expr (TREE_OPERAND (exp, 0),
                           target ? target : const0_rtx,
                           tmode, modifier);
          cleanups_this_call
            = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
          /* That's it for this cleanup.  */
          TREE_OPERAND (exp, 2) = 0;
        }
      return RTL_EXPR_RTL (exp);

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
          && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
        return expand_builtin (exp, target, subtarget, tmode, ignore);
      /* If this call was expanded already by preexpand_calls,
         just return the result we got.  */
      if (CALL_EXPR_RTL (exp) != 0)
        return CALL_EXPR_RTL (exp);
      return expand_call (exp, target, ignore);

    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
        return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
      if (TREE_CODE (type) == UNION_TYPE)
        {
          tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
          if (target == 0)
            {
              if (mode == BLKmode)
                {
                  if (TYPE_SIZE (type) == 0
                      || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
                    abort ();
                  target = assign_stack_temp (BLKmode,
                                              (TREE_INT_CST_LOW (TYPE_SIZE (type))
                                               + BITS_PER_UNIT - 1)
                                              / BITS_PER_UNIT, 0);
                }
              else
                target = gen_reg_rtx (mode);
            }
          if (GET_CODE (target) == MEM)
            /* Store data into beginning of memory target.  */
            store_expr (TREE_OPERAND (exp, 0),
                        change_address (target, TYPE_MODE (valtype), 0), 0);

          else if (GET_CODE (target) == REG)
            /* Store this field into a union of the proper type.  */
            store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
                         TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
                         VOIDmode, 0, 1,
                         int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
          else
            abort ();

          /* Return the entire union.  */
          return target;
        }
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
        return op0;
      /* If arg is a constant integer being extended from a narrower mode,
         we must really truncate to get the extended bits right.  Otherwise
         (unsigned long) (unsigned char) ("\377"[0])
         would come out as ffffffff.  */
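      /* Concretely (illustration of the code below): for width == 8, a
         CONST_INT of 0xff from an unsigned char is masked down to the low
         8 bits, while from a signed char its sign bit is set, so it is
         extended to all-ones, i.e. -1.  */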
      if (GET_MODE (op0) == VOIDmode
          && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
              < GET_MODE_BITSIZE (mode)))
        {
          /* MODE must be narrower than HOST_BITS_PER_INT.  */
          int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));

          if (width < HOST_BITS_PER_WIDE_INT)
            {
              HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
                                   : CONST_DOUBLE_LOW (op0));
              if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
                  || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
                val &= ((HOST_WIDE_INT) 1 << width) - 1;
              else
                val |= ~(((HOST_WIDE_INT) 1 << width) - 1);

              op0 = GEN_INT (val);
            }
          else
            {
              op0 = (simplify_unary_operation
                     ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
                       ? ZERO_EXTEND : SIGN_EXTEND),
                      mode, op0,
                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
              if (op0 == 0)
                abort ();
            }
        }
      if (GET_MODE (op0) == VOIDmode)
        return op0;
      if (modifier == EXPAND_INITIALIZER)
        return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
      if (flag_force_mem && GET_CODE (op0) == MEM)
        op0 = copy_to_reg (op0);

      if (target == 0)
        return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
        convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a constant.  */
    plus_expr:
      this_optab = add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
          && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
        {
          tree t = TREE_OPERAND (exp, 1);

          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
        }

      /* If the result is to be Pmode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || mode == Pmode)
        {
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
            {
              op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
                                 EXPAND_SUM);
              op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return op1;
            }

          else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
                   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
                   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
            {
              op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                                 EXPAND_SUM);
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                     VOIDmode, modifier);
                  /* Don't go to both_summands if modifier
                     says it's not right to return a PLUS.  */
                  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                    goto binop2;
                  goto both_summands;
                }
              op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return op0;
            }
        }

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != Pmode)
        goto binop;

      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
          && CONSTANT_P (XEXP (op0, 1)))
        {
          temp = op0;
          op0 = op1;
          op1 = temp;
        }
      /* If adding to a sum including a constant,
         associate it to put the constant outside.  */
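      /* E.g. (illustrative), OP0 + (B + c) is rewritten as (OP0 + B) + c,
         and any constant buried inside OP0 is folded into c as well.  */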
      if (GET_CODE (op1) == PLUS
          && CONSTANT_P (XEXP (op1, 1)))
        {
          rtx constant_term = const0_rtx;

          temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
          if (temp != 0)
            op0 = temp;
          /* Ensure that MULT comes first if there is one.  */
          else if (GET_CODE (op0) == MULT)
            op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
          else
            op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);

          /* Let's also eliminate constants from op0 if possible.  */
          op0 = eliminate_constant_term (op0, &constant_term);

          /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
             their sum should be a constant.  Form it into OP1, since the
             result we want will then be OP0 + OP1.  */

          temp = simplify_binary_operation (PLUS, mode, constant_term,
                                            XEXP (op1, 1));
          if (temp != 0)
            op1 = temp;
          else
            op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
        }

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
        temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx (PLUS, mode, op0, op1);

    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          && really_constant_p (TREE_OPERAND (exp, 0))
          && really_constant_p (TREE_OPERAND (exp, 1)))
        {
          rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
                                 VOIDmode, modifier);
          rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                 VOIDmode, modifier);

          /* If one operand is a CONST_INT, put it last.  */
          if (GET_CODE (op0) == CONST_INT)
            temp = op0, op0 = op1, op1 = temp;

          /* If the last operand is a CONST_INT, use plus_constant of
             the negated constant.  Else make the MINUS.  */
          if (GET_CODE (op1) == CONST_INT)
            return plus_constant (op0, - INTVAL (op1));
          else
            return gen_rtx (MINUS, mode, op0, op1);
        }
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
        {
          exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
                       fold (build1 (NEGATE_EXPR, type,
                                     TREE_OPERAND (exp, 1))));
          goto plus_expr;
        }
      this_optab = sub_optab;
      goto binop;

    case MULT_EXPR:
      preexpand_calls (exp);
      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
        {
          register tree t1 = TREE_OPERAND (exp, 0);
          TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
          TREE_OPERAND (exp, 1) = t1;
        }

      /* Attempt to return something suitable for generating an
         indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == Pmode
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);

          /* Apply distributive law if OP0 is x+c.  */
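          /* I.e. (illustrative), (x + c) * d becomes (x * d) + (c * d),
             so the constant term stays outermost for address arithmetic.  */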
          if (GET_CODE (op0) == PLUS
              && GET_CODE (XEXP (op0, 1)) == CONST_INT)
            return gen_rtx (PLUS, mode,
                            gen_rtx (MULT, mode, XEXP (op0, 0),
                                     GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
                            GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
                                     * INTVAL (XEXP (op0, 1))));

          if (GET_CODE (op0) != REG)
            op0 = force_operand (op0, NULL_RTX);
          if (GET_CODE (op0) != REG)
            op0 = copy_to_mode_reg (mode, op0);

          return gen_rtx (MULT, mode, op0,
                          GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
        }

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
        subtarget = 0;

      /* Check for multiplying things that have been extended
         from a narrower type.  If this machine supports multiplying
         in that narrower type with a result in the desired type,
         do it that way, and avoid the explicit type-conversion.  */
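      /* E.g. (an illustrative case), (int) (short) a * (int) (short) b
         can use a short-by-short widening multiply (smul_widen_optab)
         that yields the int product directly.  */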
4932 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4933 && TREE_CODE (type) == INTEGER_TYPE
4934 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4935 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4936 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4937 && int_fits_type_p (TREE_OPERAND (exp, 1),
4938 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4939 /* Don't use a widening multiply if a shift will do. */
4940 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 4941 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
4942 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4943 ||
4944 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4945 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4946 ==
4947 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4948 /* If both operands are extended, they must either both
4949 be zero-extended or both be sign-extended. */
4950 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4951 ==
4952 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4953 {
4954 enum machine_mode innermode
4955 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4956 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4957 ? umul_widen_optab : smul_widen_optab);
4958 if (mode == GET_MODE_WIDER_MODE (innermode)
4959 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4960 {
4961 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4962 NULL_RTX, VOIDmode, 0);
4963 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4964 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4965 VOIDmode, 0);
4966 else
4967 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4968 NULL_RTX, VOIDmode, 0);
4969 goto binop2;
4970 }
4971 }
4972 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4973 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4974 return expand_mult (mode, op0, op1, target, unsignedp);
4975
4976 case TRUNC_DIV_EXPR:
4977 case FLOOR_DIV_EXPR:
4978 case CEIL_DIV_EXPR:
4979 case ROUND_DIV_EXPR:
4980 case EXACT_DIV_EXPR:
4981 preexpand_calls (exp);
4982 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4983 subtarget = 0;
4984 /* Possible optimization: compute the dividend with EXPAND_SUM
4985 then if the divisor is constant can optimize the case
4986 where some terms of the dividend have coeffs divisible by it. */
4987 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4988 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4989 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4990
4991 case RDIV_EXPR:
4992 this_optab = flodiv_optab;
4993 goto binop;
4994
4995 case TRUNC_MOD_EXPR:
4996 case FLOOR_MOD_EXPR:
4997 case CEIL_MOD_EXPR:
4998 case ROUND_MOD_EXPR:
4999 preexpand_calls (exp);
5000 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5001 subtarget = 0;
5002 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5003 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5004 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5005
5006 case FIX_ROUND_EXPR:
5007 case FIX_FLOOR_EXPR:
5008 case FIX_CEIL_EXPR:
5009 abort (); /* Not used for C. */
5010
5011 case FIX_TRUNC_EXPR:
5012 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5013 if (target == 0)
5014 target = gen_reg_rtx (mode);
5015 expand_fix (target, op0, unsignedp);
5016 return target;
5017
5018 case FLOAT_EXPR:
5019 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
5020 if (target == 0)
5021 target = gen_reg_rtx (mode);
5022 /* expand_float can't figure out what to do if FROM has VOIDmode.
5023 So give it the correct mode. With -O, cse will optimize this. */
5024 if (GET_MODE (op0) == VOIDmode)
5025 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5026 op0);
5027 expand_float (target, op0,
5028 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5029 return target;
5030
5031 case NEGATE_EXPR:
5032 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5033 temp = expand_unop (mode, neg_optab, op0, target, 0);
5034 if (temp == 0)
5035 abort ();
5036 return temp;
5037
5038 case ABS_EXPR:
5039 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5040
5041 /* Handle complex values specially. */
5042 {
5043 enum machine_mode opmode
5044 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5045
5046 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
5047 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
5048 return expand_complex_abs (opmode, op0, target, unsignedp);
5049 }
5050
5051 /* Unsigned abs is simply the operand. Testing here means we don't
5052 risk generating incorrect code below. */
5053 if (TREE_UNSIGNED (type))
5054 return op0;
5055
5056 /* First try to do it with a special abs instruction. */
5057 temp = expand_unop (mode, abs_optab, op0, target, 0);
5058 if (temp != 0)
5059 return temp;
5060
5061 /* If this machine has expensive jumps, we can do integer absolute
5062 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
5063 where W is the width of MODE. */
5064
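/* Worked example (added illustration): take x = -5 in an 8-bit mode,
   so W = 8. Then (signed) x >> 7 is -1 (all sign bits); (-1 ^ -5)
   equals 4; and 4 - (-1) equals 5 = abs (-5). For nonnegative x the
   shift yields 0, the XOR and subtraction change nothing, and no
   conditional jump is ever emitted. */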
5065 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
5066 {
5067 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
5068 size_int (GET_MODE_BITSIZE (mode) - 1),
5069 NULL_RTX, 0);
5070
5071 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
5072 OPTAB_LIB_WIDEN);
5073 if (temp != 0)
5074 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
5075 OPTAB_LIB_WIDEN);
5076
5077 if (temp != 0)
5078 return temp;
5079 }
5080
5081 /* If that does not win, use conditional jump and negate. */
5082 target = original_target;
5083 temp = gen_label_rtx ();
5084 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
5085 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5086 || (GET_CODE (target) == REG
5087 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5088 target = gen_reg_rtx (mode);
5089 emit_move_insn (target, op0);
5090 emit_cmp_insn (target,
5091 expand_expr (convert (type, integer_zero_node),
5092 NULL_RTX, VOIDmode, 0),
5093 GE, NULL_RTX, mode, 0, 0);
5094 NO_DEFER_POP;
5095 emit_jump_insn (gen_bge (temp));
5096 op0 = expand_unop (mode, neg_optab, target, target, 0);
5097 if (op0 != target)
5098 emit_move_insn (target, op0);
5099 emit_label (temp);
5100 OK_DEFER_POP;
5101 return target;
5102
5103 case MAX_EXPR:
5104 case MIN_EXPR:
5105 target = original_target;
5106 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5107 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
5108 || (GET_CODE (target) == REG
5109 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5110 target = gen_reg_rtx (mode);
5111 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5112 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5113
5114 /* First try to do it with a special MIN or MAX instruction.
5115 If that does not win, use a conditional jump to select the proper
5116 value. */
5117 this_optab = (TREE_UNSIGNED (type)
5118 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5119 : (code == MIN_EXPR ? smin_optab : smax_optab));
5120
5121 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5122 OPTAB_WIDEN);
5123 if (temp != 0)
5124 return temp;
5125
5126 if (target != op0)
5127 emit_move_insn (target, op0);
5128 op0 = gen_label_rtx ();
5129 /* If this mode is an integer too wide to compare properly,
5130 compare word by word. Rely on cse to optimize constant cases. */
5131 if (GET_MODE_CLASS (mode) == MODE_INT
5132 && !can_compare_p (mode))
5133 {
5134 if (code == MAX_EXPR)
5135 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
5136 else
5137 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
5138 emit_move_insn (target, op1);
5139 }
5140 else
5141 {
5142 if (code == MAX_EXPR)
5143 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5144 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5145 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5146 else
5147 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5148 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5149 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5150 if (temp == const0_rtx)
5151 emit_move_insn (target, op1);
5152 else if (temp != const_true_rtx)
5153 {
5154 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5155 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5156 else
5157 abort ();
5158 emit_move_insn (target, op1);
5159 }
5160 }
5161 emit_label (op0);
5162 return target;
5163
5164/* ??? Can optimize when the operand of this is a bitwise operation,
5165 by using a different bitwise operation. */
5166 case BIT_NOT_EXPR:
5167 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5168 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5169 if (temp == 0)
5170 abort ();
5171 return temp;
5172
5173 case FFS_EXPR:
5174 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5175 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5176 if (temp == 0)
5177 abort ();
5178 return temp;
5179
5180/* ??? Can optimize bitwise operations with one arg constant.
5181 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5182 and (a bitwise1 b) bitwise2 b (etc)
5183 but that is probably not worth while. */
5184
5185/* BIT_AND_EXPR is for bitwise anding.
5186 TRUTH_AND_EXPR is for anding two boolean values
5187 when we want in all cases to compute both of them.
5188 In general it is fastest to do TRUTH_AND_EXPR by
5189 computing both operands as actual zero-or-1 values
5190 and then bitwise anding. In cases where there cannot
5191 be any side effects, better code would be made by
5192 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
5193 but the question is how to recognize those cases. */
5194
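/* Added illustration: for "r = p && q" with no side effects,
   TRUTH_AND_EXPR evaluates both p and q to 0-or-1 values and simply
   bitwise-ANDs them (r = p & q), whereas TRUTH_ANDIF_EXPR would
   branch around the evaluation of q whenever p is 0. */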
5195 /* TRUTH_AND_EXPR can have a result whose mode doesn't match
5196 the operands. If so, don't use our target. */
5197 case TRUTH_AND_EXPR:
5198 if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5199 subtarget = 0;
5200 case BIT_AND_EXPR:
5201 this_optab = and_optab;
5202 goto binop;
5203
5204/* See comment above about TRUTH_AND_EXPR; it applies here too. */
5205 case TRUTH_OR_EXPR:
5206 if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5207 subtarget = 0;
5208 case BIT_IOR_EXPR:
5209 this_optab = ior_optab;
5210 goto binop;
5211
5212 case TRUTH_XOR_EXPR:
5213 if (mode != TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
5214 subtarget = 0;
5215 case BIT_XOR_EXPR:
5216 this_optab = xor_optab;
5217 goto binop;
5218
5219 case LSHIFT_EXPR:
5220 case RSHIFT_EXPR:
5221 case LROTATE_EXPR:
5222 case RROTATE_EXPR:
5223 preexpand_calls (exp);
5224 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5225 subtarget = 0;
5226 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5227 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5228 unsignedp);
5229
5230/* Could determine the answer when only additive constants differ.
5231 Also, the addition of one can be handled by changing the condition. */
5232 case LT_EXPR:
5233 case LE_EXPR:
5234 case GT_EXPR:
5235 case GE_EXPR:
5236 case EQ_EXPR:
5237 case NE_EXPR:
5238 preexpand_calls (exp);
5239 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5240 if (temp != 0)
5241 return temp;
5242 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5243 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5244 && original_target
5245 && GET_CODE (original_target) == REG
5246 && (GET_MODE (original_target)
5247 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5248 {
5249 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
5250 if (temp != original_target)
5251 temp = copy_to_reg (temp);
5252 op1 = gen_label_rtx ();
5253 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5254 GET_MODE (temp), unsignedp, 0);
5255 emit_jump_insn (gen_beq (op1));
5256 emit_move_insn (temp, const1_rtx);
5257 emit_label (op1);
5258 return temp;
5259 }
5260 /* If no set-flag instruction, must generate a conditional
5261 store into a temporary variable. Drop through
5262 and handle this like && and ||. */
5263
5264 case TRUTH_ANDIF_EXPR:
5265 case TRUTH_ORIF_EXPR:
5266 if (! ignore
5267 && (target == 0 || ! safe_from_p (target, exp)
5268 /* Make sure we don't have a hard reg (such as function's return
5269 value) live across basic blocks, if not optimizing. */
5270 || (!optimize && GET_CODE (target) == REG
5271 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
5272 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5273
5274 if (target)
5275 emit_clr_insn (target);
5276
5277 op1 = gen_label_rtx ();
5278 jumpifnot (exp, op1);
5279
5280 if (target)
5281 emit_0_to_1_insn (target);
5282
5283 emit_label (op1);
5284 return ignore ? const0_rtx : target;
5285
5286 case TRUTH_NOT_EXPR:
5287 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5288 /* The parser is careful to generate TRUTH_NOT_EXPR
5289 only with operands that are always zero or one. */
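/* Added illustration: because the operand is known to be 0 or 1,
   logical negation reduces to XOR with 1: 0^1 = 1 and 1^1 = 0. */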
5290 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5291 target, 1, OPTAB_LIB_WIDEN);
5292 if (temp == 0)
5293 abort ();
5294 return temp;
5295
5296 case COMPOUND_EXPR:
5297 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5298 emit_queue ();
5299 return expand_expr (TREE_OPERAND (exp, 1),
5300 (ignore ? const0_rtx : target),
5301 VOIDmode, 0);
5302
5303 case COND_EXPR:
5304 {
5305 /* Note that COND_EXPRs whose type is a structure or union
5306 are required to be constructed to contain assignments of
5307 a temporary variable, so that we can evaluate them here
5308 for side effect only. If type is void, we must do likewise. */
5309
5310 /* If an arm of the branch requires a cleanup,
5311 only that cleanup is performed. */
5312
5313 tree singleton = 0;
5314 tree binary_op = 0, unary_op = 0;
5315 tree old_cleanups = cleanups_this_call;
5316 cleanups_this_call = 0;
5317
5318 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5319 convert it to our mode, if necessary. */
5320 if (integer_onep (TREE_OPERAND (exp, 1))
5321 && integer_zerop (TREE_OPERAND (exp, 2))
5322 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5323 {
5324 if (ignore)
5325 {
5326 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5327 modifier);
5328 return const0_rtx;
5329 }
5330
5331 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5332 if (GET_MODE (op0) == mode)
5333 return op0;
5334 if (target == 0)
5335 target = gen_reg_rtx (mode);
5336 convert_move (target, op0, unsignedp);
5337 return target;
5338 }
5339
5340 /* If we are not to produce a result, we have no target. Otherwise,
5341 if a target was specified use it; it will not be used as an
5342 intermediate target unless it is safe. If no target, use a
5343 temporary. */
5344
5345 if (ignore)
5346 temp = 0;
5347 else if (original_target
5348 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
5349 temp = original_target;
5350 else if (mode == BLKmode)
5351 {
5352 if (TYPE_SIZE (type) == 0
5353 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5354 abort ();
5355
5356 temp = assign_stack_temp (BLKmode,
5357 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5358 + BITS_PER_UNIT - 1)
5359 / BITS_PER_UNIT, 0);
5360 MEM_IN_STRUCT_P (temp)
5361 = (TREE_CODE (type) == RECORD_TYPE
5362 || TREE_CODE (type) == UNION_TYPE
5363 || TREE_CODE (type) == QUAL_UNION_TYPE
5364 || TREE_CODE (type) == ARRAY_TYPE);
5365 }
5366 else
5367 temp = gen_reg_rtx (mode);
5368
5369 /* Check for X ? A + B : A. If we have this, we can copy
5370 A to the output and conditionally add B. Similarly for unary
5371 operations. Don't do this if X has side-effects because
5372 those side effects might affect A or B and the "?" operation is
5373 a sequence point in ANSI. (We test for side effects later.) */
5374
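/* Added illustration: for "r = c ? a + b : a", the singleton is A:
   we can copy a into the result unconditionally and then evaluate c
   and conditionally add b, rather than materializing both arms and
   selecting between them. */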
5375 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5376 && operand_equal_p (TREE_OPERAND (exp, 2),
5377 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5378 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5379 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5380 && operand_equal_p (TREE_OPERAND (exp, 1),
5381 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5382 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5383 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5384 && operand_equal_p (TREE_OPERAND (exp, 2),
5385 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5386 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5387 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5388 && operand_equal_p (TREE_OPERAND (exp, 1),
5389 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5390 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5391
5392 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5393 operation, do this as A + (X != 0). Similarly for other simple
5394 binary operators. */
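/* Added illustration: "r = i > 0 ? x + 1 : x" becomes
   r = x + (i > 0) when the comparison can be computed as a 0-or-1
   store-flag value, avoiding a conditional jump entirely. */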
5395 if (temp && singleton && binary_op
5396 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5397 && (TREE_CODE (binary_op) == PLUS_EXPR
5398 || TREE_CODE (binary_op) == MINUS_EXPR
5399 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5400 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5401 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5402 && integer_onep (TREE_OPERAND (binary_op, 1))
5403 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5404 {
5405 rtx result;
5406 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5407 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5408 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5409 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5410 : and_optab);
5411
5412 /* If we had X ? A : A + 1, do this as A + (X == 0).
5413
5414 We have to invert the truth value here and then put it
5415 back later if do_store_flag fails. We cannot simply copy
5416 TREE_OPERAND (exp, 0) to another variable and modify that
5417 because invert_truthvalue can modify the tree pointed to
5418 by its argument. */
5419 if (singleton == TREE_OPERAND (exp, 1))
5420 TREE_OPERAND (exp, 0)
5421 = invert_truthvalue (TREE_OPERAND (exp, 0));
5422
5423 result = do_store_flag (TREE_OPERAND (exp, 0),
5424 (safe_from_p (temp, singleton)
5425 ? temp : NULL_RTX),
5426 mode, BRANCH_COST <= 1);
5427
5428 if (result)
5429 {
5430 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5431 return expand_binop (mode, boptab, op1, result, temp,
5432 unsignedp, OPTAB_LIB_WIDEN);
5433 }
5434 else if (singleton == TREE_OPERAND (exp, 1))
5435 TREE_OPERAND (exp, 0)
5436 = invert_truthvalue (TREE_OPERAND (exp, 0));
5437 }
5438
5439 NO_DEFER_POP;
5440 op0 = gen_label_rtx ();
5441
5442 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5443 {
5444 if (temp != 0)
5445 {
5446 /* If the target conflicts with the other operand of the
5447 binary op, we can't use it. Also, we can't use the target
5448 if it is a hard register, because evaluating the condition
5449 might clobber it. */
5450 if ((binary_op
5451 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5452 || (GET_CODE (temp) == REG
5453 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5454 temp = gen_reg_rtx (mode);
5455 store_expr (singleton, temp, 0);
5456 }
5457 else
5458 expand_expr (singleton,
5459 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5460 if (cleanups_this_call)
5461 {
5462 sorry ("aggregate value in COND_EXPR");
5463 cleanups_this_call = 0;
5464 }
5465 if (singleton == TREE_OPERAND (exp, 1))
5466 jumpif (TREE_OPERAND (exp, 0), op0);
5467 else
5468 jumpifnot (TREE_OPERAND (exp, 0), op0);
5469
5470 if (binary_op && temp == 0)
5471 /* Just touch the other operand. */
5472 expand_expr (TREE_OPERAND (binary_op, 1),
5473 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5474 else if (binary_op)
5475 store_expr (build (TREE_CODE (binary_op), type,
5476 make_tree (type, temp),
5477 TREE_OPERAND (binary_op, 1)),
5478 temp, 0);
5479 else
5480 store_expr (build1 (TREE_CODE (unary_op), type,
5481 make_tree (type, temp)),
5482 temp, 0);
5483 op1 = op0;
5484 }
5485#if 0
5486 /* This is now done in jump.c and is better done there because it
5487 produces shorter register lifetimes. */
5488
5489 /* Check for both possibilities either constants or variables
5490 in registers (but not the same as the target!). If so, can
5491 save branches by assigning one, branching, and assigning the
5492 other. */
5493 else if (temp && GET_MODE (temp) != BLKmode
5494 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5495 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5496 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5497 && DECL_RTL (TREE_OPERAND (exp, 1))
5498 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5499 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5500 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5501 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5502 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5503 && DECL_RTL (TREE_OPERAND (exp, 2))
5504 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5505 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5506 {
5507 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5508 temp = gen_reg_rtx (mode);
5509 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5510 jumpifnot (TREE_OPERAND (exp, 0), op0);
5511 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5512 op1 = op0;
5513 }
5514#endif
5515 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5516 comparison operator. If we have one of these cases, set the
5517 output to A, branch on A (cse will merge these two references),
5518 then set the output to FOO. */
5519 else if (temp
5520 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5521 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5522 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5523 TREE_OPERAND (exp, 1), 0)
5524 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5525 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5526 {
5527 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5528 temp = gen_reg_rtx (mode);
5529 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5530 jumpif (TREE_OPERAND (exp, 0), op0);
5531 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5532 op1 = op0;
5533 }
5534 else if (temp
5535 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5536 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5537 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5538 TREE_OPERAND (exp, 2), 0)
5539 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5540 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5541 {
5542 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5543 temp = gen_reg_rtx (mode);
5544 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5545 jumpifnot (TREE_OPERAND (exp, 0), op0);
5546 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5547 op1 = op0;
5548 }
5549 else
5550 {
5551 op1 = gen_label_rtx ();
5552 jumpifnot (TREE_OPERAND (exp, 0), op0);
5553 if (temp != 0)
5554 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5555 else
5556 expand_expr (TREE_OPERAND (exp, 1),
5557 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5558 if (cleanups_this_call)
5559 {
5560 sorry ("aggregate value in COND_EXPR");
5561 cleanups_this_call = 0;
5562 }
5563
5564 emit_queue ();
5565 emit_jump_insn (gen_jump (op1));
5566 emit_barrier ();
5567 emit_label (op0);
5568 if (temp != 0)
5569 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5570 else
5571 expand_expr (TREE_OPERAND (exp, 2),
5572 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5573 }
5574
5575 if (cleanups_this_call)
5576 {
5577 sorry ("aggregate value in COND_EXPR");
5578 cleanups_this_call = 0;
5579 }
5580
5581 emit_queue ();
5582 emit_label (op1);
5583 OK_DEFER_POP;
5584 cleanups_this_call = old_cleanups;
5585 return temp;
5586 }
5587
5588 case TARGET_EXPR:
5589 {
5590 /* Something needs to be initialized, but we didn't know
5591 where that thing was when building the tree. For example,
5592 it could be the return value of a function, or a parameter
5593 to a function which lays down in the stack, or a temporary
5594 variable which must be passed by reference.
5595
5596 We guarantee that the expression will either be constructed
5597 or copied into our original target. */
5598
5599 tree slot = TREE_OPERAND (exp, 0);
5600 tree exp1;
5601
5602 if (TREE_CODE (slot) != VAR_DECL)
5603 abort ();
5604
5605 if (target == 0)
5606 {
5607 if (DECL_RTL (slot) != 0)
5608 {
5609 target = DECL_RTL (slot);
5610 /* If we have already expanded the slot, don't do
5611 it again. (mrs) */
5612 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5613 return target;
5614 }
5615 else
5616 {
5617 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5618 /* All temp slots at this level must not conflict. */
5619 preserve_temp_slots (target);
5620 DECL_RTL (slot) = target;
5621 }
5622
5623 /* We set IGNORE when we know that we're already
5624 doing this for a cleanup. */
5625 if (ignore == 0)
5626 {
5627 /* Since SLOT is not known to the called function
5628 to belong to its stack frame, we must build an explicit
5629 cleanup. This case occurs when we must build up a reference
5630 to pass the reference as an argument. In this case,
5631 it is very likely that such a reference need not be
5632 built here. */
5633
5634 if (TREE_OPERAND (exp, 2) == 0)
5635 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5636 if (TREE_OPERAND (exp, 2))
5637 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5638 cleanups_this_call);
5639 }
5640 }
5641 else
5642 {
5643 /* This case does occur, when expanding a parameter which
5644 needs to be constructed on the stack. The target
5645 is the actual stack address that we want to initialize.
5646 The function we call will perform the cleanup in this case. */
5647
5648 /* If we have already assigned it space, use that space,
5649 not the target that we were passed in, as our target
5650 parameter is only a hint. */
5651 if (DECL_RTL (slot) != 0)
5652 {
5653 target = DECL_RTL (slot);
5654 /* If we have already expanded the slot, don't do
5655 it again. (mrs) */
5656 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5657 return target;
5658 }
5659
5660 DECL_RTL (slot) = target;
5661 }
5662
5663 exp1 = TREE_OPERAND (exp, 1);
5664 /* Mark it as expanded. */
5665 TREE_OPERAND (exp, 1) = NULL_TREE;
5666
5667 return expand_expr (exp1, target, tmode, modifier);
5668 }
5669
5670 case INIT_EXPR:
5671 {
5672 tree lhs = TREE_OPERAND (exp, 0);
5673 tree rhs = TREE_OPERAND (exp, 1);
5674 tree noncopied_parts = 0;
5675 tree lhs_type = TREE_TYPE (lhs);
5676
5677 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5678 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5679 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5680 TYPE_NONCOPIED_PARTS (lhs_type));
5681 while (noncopied_parts != 0)
5682 {
5683 expand_assignment (TREE_VALUE (noncopied_parts),
5684 TREE_PURPOSE (noncopied_parts), 0, 0);
5685 noncopied_parts = TREE_CHAIN (noncopied_parts);
5686 }
5687 return temp;
5688 }
5689
5690 case MODIFY_EXPR:
5691 {
5692 /* If lhs is complex, expand calls in rhs before computing it.
5693 That's so we don't compute a pointer and save it over a call.
5694 If lhs is simple, compute it first so we can give it as a
5695 target if the rhs is just a call. This avoids an extra temp and copy
5696 and that prevents a partial-subsumption which makes bad code.
5697 Actually we could treat component_ref's of vars like vars. */
5698
5699 tree lhs = TREE_OPERAND (exp, 0);
5700 tree rhs = TREE_OPERAND (exp, 1);
5701 tree noncopied_parts = 0;
5702 tree lhs_type = TREE_TYPE (lhs);
5703
5704 temp = 0;
5705
5706 if (TREE_CODE (lhs) != VAR_DECL
5707 && TREE_CODE (lhs) != RESULT_DECL
5708 && TREE_CODE (lhs) != PARM_DECL)
5709 preexpand_calls (exp);
5710
5711 /* Check for |= or &= of a bitfield of size one into another bitfield
5712 of size 1. In this case, (unless we need the result of the
5713 assignment) we can do this more efficiently with a
5714 test followed by an assignment, if necessary.
5715
5716 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5717 things change so we do, this code should be enhanced to
5718 support it. */
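/* Added illustration: with one-bit fields a and b, "s.a |= s.b;"
   whose value is unused can be compiled as a test of s.b that, when
   the bit is clear, jumps around a store of 1 into s.a, instead of a
   load/or/re-insert sequence. */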
5719 if (ignore
5720 && TREE_CODE (lhs) == COMPONENT_REF
5721 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5722 || TREE_CODE (rhs) == BIT_AND_EXPR)
5723 && TREE_OPERAND (rhs, 0) == lhs
5724 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5725 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5726 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5727 {
5728 rtx label = gen_label_rtx ();
5729
5730 do_jump (TREE_OPERAND (rhs, 1),
5731 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5732 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5733 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5734 (TREE_CODE (rhs) == BIT_IOR_EXPR
5735 ? integer_one_node
5736 : integer_zero_node)),
5737 0, 0);
5738 do_pending_stack_adjust ();
5739 emit_label (label);
5740 return const0_rtx;
5741 }
5742
5743 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5744 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5745 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5746 TYPE_NONCOPIED_PARTS (lhs_type));
5747
5748 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5749 while (noncopied_parts != 0)
5750 {
5751 expand_assignment (TREE_PURPOSE (noncopied_parts),
5752 TREE_VALUE (noncopied_parts), 0, 0);
5753 noncopied_parts = TREE_CHAIN (noncopied_parts);
5754 }
5755 return temp;
5756 }
5757
5758 case PREINCREMENT_EXPR:
5759 case PREDECREMENT_EXPR:
5760 return expand_increment (exp, 0);
5761
5762 case POSTINCREMENT_EXPR:
5763 case POSTDECREMENT_EXPR:
5764 /* Faster to treat as pre-increment if result is not used. */
5765 return expand_increment (exp, ! ignore);
5766
5767 case ADDR_EXPR:
5768 /* Are we taking the address of a nested function? */
5769 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5770 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5771 {
5772 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5773 op0 = force_operand (op0, target);
5774 }
5775 else
5776 {
5777 /* We make sure to pass const0_rtx down if we came in with
5778 ignore set, to avoid doing the cleanups twice for something. */
5779 op0 = expand_expr (TREE_OPERAND (exp, 0),
5780 ignore ? const0_rtx : NULL_RTX, VOIDmode,
5781 (modifier == EXPAND_INITIALIZER
5782 ? modifier : EXPAND_CONST_ADDRESS));
5783
5784 /* We would like the object in memory. If it is a constant,
5785 we can have it be statically allocated into memory. For
5786 a non-constant (REG or SUBREG), we need to allocate some
5787 memory and store the value into it. */
5788
5789 if (CONSTANT_P (op0))
5790 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5791 op0);
5792
5793 /* These cases happen in Fortran. Is that legitimate?
5794 Should Fortran work in another way?
5795 Do they happen in C? */
5796 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5797 || GET_CODE (op0) == CONCAT)
5798 {
5799 /* If this object is in a register, it must not
5800 be BLKmode. */
5801 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5802 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5803 rtx memloc
5804 = assign_stack_temp (inner_mode,
5805 int_size_in_bytes (inner_type), 1);
5806
5807 emit_move_insn (memloc, op0);
5808 op0 = memloc;
5809 }
5810
5811 if (GET_CODE (op0) != MEM)
5812 abort ();
5813
5814 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5815 return XEXP (op0, 0);
5816 op0 = force_operand (XEXP (op0, 0), target);
5817 }
5818 if (flag_force_addr && GET_CODE (op0) != REG)
5819 return force_reg (Pmode, op0);
5820 return op0;
5821
5822 case ENTRY_VALUE_EXPR:
5823 abort ();
5824
5825 /* COMPLEX type for Extended Pascal & Fortran */
5826 case COMPLEX_EXPR:
5827 {
5828 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5829 rtx insns;
5830
5831 /* Get the rtx code of the operands. */
5832 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5833 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5834
5835 if (! target)
5836 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5837
5838 start_sequence ();
5839
5840 /* Move the real (op0) and imaginary (op1) parts to their location. */
5841 emit_move_insn (gen_realpart (mode, target), op0);
5842 emit_move_insn (gen_imagpart (mode, target), op1);
5843
5844 insns = get_insns ();
5845 end_sequence ();
5846
5847 /* Complex construction should appear as a single unit. */
5848 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
5849 each with a separate pseudo as destination.
5850 It's not correct for flow to treat them as a unit. */
5851 if (GET_CODE (target) != CONCAT)
5852 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
5853 else
5854 emit_insns (insns);
5855
5856 return target;
5857 }
5858
5859 case REALPART_EXPR:
5860 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5861 return gen_realpart (mode, op0);
5862
5863 case IMAGPART_EXPR:
5864 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5865 return gen_imagpart (mode, op0);
5866
5867 case CONJ_EXPR:
5868 {
5869 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5870 rtx imag_t;
5871 rtx insns;
5872
5873 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5874
5875 if (! target)
5876 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5877
5878 start_sequence ();
5879
5880 /* Store the realpart and the negated imagpart to target. */
5881 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5882
5883 imag_t = gen_imagpart (mode, target);
5884 temp = expand_unop (mode, neg_optab,
5885 gen_imagpart (mode, op0), imag_t, 0);
5886 if (temp != imag_t)
5887 emit_move_insn (imag_t, temp);
5888
5889 insns = get_insns ();
5890 end_sequence ();
5891
5892 /* Conjugate should appear as a single unit */
5893 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
5894 each with a separate pseudo as destination.
5895 It's not correct for flow to treat them as a unit. */
5896 if (GET_CODE (target) != CONCAT)
5897 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
5898 else
5899 emit_insns (insns);
5900
5901 return target;
5902 }
5903
5904 case ERROR_MARK:
5905 op0 = CONST0_RTX (tmode);
5906 if (op0 != 0)
5907 return op0;
5908 return const0_rtx;
5909
5910 default:
5911 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
5912 }
5913
5914 /* Here to do an ordinary binary operator, generating an instruction
5915 from the optab already placed in `this_optab'. */
5916 binop:
5917 preexpand_calls (exp);
5918 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5919 subtarget = 0;
5920 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5921 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5922 binop2:
5923 temp = expand_binop (mode, this_optab, op0, op1, target,
5924 unsignedp, OPTAB_LIB_WIDEN);
5925 if (temp == 0)
5926 abort ();
5927 return temp;
5928}
5929
5930
5931/* Emit bytecode to evaluate the given expression EXP to the stack. */
5932void
5933bc_expand_expr (exp)
5934 tree exp;
5935 {
5936 enum tree_code code;
5937 tree type, arg0;
5938 rtx r;
5939 struct binary_operator *binoptab;
5940 struct unary_operator *unoptab;
5941 struct increment_operator *incroptab;
5942 struct bc_label *lab, *lab1;
5943 enum bytecode_opcode opcode;
5944
5945
5946 code = TREE_CODE (exp);
5947
5948 switch (code)
5949 {
5950 case PARM_DECL:
5951
5952 if (DECL_RTL (exp) == 0)
5953 {
5954 error_with_decl (exp, "prior parameter's size depends on `%s'");
5955 return;
5956 }
5957
5958 bc_load_parmaddr (DECL_RTL (exp));
5959 bc_load_memory (TREE_TYPE (exp), exp);
5960
5961 return;
5962
5963 case VAR_DECL:
5964
5965 if (DECL_RTL (exp) == 0)
5966 abort ();
5967
5968#if 0
5969 if (BYTECODE_LABEL (DECL_RTL (exp)))
5970 bc_load_externaddr (DECL_RTL (exp));
5971 else
5972 bc_load_localaddr (DECL_RTL (exp));
5973#endif
5974 if (TREE_PUBLIC (exp))
5975 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
5976 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
5977 else
5978 bc_load_localaddr (DECL_RTL (exp));
5979
5980 bc_load_memory (TREE_TYPE (exp), exp);
5981 return;
5982
5983 case INTEGER_CST:
5984
5985#ifdef DEBUG_PRINT_CODE
5986 fprintf (stderr, " [%x]\n", TREE_INT_CST_LOW (exp));
5987#endif
5988 bc_emit_instruction (mode_to_const_map[(int) (DECL_BIT_FIELD (exp)
5989 ? SImode
5990 : TYPE_MODE (TREE_TYPE (exp)))],
5991 (HOST_WIDE_INT) TREE_INT_CST_LOW (exp));
5992 return;
5993
5994 case REAL_CST:
5995
5996#if 0
5997#ifdef DEBUG_PRINT_CODE
5998 fprintf (stderr, " [%g]\n", (double) TREE_INT_CST_LOW (exp));
5999#endif
6000 /* FIX THIS: find a better way to pass real_cst's. -bson */
6001 bc_emit_instruction (mode_to_const_map[TYPE_MODE (TREE_TYPE (exp))],
6002 (double) TREE_REAL_CST (exp));
6003#else
6004 abort ();
6005#endif
6006
6007 return;
6008
6009 case CALL_EXPR:
6010
6011 /* We build a call description vector describing the type of
6012 the return value and of the arguments; this call vector,
6013 together with a pointer to a location for the return value
6014 and the base of the argument list, is passed to the low
6015 level machine dependent call subroutine, which is responsible
6016 for putting the arguments wherever real functions expect
6017 them, as well as getting the return value back. */
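/* Rough sketch of the vector (added; inferred from the code below):
     { nargs, return type code, return size,
       arg1 type code, arg1 size, arg2 type code, arg2 size, ... }
   i.e. one type-code/size pair per argument, preceded by the return
   value's pair and the argument count. */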
6018 {
6019 tree calldesc = 0, arg;
6020 int nargs = 0, i;
6021 rtx retval;
6022
6023 /* Push the evaluated args on the evaluation stack in reverse
6024 order. Also make an entry for each arg in the calldesc
6025 vector while we're at it. */
6026
6027 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6028
6029 for (arg = TREE_OPERAND (exp, 1); arg; arg = TREE_CHAIN (arg))
6030 {
6031 ++nargs;
6032 bc_expand_expr (TREE_VALUE (arg));
6033
6034 calldesc = tree_cons ((tree) 0,
6035 size_in_bytes (TREE_TYPE (TREE_VALUE (arg))),
6036 calldesc);
6037 calldesc = tree_cons ((tree) 0,
6038 bc_runtime_type_code (TREE_TYPE (TREE_VALUE (arg))),
6039 calldesc);
6040 }
6041
6042 TREE_OPERAND (exp, 1) = nreverse (TREE_OPERAND (exp, 1));
6043
6044 /* Allocate a location for the return value and push its
6045 address on the evaluation stack. Also make an entry
6046 at the front of the calldesc for the return value type. */
6047
6048 type = TREE_TYPE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6049 retval = bc_allocate_local (int_size_in_bytes (type), TYPE_ALIGN (type));
6050 bc_load_localaddr (retval);
6051
6052 calldesc = tree_cons ((tree) 0, size_in_bytes (type), calldesc);
6053 calldesc = tree_cons ((tree) 0, bc_runtime_type_code (type), calldesc);
6054
6055 /* Prepend the argument count. */
6056 calldesc = tree_cons ((tree) 0,
6057 build_int_2 (nargs, 0),
6058 calldesc);
6059
6060 /* Push the address of the call description vector on the stack. */
6061 calldesc = build_nt (CONSTRUCTOR, (tree) 0, calldesc);
6062 TREE_TYPE (calldesc) = build_array_type (integer_type_node,
6063 build_index_type (build_int_2 (nargs * 2, 0)));
6064 r = output_constant_def (calldesc);
6065 bc_load_externaddr (r);
6066
6067 /* Push the address of the function to be called. */
6068 bc_expand_expr (TREE_OPERAND (exp, 0));
6069
6070 /* Call the function, popping its address and the calldesc vector
6071 address off the evaluation stack in the process. */
6072 bc_emit_instruction (call);
6073
6074 /* Pop the arguments off the stack. */
6075 bc_adjust_stack (nargs);
6076
6077 /* Load the return value onto the stack. */
6078 bc_load_localaddr (retval);
6079 bc_load_memory (type, TREE_OPERAND (exp, 0));
6080 }
6081 return;
6082
6083 case SAVE_EXPR:
6084
6085 if (!SAVE_EXPR_RTL (exp))
6086 {
6087 /* First time around: copy to local variable */
6088 SAVE_EXPR_RTL (exp) = bc_allocate_local (int_size_in_bytes (TREE_TYPE (exp)),
6089 TYPE_ALIGN (TREE_TYPE(exp)));
6090 bc_expand_expr (TREE_OPERAND (exp, 0));
6091 bc_emit_instruction (duplicate);
6092
6093 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6094 bc_store_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6095 }
6096 else
6097 {
6098 /* Consecutive reference: use saved copy */
6099 bc_load_localaddr (SAVE_EXPR_RTL (exp));
6100 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6101 }
6102 return;
6103
6104#if 0
6105 /* FIXME: the XXXX_STMT codes have been removed in GCC2, but
6106 how are they handled instead? */
6107 case LET_STMT:
6108
6109 TREE_USED (exp) = 1;
6110 bc_expand_expr (STMT_BODY (exp));
6111 return;
6112#endif
6113
6114 case NOP_EXPR:
6115 case CONVERT_EXPR:
6116
6117 bc_expand_expr (TREE_OPERAND (exp, 0));
6118 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)), TREE_TYPE (exp));
6119 return;
6120
6121 case MODIFY_EXPR:
6122
6123 expand_assignment (TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1), 0, 0);
6124 return;
6125
6126 case ADDR_EXPR:
6127
6128 bc_expand_address (TREE_OPERAND (exp, 0));
6129 return;
6130
6131 case INDIRECT_REF:
6132
6133 bc_expand_expr (TREE_OPERAND (exp, 0));
6134 bc_load_memory (TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6135 return;
6136
6137 case ARRAY_REF:
6138
6139 bc_expand_expr (bc_canonicalize_array_ref (exp));
6140 return;
6141
6142 case COMPONENT_REF:
6143
6144 bc_expand_component_address (exp);
6145
6146 /* If we have a bitfield, generate a proper load */
6147 bc_load_memory (TREE_TYPE (TREE_OPERAND (exp, 1)), TREE_OPERAND (exp, 1));
6148 return;
6149
6150 case COMPOUND_EXPR:
6151
6152 bc_expand_expr (TREE_OPERAND (exp, 0));
6153 bc_emit_instruction (drop);
6154 bc_expand_expr (TREE_OPERAND (exp, 1));
6155 return;
6156
6157 case COND_EXPR:
6158
6159 bc_expand_expr (TREE_OPERAND (exp, 0));
6160 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6161 lab = bc_get_bytecode_label ();
6162 bc_emit_bytecode (xjumpifnot);
6163 bc_emit_bytecode_labelref (lab);
6164
6165#ifdef DEBUG_PRINT_CODE
6166 fputc ('\n', stderr);
6167#endif
6168 bc_expand_expr (TREE_OPERAND (exp, 1));
6169 lab1 = bc_get_bytecode_label ();
6170 bc_emit_bytecode (jump);
6171 bc_emit_bytecode_labelref (lab1);
6172
6173#ifdef DEBUG_PRINT_CODE
6174 fputc ('\n', stderr);
6175#endif
6176
6177 bc_emit_bytecode_labeldef (lab);
6178 bc_expand_expr (TREE_OPERAND (exp, 2));
6179 bc_emit_bytecode_labeldef (lab1);
6180 return;
6181
6182 case TRUTH_ANDIF_EXPR:
6183
6184 opcode = xjumpifnot;
6185 goto andorif;
6186
6187 case TRUTH_ORIF_EXPR:
6188
6189 opcode = xjumpif;
6190 goto andorif;
6191
6192 case PLUS_EXPR:
6193
6194 binoptab = optab_plus_expr;
6195 goto binop;
6196
6197 case MINUS_EXPR:
6198
6199 binoptab = optab_minus_expr;
6200 goto binop;
6201
6202 case MULT_EXPR:
6203
6204 binoptab = optab_mult_expr;
6205 goto binop;
6206
6207 case TRUNC_DIV_EXPR:
6208 case FLOOR_DIV_EXPR:
6209 case CEIL_DIV_EXPR:
6210 case ROUND_DIV_EXPR:
6211 case EXACT_DIV_EXPR:
6212
6213 binoptab = optab_trunc_div_expr;
6214 goto binop;
6215
6216 case TRUNC_MOD_EXPR:
6217 case FLOOR_MOD_EXPR:
6218 case CEIL_MOD_EXPR:
6219 case ROUND_MOD_EXPR:
6220
6221 binoptab = optab_trunc_mod_expr;
6222 goto binop;
6223
6224 case FIX_ROUND_EXPR:
6225 case FIX_FLOOR_EXPR:
6226 case FIX_CEIL_EXPR:
6227 abort (); /* Not used for C. */
6228
6229 case FIX_TRUNC_EXPR:
6230 case FLOAT_EXPR:
6231 case MAX_EXPR:
6232 case MIN_EXPR:
6233 case FFS_EXPR:
6234 case LROTATE_EXPR:
6235 case RROTATE_EXPR:
6236 abort (); /* FIXME */
6237
6238 case RDIV_EXPR:
6239
6240 binoptab = optab_rdiv_expr;
6241 goto binop;
6242
6243 case BIT_AND_EXPR:
6244
6245 binoptab = optab_bit_and_expr;
6246 goto binop;
6247
6248 case BIT_IOR_EXPR:
6249
6250 binoptab = optab_bit_ior_expr;
6251 goto binop;
6252
6253 case BIT_XOR_EXPR:
6254
6255 binoptab = optab_bit_xor_expr;
6256 goto binop;
6257
6258 case LSHIFT_EXPR:
6259
6260 binoptab = optab_lshift_expr;
6261 goto binop;
6262
6263 case RSHIFT_EXPR:
6264
6265 binoptab = optab_rshift_expr;
6266 goto binop;
6267
6268 case TRUTH_AND_EXPR:
6269
6270 binoptab = optab_truth_and_expr;
6271 goto binop;
6272
6273 case TRUTH_OR_EXPR:
6274
6275 binoptab = optab_truth_or_expr;
6276 goto binop;
6277
6278 case LT_EXPR:
6279
6280 binoptab = optab_lt_expr;
6281 goto binop;
6282
6283 case LE_EXPR:
6284
6285 binoptab = optab_le_expr;
6286 goto binop;
6287
6288 case GE_EXPR:
6289
6290 binoptab = optab_ge_expr;
6291 goto binop;
6292
6293 case GT_EXPR:
6294
6295 binoptab = optab_gt_expr;
6296 goto binop;
6297
6298 case EQ_EXPR:
6299
6300 binoptab = optab_eq_expr;
6301 goto binop;
6302
6303 case NE_EXPR:
6304
6305 binoptab = optab_ne_expr;
6306 goto binop;
6307
6308 case NEGATE_EXPR:
6309
6310 unoptab = optab_negate_expr;
6311 goto unop;
6312
6313 case BIT_NOT_EXPR:
6314
6315 unoptab = optab_bit_not_expr;
6316 goto unop;
6317
6318 case TRUTH_NOT_EXPR:
6319
6320 unoptab = optab_truth_not_expr;
6321 goto unop;
6322
6323 case PREDECREMENT_EXPR:
6324
6325 incroptab = optab_predecrement_expr;
6326 goto increment;
6327
6328 case PREINCREMENT_EXPR:
6329
6330 incroptab = optab_preincrement_expr;
6331 goto increment;
6332
6333 case POSTDECREMENT_EXPR:
6334
6335 incroptab = optab_postdecrement_expr;
6336 goto increment;
6337
6338 case POSTINCREMENT_EXPR:
6339
6340 incroptab = optab_postincrement_expr;
6341 goto increment;
6342
6343 case CONSTRUCTOR:
6344
6345 bc_expand_constructor (exp);
6346 return;
6347
6348 case ERROR_MARK:
6349 case RTL_EXPR:
6350
6351 return;
6352
6353 case BIND_EXPR:
6354 {
6355 tree vars = TREE_OPERAND (exp, 0);
6356 int vars_need_expansion = 0;
6357
6358 /* Need to open a binding contour here because
6359 if there are any cleanups they must be contained here. */
6360 expand_start_bindings (0);
6361
6362 /* Mark the corresponding BLOCK for output. */
6363 if (TREE_OPERAND (exp, 2) != 0)
6364 TREE_USED (TREE_OPERAND (exp, 2)) = 1;
6365
6366 /* If VARS have not yet been expanded, expand them now. */
6367 while (vars)
6368 {
6369 if (DECL_RTL (vars) == 0)
6370 {
6371 vars_need_expansion = 1;
6372 bc_expand_decl (vars, 0);
6373 }
6374 bc_expand_decl_init (vars);
6375 vars = TREE_CHAIN (vars);
6376 }
6377
6378 bc_expand_expr (TREE_OPERAND (exp, 1));
6379
6380 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6381
6382 return;
6383 }
6384 }
6385
6386 abort ();
6387
6388 binop:
6389
6390 bc_expand_binary_operation (binoptab, TREE_TYPE (exp),
6391 TREE_OPERAND (exp, 0), TREE_OPERAND (exp, 1));
6392 return;
6393
6394
6395 unop:
6396
6397 bc_expand_unary_operation (unoptab, TREE_TYPE (exp), TREE_OPERAND (exp, 0));
6398 return;
6399
6400
6401 andorif:
6402
6403 bc_expand_expr (TREE_OPERAND (exp, 0));
6404 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 0)));
6405 lab = bc_get_bytecode_label ();
6406
6407 bc_emit_instruction (duplicate);
6408 bc_emit_bytecode (opcode);
6409 bc_emit_bytecode_labelref (lab);
6410
6411#ifdef DEBUG_PRINT_CODE
6412 fputc ('\n', stderr);
6413#endif
6414
6415 bc_emit_instruction (drop);
6416
6417 bc_expand_expr (TREE_OPERAND (exp, 1));
6418 bc_expand_truth_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)));
6419 bc_emit_bytecode_labeldef (lab);
6420 return;
6421
6422
6423 increment:
6424
6425 type = TREE_TYPE (TREE_OPERAND (exp, 0));
6426
6427 /* Push the quantum. */
6428 bc_expand_expr (TREE_OPERAND (exp, 1));
6429
6430 /* Convert it to the lvalue's type. */
6431 bc_expand_conversion (TREE_TYPE (TREE_OPERAND (exp, 1)), type);
6432
6433 /* Push the address of the lvalue */
6434 bc_expand_expr (build1 (ADDR_EXPR, TYPE_POINTER_TO (type), TREE_OPERAND (exp, 0)));
6435
6436 /* Perform actual increment */
6437 bc_expand_increment (incroptab, type);
6438 return;
6439}
6440\f
6441/* Return the alignment in bits of EXP, a pointer valued expression.
6442 But don't return more than MAX_ALIGN no matter what.
6443 The alignment returned is, by default, the alignment of the thing that
6444 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
6445
6446 Otherwise, look at the expression to see if we can do better, i.e., if the
6447 expression is actually pointing at an object whose alignment is tighter. */
6448
6449static int
6450get_pointer_alignment (exp, max_align)
6451 tree exp;
6452 unsigned max_align;
6453{
6454 unsigned align, inner;
6455
6456 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6457 return 0;
6458
6459 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6460 align = MIN (align, max_align);
6461
6462 while (1)
6463 {
6464 switch (TREE_CODE (exp))
6465 {
6466 case NOP_EXPR:
6467 case CONVERT_EXPR:
6468 case NON_LVALUE_EXPR:
6469 exp = TREE_OPERAND (exp, 0);
6470 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
6471 return align;
6472 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
6473 inner = MIN (inner, max_align);
6474 align = MAX (align, inner);
6475 break;
6476
6477 case PLUS_EXPR:
6478 /* If sum of pointer + int, restrict our maximum alignment to that
6479 imposed by the integer. If not, we can't do any better than
6480 ALIGN. */
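/* Added illustration: if p + 6 is seen with max_align of 64 bits,
   the 6-byte (48-bit) offset fails the 64- and 32-bit masks, and
   max_align is halved to 16 bits: the sum is known to preserve at
   most 2-byte alignment. */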
6481 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
6482 return align;
6483
6484 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
6485 & (max_align - 1))
6486 != 0)
6487 max_align >>= 1;
6488
6489 exp = TREE_OPERAND (exp, 0);
6490 break;
6491
6492 case ADDR_EXPR:
6493 /* See what we are pointing at and look at its alignment. */
6494 exp = TREE_OPERAND (exp, 0);
6495 if (TREE_CODE (exp) == FUNCTION_DECL)
6496 align = MAX (align, FUNCTION_BOUNDARY);
6497 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
6498 align = MAX (align, DECL_ALIGN (exp));
6499#ifdef CONSTANT_ALIGNMENT
6500 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
6501 align = CONSTANT_ALIGNMENT (exp, align);
6502#endif
6503 return MIN (align, max_align);
6504
6505 default:
6506 return align;
6507 }
6508 }
6509}
6510\f
6511/* Return the tree node and offset if a given argument corresponds to
6512 a string constant. */
6513
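/* Added illustration: for the argument tree of "hello" + 2 (a
   PLUS_EXPR whose first operand is an ADDR_EXPR of a STRING_CST),
   this returns the STRING_CST and sets *ptr_offset to 2; any other
   shape of argument yields 0. */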
6514static tree
6515string_constant (arg, ptr_offset)
6516 tree arg;
6517 tree *ptr_offset;
6518{
6519 STRIP_NOPS (arg);
6520
6521 if (TREE_CODE (arg) == ADDR_EXPR
6522 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
6523 {
6524 *ptr_offset = integer_zero_node;
6525 return TREE_OPERAND (arg, 0);
6526 }
6527 else if (TREE_CODE (arg) == PLUS_EXPR)
6528 {
6529 tree arg0 = TREE_OPERAND (arg, 0);
6530 tree arg1 = TREE_OPERAND (arg, 1);
6531
6532 STRIP_NOPS (arg0);
6533 STRIP_NOPS (arg1);
6534
6535 if (TREE_CODE (arg0) == ADDR_EXPR
6536 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
6537 {
6538 *ptr_offset = arg1;
6539 return TREE_OPERAND (arg0, 0);
6540 }
6541 else if (TREE_CODE (arg1) == ADDR_EXPR
6542 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
6543 {
6544 *ptr_offset = arg0;
6545 return TREE_OPERAND (arg1, 0);
6546 }
6547 }
6548
6549 return 0;
6550}
6551
6552/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
6553 way, because it could contain a zero byte in the middle.
6554 TREE_STRING_LENGTH is the size of the character array, not the string.
6555
6556 Unfortunately, string_constant can't access the values of const char
6557 arrays with initializers, so neither can we do so here. */
6558
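/* Added illustration: c_strlen ("hello" + 2) folds to 3 at compile
   time. For "foo\0bar" with a non-constant offset it returns 0,
   since the internal zero byte makes the result depend on where the
   scan would start. */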
6559static tree
6560c_strlen (src)
6561 tree src;
6562{
6563 tree offset_node;
6564 int offset, max;
6565 char *ptr;
6566
6567 src = string_constant (src, &offset_node);
6568 if (src == 0)
6569 return 0;
6570 max = TREE_STRING_LENGTH (src);
6571 ptr = TREE_STRING_POINTER (src);
6572 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
6573 {
6574 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
6575 compute the offset to the following null if we don't know where to
6576 start searching for it. */
6577 int i;
6578 for (i = 0; i < max; i++)
6579 if (ptr[i] == 0)
6580 return 0;
6581 /* We don't know the starting offset, but we do know that the string
6582 has no internal zero bytes. We can assume that the offset falls
6583 within the bounds of the string; otherwise, the programmer deserves
6584 what he gets. Subtract the offset from the length of the string,
6585 and return that. */
6586 /* This would perhaps not be valid if we were dealing with named
6587 arrays in addition to literal string constants. */
6588 return size_binop (MINUS_EXPR, size_int (max), offset_node);
6589 }
6590
6591 /* We have a known offset into the string. Start searching there for
6592 a null character. */
6593 if (offset_node == 0)
6594 offset = 0;
6595 else
6596 {
6597 /* Did we get a long long offset? If so, punt. */
6598 if (TREE_INT_CST_HIGH (offset_node) != 0)
6599 return 0;
6600 offset = TREE_INT_CST_LOW (offset_node);
6601 }
6602 /* If the offset is known to be out of bounds, warn, and call strlen at
6603 runtime. */
6604 if (offset < 0 || offset > max)
6605 {
6606 warning ("offset outside bounds of constant string");
6607 return 0;
6608 }
6609 /* Use strlen to search for the first zero byte. Since any strings
6610 constructed with build_string will have nulls appended, we win even
6611 if we get handed something like (char[4])"abcd".
6612
6613 Since OFFSET is our starting index into the string, no further
6614 calculation is needed. */
6615 return size_int (strlen (ptr + offset));
6616}
6617\f
6618/* Expand an expression EXP that calls a built-in function,
6619 with result going to TARGET if that's convenient
6620 (and in mode MODE if that's convenient).
6621 SUBTARGET may be used as the target for computing one of EXP's operands.
6622 IGNORE is nonzero if the value is to be ignored. */
6623
6624#define CALLED_AS_BUILT_IN(NODE) \
6625 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
6626
6627static rtx
6628expand_builtin (exp, target, subtarget, mode, ignore)
6629 tree exp;
6630 rtx target;
6631 rtx subtarget;
6632 enum machine_mode mode;
6633 int ignore;
6634{
6635 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6636 tree arglist = TREE_OPERAND (exp, 1);
6637 rtx op0;
6638 rtx lab1, insns;
6639 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
6640 optab builtin_optab;
6641
6642 switch (DECL_FUNCTION_CODE (fndecl))
6643 {
6644 case BUILT_IN_ABS:
6645 case BUILT_IN_LABS:
6646 case BUILT_IN_FABS:
6647 /* build_function_call changes these into ABS_EXPR. */
6648 abort ();
6649
6650 case BUILT_IN_SIN:
6651 case BUILT_IN_COS:
6652 case BUILT_IN_FSQRT:
6653 /* If not optimizing, call the library function. */
6654 if (! optimize)
6655 break;
6656
6657 if (arglist == 0
6658 /* Arg could be wrong type if user redeclared this fcn wrong. */
6659 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
6660 break;
6661
6662 /* Stabilize and compute the argument. */
6663 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
6664 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
6665 {
6666 exp = copy_node (exp);
6667 arglist = copy_node (arglist);
6668 TREE_OPERAND (exp, 1) = arglist;
6669 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
6670 }
6671 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6672
6673 /* Make a suitable register to place result in. */
6674 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6675
6676 emit_queue ();
6677 start_sequence ();
6678
6679 switch (DECL_FUNCTION_CODE (fndecl))
6680 {
6681 case BUILT_IN_SIN:
6682 builtin_optab = sin_optab; break;
6683 case BUILT_IN_COS:
6684 builtin_optab = cos_optab; break;
6685 case BUILT_IN_FSQRT:
6686 builtin_optab = sqrt_optab; break;
6687 default:
6688 abort ();
6689 }
6690
6691 /* Compute into TARGET.
6692 Set TARGET to wherever the result comes back. */
6693 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6694 builtin_optab, op0, target, 0);
6695
6696 /* If we were unable to expand via the builtin, stop the
6697 sequence (without outputting the insns) and break, causing
6699     a call to the library function.  */
6699 if (target == 0)
6700 {
6701 end_sequence ();
6702 break;
6703 }
6704
6705 /* Check the results by default. But if flag_fast_math is turned on,
6706 then assume sqrt will always be called with valid arguments. */
6707
6708 if (! flag_fast_math)
6709 {
6710 /* Don't define the builtin FP instructions
6711 if your machine is not IEEE. */
6712 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6713 abort ();
6714
6715 lab1 = gen_label_rtx ();
6716
6717 /* Test the result; if it is NaN, set errno=EDOM because
6718 the argument was not in the domain. */
6719 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
6720 emit_jump_insn (gen_beq (lab1));
6721
6722#if TARGET_EDOM
6723 {
6724#ifdef GEN_ERRNO_RTX
6725 rtx errno_rtx = GEN_ERRNO_RTX;
6726#else
6727 rtx errno_rtx
6728 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
6729#endif
6730
6731 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
6732 }
6733#else
6734 /* We can't set errno=EDOM directly; let the library call do it.
6735 Pop the arguments right away in case the call gets deleted. */
6736 NO_DEFER_POP;
6737 expand_call (exp, target, 0);
6738 OK_DEFER_POP;
6739#endif
6740
6741 emit_label (lab1);
6742 }
6743
6744 /* Output the entire sequence. */
6745 insns = get_insns ();
6746 end_sequence ();
6747 emit_insns (insns);
6748
6749 return target;
6750
6751   /* __builtin_apply_args returns a block of memory allocated on
6752      the stack into which are stored the arg pointer, structure
6753 value address, static chain, and all the registers that might
6754 possibly be used in performing a function call. The code is
6755 moved to the start of the function so the incoming values are
6756 saved. */
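  /* A hedged usage sketch (user-level source, assumed rather than
     taken from this file):

       void *args = __builtin_apply_args ();

     ARGS then names the saved block and is typically handed
     straight to __builtin_apply, as in the next case below.  */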
6757 case BUILT_IN_APPLY_ARGS:
6758 /* Don't do __builtin_apply_args more than once in a function.
6759 Save the result of the first call and reuse it. */
6760 if (apply_args_value != 0)
6761 return apply_args_value;
6762 {
6763 /* When this function is called, it means that registers must be
6764 saved on entry to this function. So we migrate the
6765 call to the first insn of this function. */
6766 rtx temp;
6767 rtx seq;
6768
6769 start_sequence ();
6770 temp = expand_builtin_apply_args ();
6771 seq = get_insns ();
6772 end_sequence ();
6773
6774 apply_args_value = temp;
6775
6776 /* Put the sequence after the NOTE that starts the function.
6777 If this is inside a SEQUENCE, make the outer-level insn
6778 chain current, so the code is placed at the start of the
6779 function. */
6780 push_topmost_sequence ();
6781 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6782 pop_topmost_sequence ();
6783 return temp;
6784 }
6785
6786 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
6787 FUNCTION with a copy of the parameters described by
6788 ARGUMENTS, and ARGSIZE. It returns a block of memory
6789      allocated on the stack into which are stored all the registers
6790 that might possibly be used for returning the result of a
6791 function. ARGUMENTS is the value returned by
6792 __builtin_apply_args. ARGSIZE is the number of bytes of
6793 arguments that must be copied. ??? How should this value be
6794 computed? We'll also need a safe worst case value for varargs
6795 functions. */
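  /* A hedged sketch of the intended forwarding idiom (FN and the
     argsize guess of 64 are assumptions for illustration, per the
     ??? above):

       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*)()) fn, args, 64);
       __builtin_return (result);
   */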
6796 case BUILT_IN_APPLY:
6797 if (arglist == 0
6798 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6799 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6800 || TREE_CHAIN (arglist) == 0
6801 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6802 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6803 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6804 return const0_rtx;
6805 else
6806 {
6807 int i;
6808 tree t;
6809 rtx ops[3];
6810
6811 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
6812 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
6813
6814 return expand_builtin_apply (ops[0], ops[1], ops[2]);
6815 }
6816
6817 /* __builtin_return (RESULT) causes the function to return the
6818      value described by RESULT.  RESULT is the address of the block of
6819 memory returned by __builtin_apply. */
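  /* Note: the expansion below does not go through the normal
     return-value computation; expand_builtin_return (further down)
     reloads the saved value registers from the block and jumps
     straight to the end of the function.  */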
6820 case BUILT_IN_RETURN:
6821 if (arglist
6822 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6823 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
6824 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
6825 NULL_RTX, VOIDmode, 0));
6826 return const0_rtx;
6827
6828 case BUILT_IN_SAVEREGS:
6829 /* Don't do __builtin_saveregs more than once in a function.
6830 Save the result of the first call and reuse it. */
6831 if (saveregs_value != 0)
6832 return saveregs_value;
6833 {
6834 /* When this function is called, it means that registers must be
6835 saved on entry to this function. So we migrate the
6836 call to the first insn of this function. */
6837 rtx temp;
6838 rtx seq;
6839 rtx valreg, saved_valreg;
6840
6841 /* Now really call the function. `expand_call' does not call
6842 expand_builtin, so there is no danger of infinite recursion here. */
6843 start_sequence ();
6844
6845#ifdef EXPAND_BUILTIN_SAVEREGS
6846 /* Do whatever the machine needs done in this case. */
6847 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
6848#else
6849 /* The register where the function returns its value
6850 is likely to have something else in it, such as an argument.
6851 So preserve that register around the call. */
6852 if (value_mode != VOIDmode)
6853 {
6854 valreg = hard_libcall_value (value_mode);
6855 saved_valreg = gen_reg_rtx (value_mode);
6856 emit_move_insn (saved_valreg, valreg);
6857 }
6858
6859 /* Generate the call, putting the value in a pseudo. */
6860 temp = expand_call (exp, target, ignore);
6861
6862 if (value_mode != VOIDmode)
6863 emit_move_insn (valreg, saved_valreg);
6864#endif
6865
6866 seq = get_insns ();
6867 end_sequence ();
6868
6869 saveregs_value = temp;
6870
6871 /* Put the sequence after the NOTE that starts the function.
6872 If this is inside a SEQUENCE, make the outer-level insn
6873 chain current, so the code is placed at the start of the
6874 function. */
6875 push_topmost_sequence ();
6876 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6877 pop_topmost_sequence ();
6878 return temp;
6879 }
6880
6881 /* __builtin_args_info (N) returns word N of the arg space info
6882 for the current function. The number and meanings of words
6883      are controlled by the definition of CUMULATIVE_ARGS.  */
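  /* Illustration (assuming a target whose CUMULATIVE_ARGS is a
     struct of ints): __builtin_args_info (0) expands to the value
     of word 0 of current_function_args_info; N must be a
     compile-time integer constant in [0, nwords), as checked
     below.  */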
6884 case BUILT_IN_ARGS_INFO:
6885 {
6886 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
6887 int i;
6888 int *word_ptr = (int *) &current_function_args_info;
6889 tree type, elts, result;
6890
6891 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
6892 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6893 __FILE__, __LINE__);
6894
6895 if (arglist != 0)
6896 {
6897 tree arg = TREE_VALUE (arglist);
6898 if (TREE_CODE (arg) != INTEGER_CST)
6899 error ("argument of `__builtin_args_info' must be constant");
6900 else
6901 {
6902 int wordnum = TREE_INT_CST_LOW (arg);
6903
6904 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
6905 error ("argument of `__builtin_args_info' out of range");
6906 else
6907 return GEN_INT (word_ptr[wordnum]);
6908 }
6909 }
6910 else
6911 error ("missing argument in `__builtin_args_info'");
6912
6913 return const0_rtx;
6914
6915#if 0
6916 for (i = 0; i < nwords; i++)
6917 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
6918
6919 type = build_array_type (integer_type_node,
6920 build_index_type (build_int_2 (nwords, 0)));
6921 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
6922 TREE_CONSTANT (result) = 1;
6923 TREE_STATIC (result) = 1;
6924 result = build (INDIRECT_REF, build_pointer_type (type), result);
6925 TREE_CONSTANT (result) = 1;
6926 return expand_expr (result, NULL_RTX, VOIDmode, 0);
6927#endif
6928 }
6929
6930 /* Return the address of the first anonymous stack arg. */
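  /* A hedged sketch of the expected caller (a varargs-style macro,
     assumed here, not defined in this file):

       #define va_start(AP)  ((AP) = (char *) __builtin_next_arg ())

     The check below rejects such a use inside a function whose
     argument list is fixed.  */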
6931 case BUILT_IN_NEXT_ARG:
6932 {
1f9becfc 6933 tree parm;
ca695ac9 6934 tree fntype = TREE_TYPE (current_function_decl);
1f9becfc 6935 tree fnargs = DECL_ARGUMENTS (current_function_decl);
ca695ac9
JB
6936 if (!(TYPE_ARG_TYPES (fntype) != 0
6937 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
1f9becfc
RK
6938 != void_type_node))
6939 && !(fnargs
6940 && (parm = tree_last (fnargs)) != 0
6941 && DECL_NAME (parm)
6942 && (! strcmp (IDENTIFIER_POINTER (DECL_NAME (parm)),
6943 "__builtin_va_alist"))))
ca695ac9
JB
6944 {
6945 error ("`va_start' used in function with fixed args");
6946 return const0_rtx;
6947 }
6948 }
6949
6950 return expand_binop (Pmode, add_optab,
6951 current_function_internal_arg_pointer,
6952 current_function_arg_offset_rtx,
6953 NULL_RTX, 0, OPTAB_LIB_WIDEN);
6954
6955 case BUILT_IN_CLASSIFY_TYPE:
6956 if (arglist != 0)
6957 {
6958 tree type = TREE_TYPE (TREE_VALUE (arglist));
6959 enum tree_code code = TREE_CODE (type);
6960 if (code == VOID_TYPE)
6961 return GEN_INT (void_type_class);
6962 if (code == INTEGER_TYPE)
6963 return GEN_INT (integer_type_class);
6964 if (code == CHAR_TYPE)
6965 return GEN_INT (char_type_class);
6966 if (code == ENUMERAL_TYPE)
6967 return GEN_INT (enumeral_type_class);
6968 if (code == BOOLEAN_TYPE)
6969 return GEN_INT (boolean_type_class);
6970 if (code == POINTER_TYPE)
6971 return GEN_INT (pointer_type_class);
6972 if (code == REFERENCE_TYPE)
6973 return GEN_INT (reference_type_class);
6974 if (code == OFFSET_TYPE)
6975 return GEN_INT (offset_type_class);
6976 if (code == REAL_TYPE)
6977 return GEN_INT (real_type_class);
6978 if (code == COMPLEX_TYPE)
6979 return GEN_INT (complex_type_class);
6980 if (code == FUNCTION_TYPE)
6981 return GEN_INT (function_type_class);
6982 if (code == METHOD_TYPE)
6983 return GEN_INT (method_type_class);
6984 if (code == RECORD_TYPE)
6985 return GEN_INT (record_type_class);
6986 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
6987 return GEN_INT (union_type_class);
6988 if (code == ARRAY_TYPE)
4042d440
PB
6989 {
6990 if (TYPE_STRING_FLAG (type))
6991 return GEN_INT (string_type_class);
6992 else
6993 return GEN_INT (array_type_class);
6994 }
ca695ac9
JB
6995 if (code == SET_TYPE)
6996 return GEN_INT (set_type_class);
6997 if (code == FILE_TYPE)
6998 return GEN_INT (file_type_class);
6999 if (code == LANG_TYPE)
7000 return GEN_INT (lang_type_class);
7001 }
7002 return GEN_INT (no_type_class);
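  /* Illustration: __builtin_classify_type (1) folds to
     integer_type_class, __builtin_classify_type (1.0) to
     real_type_class, and a missing argument yields
     no_type_class.  */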
7003
7004 case BUILT_IN_CONSTANT_P:
7005 if (arglist == 0)
7006 return const0_rtx;
7007 else
7008 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
7009 ? const1_rtx : const0_rtx);
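  /* Illustration: __builtin_constant_p (3) expands to 1, while
     __builtin_constant_p (x) for a variable X expands to 0; only
     'c'-class tree codes (literal constants) count here.  */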
7010
7011 case BUILT_IN_FRAME_ADDRESS:
7012 /* The argument must be a nonnegative integer constant.
7013 It counts the number of frames to scan up the stack.
7014 The value is the address of that frame. */
7015 case BUILT_IN_RETURN_ADDRESS:
7016 /* The argument must be a nonnegative integer constant.
7017 It counts the number of frames to scan up the stack.
7018 The value is the return address saved in that frame. */
7019 if (arglist == 0)
7020 /* Warning about missing arg was already issued. */
7021 return const0_rtx;
7022 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
7023 {
7024 error ("invalid arg to `__builtin_return_address'");
7025 return const0_rtx;
7026 }
7027 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
7028 {
7029 error ("invalid arg to `__builtin_return_address'");
7030 return const0_rtx;
7031 }
7032 else
7033 {
7034 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
7035 rtx tem = frame_pointer_rtx;
7036 int i;
7037
7038 /* Some machines need special handling before we can access arbitrary
7039 frames. For example, on the sparc, we must first flush all
7040 register windows to the stack. */
7041#ifdef SETUP_FRAME_ADDRESSES
7042 SETUP_FRAME_ADDRESSES ();
7043#endif
7044
7045 /* On the sparc, the return address is not in the frame, it is
7046 in a register. There is no way to access it off of the current
7047 frame pointer, but it can be accessed off the previous frame
7048 pointer by reading the value from the register window save
7049 area. */
7050#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
7051 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
7052 count--;
7053#endif
7054
7055 /* Scan back COUNT frames to the specified frame. */
7056 for (i = 0; i < count; i++)
7057 {
7058 /* Assume the dynamic chain pointer is in the word that
7059 the frame address points to, unless otherwise specified. */
7060#ifdef DYNAMIC_CHAIN_ADDRESS
7061 tem = DYNAMIC_CHAIN_ADDRESS (tem);
7062#endif
7063 tem = memory_address (Pmode, tem);
7064 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
7065 }
7066
7067 /* For __builtin_frame_address, return what we've got. */
7068 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7069 return tem;
7070
7071 /* For __builtin_return_address,
7072 	 get the return address from that frame.  */
7073#ifdef RETURN_ADDR_RTX
7074 return RETURN_ADDR_RTX (count, tem);
7075#else
7076 tem = memory_address (Pmode,
7077 plus_constant (tem, GET_MODE_SIZE (Pmode)));
7078 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
7079#endif
7080 }
7081
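  /* Illustration: __builtin_frame_address (0) is just the current
     frame_pointer_rtx; __builtin_return_address (1) follows one
     dynamic-chain link and then reads the word just past that
     frame address (or whatever RETURN_ADDR_RTX specifies on
     targets that define it).  */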
7082 case BUILT_IN_ALLOCA:
7083 if (arglist == 0
7084 /* Arg could be non-integer if user redeclared this fcn wrong. */
7085 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7b073ca6 7086 break;
ca695ac9
JB
7087 current_function_calls_alloca = 1;
7088 /* Compute the argument. */
7089 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
7090
7091 /* Allocate the desired space. */
7092 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
7093
7094 /* Record the new stack level for nonlocal gotos. */
7095 if (nonlocal_goto_handler_slot != 0)
7096 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
7097 return target;
7098
7099 case BUILT_IN_FFS:
7100 /* If not optimizing, call the library function. */
98aad286 7101 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9
JB
7102 break;
7103
7104 if (arglist == 0
7105 /* Arg could be non-integer if user redeclared this fcn wrong. */
7106 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
7b073ca6 7107 break;
ca695ac9
JB
7108
7109 /* Compute the argument. */
7110 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
7111 /* Compute ffs, into TARGET if possible.
7112 Set TARGET to wherever the result comes back. */
7113 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7114 ffs_optab, op0, target, 1);
7115 if (target == 0)
7116 abort ();
7117 return target;
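      /* Illustration of the ffs semantics relied on above:
	 ffs (0) == 0, ffs (1) == 1, ffs (8) == 4 -- the 1-based
	 index of the least significant set bit, computed by a
	 single insn when ffs_optab has a handler for the
	 argument's mode.  */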
7118
7119 case BUILT_IN_STRLEN:
7120 /* If not optimizing, call the library function. */
98aad286 7121 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9
JB
7122 break;
7123
7124 if (arglist == 0
7125 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7126 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
7b073ca6 7127 break;
ca695ac9
JB
7128 else
7129 {
7130 tree src = TREE_VALUE (arglist);
7131 tree len = c_strlen (src);
7132
7133 int align
7134 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7135
7136 rtx result, src_rtx, char_rtx;
7137 enum machine_mode insn_mode = value_mode, char_mode;
7138 enum insn_code icode;
7139
7140 /* If the length is known, just return it. */
7141 if (len != 0)
7142 return expand_expr (len, target, mode, 0);
7143
7144 /* If SRC is not a pointer type, don't do this operation inline. */
7145 if (align == 0)
7146 break;
7147
7148 /* Call a function if we can't compute strlen in the right mode. */
7149
7150 while (insn_mode != VOIDmode)
7151 {
7152 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
7153 if (icode != CODE_FOR_nothing)
7154 break;
bbf6f052 7155
ca695ac9
JB
7156 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
7157 }
7158 if (insn_mode == VOIDmode)
7159 break;
bbf6f052 7160
ca695ac9
JB
7161 /* Make a place to write the result of the instruction. */
7162 result = target;
7163 if (! (result != 0
7164 && GET_CODE (result) == REG
7165 && GET_MODE (result) == insn_mode
7166 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7167 result = gen_reg_rtx (insn_mode);
bbf6f052 7168
ca695ac9
JB
7169 /* Make sure the operands are acceptable to the predicates. */
7170
7171 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
7172 result = gen_reg_rtx (insn_mode);
7173
7174 src_rtx = memory_address (BLKmode,
7175 expand_expr (src, NULL_RTX, Pmode,
7176 EXPAND_NORMAL));
7177 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
7178 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
7179
7180 char_rtx = const0_rtx;
7181 char_mode = insn_operand_mode[(int)icode][2];
7182 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
7183 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
7184
7185 emit_insn (GEN_FCN (icode) (result,
7186 gen_rtx (MEM, BLKmode, src_rtx),
7187 char_rtx, GEN_INT (align)));
7188
7189 /* Return the value in the proper mode for this function. */
7190 if (GET_MODE (result) == value_mode)
7191 return result;
7192 else if (target != 0)
7193 {
7194 convert_move (target, result, 0);
7195 return target;
7196 }
7197 else
7198 return convert_to_mode (value_mode, result, 0);
7199 }
7200
7201 case BUILT_IN_STRCPY:
e87b4f3f 7202 /* If not optimizing, call the library function. */
98aad286 7203 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
e87b4f3f
RS
7204 break;
7205
7206 if (arglist == 0
ca695ac9
JB
7207 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7208 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7209 || TREE_CHAIN (arglist) == 0
7210 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7b073ca6 7211 break;
ca695ac9 7212 else
db0e6d01 7213 {
ca695ac9 7214 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
e7c33f54 7215
ca695ac9
JB
7216 if (len == 0)
7217 break;
e7c33f54 7218
ca695ac9 7219 len = size_binop (PLUS_EXPR, len, integer_one_node);
e7c33f54 7220
ca695ac9 7221 chainon (arglist, build_tree_list (NULL_TREE, len));
1bbddf11
JVA
7222 }
7223
ca695ac9
JB
7224       /* Falls through.  */
7225 case BUILT_IN_MEMCPY:
7226 /* If not optimizing, call the library function. */
98aad286 7227 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9 7228 break;
e7c33f54 7229
ca695ac9
JB
7230 if (arglist == 0
7231 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7232 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7233 || TREE_CHAIN (arglist) == 0
7234 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7235 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7236 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7b073ca6 7237 break;
ca695ac9 7238 else
e7c33f54 7239 {
ca695ac9
JB
7240 tree dest = TREE_VALUE (arglist);
7241 tree src = TREE_VALUE (TREE_CHAIN (arglist));
7242 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
e87b4f3f 7243
ca695ac9
JB
7244 int src_align
7245 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7246 int dest_align
7247 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7248 rtx dest_rtx, dest_mem, src_mem;
60bac6ea 7249
ca695ac9
JB
7250 /* If either SRC or DEST is not a pointer type, don't do
7251 this operation in-line. */
7252 if (src_align == 0 || dest_align == 0)
7253 {
7254 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
7255 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7256 break;
7257 }
7258
7259 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
7260 dest_mem = gen_rtx (MEM, BLKmode,
7261 memory_address (BLKmode, dest_rtx));
7262 src_mem = gen_rtx (MEM, BLKmode,
7263 memory_address (BLKmode,
7264 expand_expr (src, NULL_RTX,
7265 Pmode,
7266 EXPAND_NORMAL)));
7267
7268 /* Copy word part most expediently. */
7269 emit_block_move (dest_mem, src_mem,
7270 expand_expr (len, NULL_RTX, VOIDmode, 0),
7271 MIN (src_align, dest_align));
7272 return dest_rtx;
7273 }
7274
7275/* These comparison functions need an instruction that returns an actual
7276 index. An ordinary compare that just sets the condition codes
7277 is not enough. */
7278#ifdef HAVE_cmpstrsi
7279 case BUILT_IN_STRCMP:
7280 /* If not optimizing, call the library function. */
98aad286 7281 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9
JB
7282 break;
7283
7284 if (arglist == 0
7285 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7286 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7287 || TREE_CHAIN (arglist) == 0
7288 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
7b073ca6 7289 break;
ca695ac9
JB
7290 else if (!HAVE_cmpstrsi)
7291 break;
7292 {
7293 tree arg1 = TREE_VALUE (arglist);
7294 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7295 tree offset;
7296 tree len, len2;
7297
7298 len = c_strlen (arg1);
7299 if (len)
7300 len = size_binop (PLUS_EXPR, integer_one_node, len);
7301 len2 = c_strlen (arg2);
7302 if (len2)
7303 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
7304
7305 /* If we don't have a constant length for the first, use the length
7306 of the second, if we know it. We don't require a constant for
7307 this case; some cost analysis could be done if both are available
7308 but neither is constant. For now, assume they're equally cheap.
7309
7310 If both strings have constant lengths, use the smaller. This
7311 	   could arise if optimization results in strcmp being called with
7312 two fixed strings, or if the code was machine-generated. We should
7313 add some code to the `memcmp' handler below to deal with such
7314 situations, someday. */
7315 if (!len || TREE_CODE (len) != INTEGER_CST)
7316 {
7317 if (len2)
7318 len = len2;
7319 else if (len == 0)
7320 break;
7321 }
7322 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
7323 {
7324 if (tree_int_cst_lt (len2, len))
7325 len = len2;
7326 }
7327
7328 chainon (arglist, build_tree_list (NULL_TREE, len));
7329 }
7330
7331       /* Falls through.  */
7332 case BUILT_IN_MEMCMP:
7333 /* If not optimizing, call the library function. */
98aad286 7334 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
ca695ac9
JB
7335 break;
7336
7337 if (arglist == 0
7338 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7339 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7340 || TREE_CHAIN (arglist) == 0
7341 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7342 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7343 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7b073ca6 7344 break;
ca695ac9
JB
7345 else if (!HAVE_cmpstrsi)
7346 break;
7347 {
7348 tree arg1 = TREE_VALUE (arglist);
7349 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
7350 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
7351 rtx result;
7352
7353 int arg1_align
7354 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7355 int arg2_align
7356 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
7357 enum machine_mode insn_mode
7358 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
60bac6ea 7359
ca695ac9
JB
7360 	/* If either arg is not a pointer type, just call the function.  */
7361 if (arg1_align == 0 || arg2_align == 0)
7362 {
7363 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
7364 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
7365 break;
7366 }
60bac6ea 7367
ca695ac9
JB
7368 /* Make a place to write the result of the instruction. */
7369 result = target;
7370 if (! (result != 0
7371 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
7372 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
7373 result = gen_reg_rtx (insn_mode);
60bac6ea 7374
ca695ac9
JB
7375 emit_insn (gen_cmpstrsi (result,
7376 gen_rtx (MEM, BLKmode,
7377 expand_expr (arg1, NULL_RTX, Pmode,
7378 EXPAND_NORMAL)),
7379 gen_rtx (MEM, BLKmode,
7380 expand_expr (arg2, NULL_RTX, Pmode,
7381 EXPAND_NORMAL)),
7382 expand_expr (len, NULL_RTX, VOIDmode, 0),
7383 GEN_INT (MIN (arg1_align, arg2_align))));
60bac6ea 7384
ca695ac9
JB
7385 /* Return the value in the proper mode for this function. */
7386 mode = TYPE_MODE (TREE_TYPE (exp));
7387 if (GET_MODE (result) == mode)
7388 return result;
7389 else if (target != 0)
7390 {
7391 convert_move (target, result, 0);
7392 return target;
60bac6ea 7393 }
ca695ac9
JB
7394 else
7395 return convert_to_mode (mode, result, 0);
7396 }
60bac6ea 7397#else
ca695ac9
JB
7398 case BUILT_IN_STRCMP:
7399 case BUILT_IN_MEMCMP:
7400 break;
60bac6ea
RS
7401#endif
7402
ca695ac9
JB
7403 default: /* just do library call, if unknown builtin */
7404 error ("built-in function `%s' not currently supported",
7405 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
7406 }
e87b4f3f 7407
ca695ac9
JB
7408 /* The switch statement above can drop through to cause the function
7409 to be called normally. */
e7c33f54 7410
ca695ac9
JB
7411 return expand_call (exp, target, ignore);
7412}
7413\f
7414/* Built-in functions to perform an untyped call and return. */
0006469d 7415
ca695ac9
JB
7416/* For each register that may be used for calling a function, this
7417 gives a mode used to copy the register's value. VOIDmode indicates
7418 the register is not used for calling a function. If the machine
7419 has register windows, this gives only the outbound registers.
7420 INCOMING_REGNO gives the corresponding inbound register. */
7421static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
0006469d 7422
ca695ac9
JB
7423/* For each register that may be used for returning values, this gives
7424 a mode used to copy the register's value. VOIDmode indicates the
7425 register is not used for returning values. If the machine has
7426 register windows, this gives only the outbound registers.
7427 INCOMING_REGNO gives the corresponding inbound register. */
7428static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
0006469d 7429
ca695ac9
JB
7430/* For each register that may be used for calling a function, this
7431 gives the offset of that register into the block returned by
7432    __builtin_apply_args.  0 indicates that the register is not
7433 used for calling a function. */
7434static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
0006469d 7435
ca695ac9
JB
7436/* Return the offset of register REGNO into the block returned by
7437 __builtin_apply_args. This is not declared static, since it is
7438 needed in objc-act.c. */
0006469d 7439
ca695ac9
JB
7440int
7441apply_args_register_offset (regno)
7442 int regno;
7443{
7444 apply_args_size ();
0006469d 7445
ca695ac9
JB
7446 /* Arguments are always put in outgoing registers (in the argument
7447      block) when that makes sense.  */
7448#ifdef OUTGOING_REGNO
7449 regno = OUTGOING_REGNO(regno);
7450#endif
7451 return apply_args_reg_offset[regno];
7452}
0006469d 7453
ca695ac9
JB
7454/* Return the size required for the block returned by __builtin_apply_args,
7455 and initialize apply_args_mode. */
0006469d 7456
ca695ac9
JB
7457static int
7458apply_args_size ()
7459{
7460 static int size = -1;
7461 int align, regno;
7462 enum machine_mode mode;
bbf6f052 7463
ca695ac9
JB
7464 /* The values computed by this function never change. */
7465 if (size < 0)
7466 {
7467 /* The first value is the incoming arg-pointer. */
7468 size = GET_MODE_SIZE (Pmode);
bbf6f052 7469
ca695ac9
JB
7470 /* The second value is the structure value address unless this is
7471 passed as an "invisible" first argument. */
7472 if (struct_value_rtx)
7473 size += GET_MODE_SIZE (Pmode);
7474
7475 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7476 if (FUNCTION_ARG_REGNO_P (regno))
bbf6f052 7477 {
ca695ac9
JB
7478 /* Search for the proper mode for copying this register's
7479 value. I'm not sure this is right, but it works so far. */
7480 enum machine_mode best_mode = VOIDmode;
7481
7482 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7483 mode != VOIDmode;
7484 mode = GET_MODE_WIDER_MODE (mode))
7485 if (HARD_REGNO_MODE_OK (regno, mode)
7486 && HARD_REGNO_NREGS (regno, mode) == 1)
7487 best_mode = mode;
7488
7489 if (best_mode == VOIDmode)
7490 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7491 mode != VOIDmode;
7492 mode = GET_MODE_WIDER_MODE (mode))
7493 if (HARD_REGNO_MODE_OK (regno, mode)
7494 && (mov_optab->handlers[(int) mode].insn_code
7495 != CODE_FOR_nothing))
7496 best_mode = mode;
7497
7498 mode = best_mode;
7499 if (mode == VOIDmode)
7500 abort ();
7501
7502 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7503 if (size % align != 0)
7504 size = CEIL (size, align) * align;
7505 apply_args_reg_offset[regno] = size;
7506 size += GET_MODE_SIZE (mode);
7507 apply_args_mode[regno] = mode;
7508 }
7509 else
7510 {
7511 apply_args_mode[regno] = VOIDmode;
7512 apply_args_reg_offset[regno] = 0;
bbf6f052 7513 }
ca695ac9
JB
7514 }
7515 return size;
7516}
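/* Worked example of the rounding rule used above (numbers assumed):
   with size == 10 before an 8-byte register needing 8-byte
   alignment, CEIL (10, 8) * 8 == 16, so the register is recorded
   at offset 16 and size then advances to 24.  */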
bbf6f052 7517
ca695ac9
JB
7518/* Return the size required for the block returned by __builtin_apply,
7519 and initialize apply_result_mode. */
bbf6f052 7520
ca695ac9
JB
7521static int
7522apply_result_size ()
7523{
7524 static int size = -1;
7525 int align, regno;
7526 enum machine_mode mode;
bbf6f052 7527
ca695ac9
JB
7528 /* The values computed by this function never change. */
7529 if (size < 0)
7530 {
7531 size = 0;
bbf6f052 7532
ca695ac9
JB
7533 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7534 if (FUNCTION_VALUE_REGNO_P (regno))
7535 {
7536 /* Search for the proper mode for copying this register's
7537 value. I'm not sure this is right, but it works so far. */
7538 enum machine_mode best_mode = VOIDmode;
bbf6f052 7539
ca695ac9
JB
7540 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
7541 mode != TImode;
7542 mode = GET_MODE_WIDER_MODE (mode))
7543 if (HARD_REGNO_MODE_OK (regno, mode))
7544 best_mode = mode;
bbf6f052 7545
ca695ac9
JB
7546 if (best_mode == VOIDmode)
7547 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
7548 mode != VOIDmode;
7549 mode = GET_MODE_WIDER_MODE (mode))
7550 if (HARD_REGNO_MODE_OK (regno, mode)
7551 && (mov_optab->handlers[(int) mode].insn_code
7552 != CODE_FOR_nothing))
7553 best_mode = mode;
bbf6f052 7554
ca695ac9
JB
7555 mode = best_mode;
7556 if (mode == VOIDmode)
7557 abort ();
bbf6f052 7558
ca695ac9
JB
7559 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7560 if (size % align != 0)
7561 size = CEIL (size, align) * align;
7562 size += GET_MODE_SIZE (mode);
7563 apply_result_mode[regno] = mode;
bbf6f052
RK
7564 }
7565 else
ca695ac9 7566 apply_result_mode[regno] = VOIDmode;
bbf6f052 7567
ca695ac9
JB
7568 /* Allow targets that use untyped_call and untyped_return to override
7569 the size so that machine-specific information can be stored here. */
7570#ifdef APPLY_RESULT_SIZE
7571 size = APPLY_RESULT_SIZE;
7572#endif
7573 }
7574 return size;
7575}
bbf6f052 7576
ca695ac9
JB
7577#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
7578/* Create a vector describing the result block RESULT. If SAVEP is true,
7579 the result block is used to save the values; otherwise it is used to
7580 restore the values. */
bbf6f052 7581
ca695ac9
JB
7582static rtx
7583result_vector (savep, result)
7584 int savep;
7585 rtx result;
7586{
7587 int regno, size, align, nelts;
7588 enum machine_mode mode;
7589 rtx reg, mem;
7590 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
7591
7592 size = nelts = 0;
7593 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7594 if ((mode = apply_result_mode[regno]) != VOIDmode)
7595 {
7596 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7597 if (size % align != 0)
7598 size = CEIL (size, align) * align;
7599 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
7600 mem = change_address (result, mode,
7601 plus_constant (XEXP (result, 0), size));
7602 savevec[nelts++] = (savep
7603 ? gen_rtx (SET, VOIDmode, mem, reg)
7604 : gen_rtx (SET, VOIDmode, reg, mem));
7605 size += GET_MODE_SIZE (mode);
bbf6f052 7606 }
ca695ac9
JB
7607 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
7608}
7609#endif /* HAVE_untyped_call or HAVE_untyped_return */
bbf6f052 7610
ca695ac9
JB
7611/* Save the state required to perform an untyped call with the same
7612 arguments as were passed to the current function. */
7613
7614static rtx
7615expand_builtin_apply_args ()
7616{
7617 rtx registers;
7618 int size, align, regno;
7619 enum machine_mode mode;
7620
7621 /* Create a block where the arg-pointer, structure value address,
7622 and argument registers can be saved. */
7623 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
7624
7625 /* Walk past the arg-pointer and structure value address. */
7626 size = GET_MODE_SIZE (Pmode);
7627 if (struct_value_rtx)
7628 size += GET_MODE_SIZE (Pmode);
7629
7630 /* Save each register used in calling a function to the block. */
7631 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7632 if ((mode = apply_args_mode[regno]) != VOIDmode)
bbf6f052 7633 {
ca695ac9
JB
7634 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7635 if (size % align != 0)
7636 size = CEIL (size, align) * align;
7637 emit_move_insn (change_address (registers, mode,
7638 plus_constant (XEXP (registers, 0),
7639 size)),
7640 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
7641 size += GET_MODE_SIZE (mode);
bbf6f052
RK
7642 }
7643
ca695ac9
JB
7644 /* Save the arg pointer to the block. */
7645 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
7646 copy_to_reg (virtual_incoming_args_rtx));
7647 size = GET_MODE_SIZE (Pmode);
bbf6f052 7648
ca695ac9
JB
7649 /* Save the structure value address unless this is passed as an
7650 "invisible" first argument. */
7651 if (struct_value_incoming_rtx)
7652 {
7653 emit_move_insn (change_address (registers, Pmode,
7654 plus_constant (XEXP (registers, 0),
7655 size)),
7656 copy_to_reg (struct_value_incoming_rtx));
7657 size += GET_MODE_SIZE (Pmode);
7658 }
7659
7660 /* Return the address of the block. */
7661 return copy_addr_to_reg (XEXP (registers, 0));
7662}
7663
7664/* Perform an untyped call and save the state required to perform an
7665 untyped return of whatever value was returned by the given function. */
7666
7667static rtx
7668expand_builtin_apply (function, arguments, argsize)
7669 rtx function, arguments, argsize;
7670{
7671 int size, align, regno;
7672 enum machine_mode mode;
7673 rtx incoming_args, result, reg, dest, call_insn;
7674 rtx old_stack_level = 0;
7675 rtx use_insns = 0;
bbf6f052 7676
ca695ac9
JB
7677 /* Create a block where the return registers can be saved. */
7678 result = assign_stack_local (BLKmode, apply_result_size (), -1);
bbf6f052 7679
ca695ac9 7680 /* ??? The argsize value should be adjusted here. */
bbf6f052 7681
ca695ac9
JB
7682 /* Fetch the arg pointer from the ARGUMENTS block. */
7683 incoming_args = gen_reg_rtx (Pmode);
7684 emit_move_insn (incoming_args,
7685 gen_rtx (MEM, Pmode, arguments));
7686#ifndef STACK_GROWS_DOWNWARD
7687 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
7688 incoming_args, 0, OPTAB_LIB_WIDEN);
46b68a37
JW
7689#endif
7690
ca695ac9
JB
7691 /* Perform postincrements before actually calling the function. */
7692 emit_queue ();
46b68a37 7693
ca695ac9
JB
7694 /* Push a new argument block and copy the arguments. */
7695 do_pending_stack_adjust ();
7696 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
bbf6f052 7697
ca695ac9
JB
7698 /* Push a block of memory onto the stack to store the memory arguments.
7699 Save the address in a register, and copy the memory arguments. ??? I
7700      haven't figured out how the calling convention macros affect this,
7701      but it's likely that the source and/or destination addresses in
7702      the block copy will need updating in machine-specific ways.  */
7703 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
7704 emit_block_move (gen_rtx (MEM, BLKmode, dest),
7705 gen_rtx (MEM, BLKmode, incoming_args),
7706 argsize,
7707 PARM_BOUNDARY / BITS_PER_UNIT);
bbf6f052 7708
ca695ac9
JB
7709 /* Refer to the argument block. */
7710 apply_args_size ();
7711 arguments = gen_rtx (MEM, BLKmode, arguments);
7712
7713 /* Walk past the arg-pointer and structure value address. */
7714 size = GET_MODE_SIZE (Pmode);
7715 if (struct_value_rtx)
7716 size += GET_MODE_SIZE (Pmode);
7717
7718 /* Restore each of the registers previously saved. Make USE insns
7719 for each of these registers for use in making the call. */
7720 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7721 if ((mode = apply_args_mode[regno]) != VOIDmode)
7722 {
7723 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7724 if (size % align != 0)
7725 size = CEIL (size, align) * align;
7726 reg = gen_rtx (REG, mode, regno);
7727 emit_move_insn (reg,
7728 change_address (arguments, mode,
7729 plus_constant (XEXP (arguments, 0),
7730 size)));
7731
7732 push_to_sequence (use_insns);
7733 emit_insn (gen_rtx (USE, VOIDmode, reg));
7734 use_insns = get_insns ();
7735 end_sequence ();
7736 size += GET_MODE_SIZE (mode);
7737 }
7738
7739 /* Restore the structure value address unless this is passed as an
7740 "invisible" first argument. */
7741 size = GET_MODE_SIZE (Pmode);
7742 if (struct_value_rtx)
7743 {
7744 rtx value = gen_reg_rtx (Pmode);
7745 emit_move_insn (value,
7746 change_address (arguments, Pmode,
7747 plus_constant (XEXP (arguments, 0),
7748 size)));
7749 emit_move_insn (struct_value_rtx, value);
7750 if (GET_CODE (struct_value_rtx) == REG)
7751 {
7752 push_to_sequence (use_insns);
7753 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
7754 use_insns = get_insns ();
7755 end_sequence ();
bbf6f052 7756 }
ca695ac9
JB
7757 size += GET_MODE_SIZE (Pmode);
7758 }
bbf6f052 7759
ca695ac9
JB
7760 /* All arguments and registers used for the call are set up by now! */
7761 function = prepare_call_address (function, NULL_TREE, &use_insns);
bbf6f052 7762
ca695ac9
JB
7763 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
7764 and we don't want to load it into a register as an optimization,
7765 because prepare_call_address already did it if it should be done. */
7766 if (GET_CODE (function) != SYMBOL_REF)
7767 function = memory_address (FUNCTION_MODE, function);
bbf6f052 7768
ca695ac9
JB
7769 /* Generate the actual call instruction and save the return value. */
7770#ifdef HAVE_untyped_call
7771 if (HAVE_untyped_call)
7772 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
7773 result, result_vector (1, result)));
7774 else
7775#endif
7776#ifdef HAVE_call_value
7777 if (HAVE_call_value)
7778 {
7779 rtx valreg = 0;
bbf6f052 7780
ca695ac9
JB
7781 /* Locate the unique return register. It is not possible to
7782 express a call that sets more than one return register using
7783 call_value; use untyped_call for that. In fact, untyped_call
7784 only needs to save the return registers in the given block. */
7785 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7786 if ((mode = apply_result_mode[regno]) != VOIDmode)
7787 {
7788 if (valreg)
7789 abort (); /* HAVE_untyped_call required. */
7790 valreg = gen_rtx (REG, mode, regno);
7791 }
bbf6f052 7792
ca695ac9
JB
7793 emit_call_insn (gen_call_value (valreg,
7794 gen_rtx (MEM, FUNCTION_MODE, function),
7795 const0_rtx, NULL_RTX, const0_rtx));
bbf6f052 7796
ca695ac9
JB
7797 emit_move_insn (change_address (result, GET_MODE (valreg),
7798 XEXP (result, 0)),
7799 valreg);
7800 }
7801 else
7802#endif
7803 abort ();
bbf6f052 7804
ca695ac9
JB
7805 /* Find the CALL insn we just emitted and write the USE insns before it. */
7806 for (call_insn = get_last_insn ();
7807 call_insn && GET_CODE (call_insn) != CALL_INSN;
7808 call_insn = PREV_INSN (call_insn))
7809 ;
bbf6f052 7810
ca695ac9
JB
7811 if (! call_insn)
7812 abort ();
bbf6f052 7813
ca695ac9
JB
7814 /* Put the USE insns before the CALL. */
7815 emit_insns_before (use_insns, call_insn);
e7c33f54 7816
ca695ac9
JB
7817 /* Restore the stack. */
7818 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
e7c33f54 7819
ca695ac9
JB
7820 /* Return the address of the result block. */
7821 return copy_addr_to_reg (XEXP (result, 0));
7822}
e7c33f54 7823
ca695ac9 7824/* Perform an untyped return. */
e7c33f54 7825
ca695ac9
JB
7826static void
7827expand_builtin_return (result)
7828 rtx result;
7829{
7830 int size, align, regno;
7831 enum machine_mode mode;
7832 rtx reg;
7833 rtx use_insns = 0;
e7c33f54 7834
ca695ac9
JB
7835 apply_result_size ();
7836 result = gen_rtx (MEM, BLKmode, result);
e7c33f54 7837
ca695ac9
JB
7838#ifdef HAVE_untyped_return
7839 if (HAVE_untyped_return)
7840 {
7841 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
7842 emit_barrier ();
7843 return;
7844 }
7845#endif
e7c33f54 7846
ca695ac9
JB
7847 /* Restore the return value and note that each value is used. */
7848 size = 0;
7849 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
7850 if ((mode = apply_result_mode[regno]) != VOIDmode)
7851 {
7852 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
7853 if (size % align != 0)
7854 size = CEIL (size, align) * align;
7855 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
7856 emit_move_insn (reg,
7857 change_address (result, mode,
7858 plus_constant (XEXP (result, 0),
7859 size)));
e7c33f54 7860
ca695ac9
JB
7861 push_to_sequence (use_insns);
7862 emit_insn (gen_rtx (USE, VOIDmode, reg));
7863 use_insns = get_insns ();
7864 end_sequence ();
7865 size += GET_MODE_SIZE (mode);
7866 }
e7c33f54 7867
ca695ac9
JB
7868 /* Put the USE insns before the return. */
7869 emit_insns (use_insns);
e7c33f54 7870
ca695ac9
JB
7871   /* Return whatever values were restored by jumping directly to the end
7872 of the function. */
7873 expand_null_return ();
7874}
7875\f
7876/* Expand code for a post- or pre- increment or decrement
7877 and return the RTX for the result.
7878 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
e7c33f54 7879
ca695ac9
JB
7880static rtx
7881expand_increment (exp, post)
7882 register tree exp;
7883 int post;
7884{
7885 register rtx op0, op1;
7886 register rtx temp, value;
7887 register tree incremented = TREE_OPERAND (exp, 0);
7888 optab this_optab = add_optab;
7889 int icode;
7890 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7891 int op0_is_copy = 0;
7892 int single_insn = 0;
a97f5a86
RS
7893 /* 1 means we can't store into OP0 directly,
7894 because it is a subreg narrower than a word,
7895 and we don't dare clobber the rest of the word. */
7896 int bad_subreg = 0;
e7c33f54 7897
ca695ac9 7898 if (output_bytecode)
c02bd5d9
JB
7899 {
7900 bc_expand_expr (exp);
7901 return NULL_RTX;
7902 }
e7c33f54 7903
ca695ac9
JB
7904 /* Stabilize any component ref that might need to be
7905 evaluated more than once below. */
7906 if (!post
7907 || TREE_CODE (incremented) == BIT_FIELD_REF
7908 || (TREE_CODE (incremented) == COMPONENT_REF
7909 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
7910 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
7911 incremented = stabilize_reference (incremented);
7912 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
7913 ones into save exprs so that they don't accidentally get evaluated
7914 more than once by the code below. */
7915 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
7916 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
7917 incremented = save_expr (incremented);
bbf6f052 7918
ca695ac9
JB
7919 /* Compute the operands as RTX.
7920 Note whether OP0 is the actual lvalue or a copy of it:
7921 I believe it is a copy iff it is a register or subreg
7922 and insns were generated in computing it. */
bbf6f052 7923
ca695ac9
JB
7924 temp = get_last_insn ();
7925 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
bbf6f052 7926
ca695ac9
JB
7927 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
7928      in place but instead must do sign- or zero-extension during assignment,
7929 so we copy it into a new register and let the code below use it as
7930 a copy.
bbf6f052 7931
ca695ac9
JB
7932      Note that we can safely modify this SUBREG since it is known not to be
7933 shared (it was made by the expand_expr call above). */
bbf6f052 7934
ca695ac9
JB
7935 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
7936 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
a97f5a86
RS
7937 else if (GET_CODE (op0) == SUBREG
7938 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
79777b79
RK
7939 {
7940 /* We cannot increment this SUBREG in place. If we are
7941 post-incrementing, get a copy of the old value. Otherwise,
7942 just mark that we cannot increment in place. */
7943 if (post)
7944 op0 = copy_to_reg (op0);
7945 else
7946 bad_subreg = 1;
7947 }
bbf6f052 7948
ca695ac9
JB
7949 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
7950 && temp != get_last_insn ());
7951 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052 7952
ca695ac9
JB
7953 /* Decide whether incrementing or decrementing. */
7954 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
7955 || TREE_CODE (exp) == PREDECREMENT_EXPR)
7956 this_optab = sub_optab;
bbf6f052 7957
ca695ac9
JB
7958 /* Convert decrement by a constant into a negative increment. */
7959 if (this_optab == sub_optab
7960 && GET_CODE (op1) == CONST_INT)
7961 {
7962 op1 = GEN_INT (- INTVAL (op1));
7963 this_optab = add_optab;
7964 }
bbf6f052 7965
ca695ac9
JB
7966 /* For a preincrement, see if we can do this with a single instruction. */
7967 if (!post)
7968 {
7969 icode = (int) this_optab->handlers[(int) mode].insn_code;
7970 if (icode != (int) CODE_FOR_nothing
7971 /* Make sure that OP0 is valid for operands 0 and 1
7972 of the insn we want to queue. */
7973 && (*insn_operand_predicate[icode][0]) (op0, mode)
7974 && (*insn_operand_predicate[icode][1]) (op0, mode)
7975 && (*insn_operand_predicate[icode][2]) (op1, mode))
7976 single_insn = 1;
7977 }
bbf6f052 7978
ca695ac9
JB
7979 /* If OP0 is not the actual lvalue, but rather a copy in a register,
7980 then we cannot just increment OP0. We must therefore contrive to
7981 increment the original value. Then, for postincrement, we can return
7982 OP0 since it is a copy of the old value. For preincrement, expand here
a97f5a86
RS
7983 unless we can do it with a single insn.
7984
7985 Likewise if storing directly into OP0 would clobber high bits
7986 we need to preserve (bad_subreg). */
7987 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
ca695ac9
JB
7988 {
7989 /* This is the easiest way to increment the value wherever it is.
7990 Problems with multiple evaluation of INCREMENTED are prevented
7991 because either (1) it is a component_ref or preincrement,
7992 in which case it was stabilized above, or (2) it is an array_ref
7993 with constant index in an array in a register, which is
7994 safe to reevaluate. */
7995 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
7996 || TREE_CODE (exp) == PREDECREMENT_EXPR)
7997 ? MINUS_EXPR : PLUS_EXPR),
7998 TREE_TYPE (exp),
7999 incremented,
8000 TREE_OPERAND (exp, 1));
8001 temp = expand_assignment (incremented, newexp, ! post, 0);
8002 return post ? op0 : temp;
8003 }
bbf6f052 8004
ca695ac9
JB
8005 if (post)
8006 {
8007 /* We have a true reference to the value in OP0.
8008 If there is an insn to add or subtract in this mode, queue it.
8009 Queueing the increment insn avoids the register shuffling
8010 that often results if we must increment now and first save
8011 the old value for subsequent use. */
bbf6f052 8012
ca695ac9
JB
8013#if 0 /* Turned off to avoid making extra insn for indexed memref. */
8014 op0 = stabilize (op0);
8015#endif
bbf6f052 8016
ca695ac9
JB
8017 icode = (int) this_optab->handlers[(int) mode].insn_code;
8018 if (icode != (int) CODE_FOR_nothing
8019 /* Make sure that OP0 is valid for operands 0 and 1
8020 of the insn we want to queue. */
8021 && (*insn_operand_predicate[icode][0]) (op0, mode)
8022 && (*insn_operand_predicate[icode][1]) (op0, mode))
8023 {
8024 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8025 op1 = force_reg (mode, op1);
bbf6f052 8026
ca695ac9
JB
8027 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8028 }
8029 }
bbf6f052 8030
ca695ac9
JB
8031 /* Preincrement, or we can't increment with one simple insn. */
8032 if (post)
8033 /* Save a copy of the value before inc or dec, to return it later. */
8034 temp = value = copy_to_reg (op0);
8035 else
8036 /* Arrange to return the incremented value. */
8037 /* Copy the rtx because expand_binop will protect from the queue,
8038 and the results of that would be invalid for us to return
8039 if our caller does emit_queue before using our result. */
8040 temp = copy_rtx (value = op0);
bbf6f052 8041
ca695ac9
JB
8042 /* Increment however we can. */
8043 op1 = expand_binop (mode, this_optab, value, op1, op0,
8044 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8045 /* Make sure the value is stored into OP0. */
8046 if (op1 != op0)
8047 emit_move_insn (op0, op1);
bbf6f052 8048
ca695ac9
JB
8049 return temp;
8050}
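/* Illustration of the main paths above (C-level view, assumed): for
   "x++" the add is queued when a matching insn exists, otherwise the
   old value is first copied so it can be returned; for "++x" either
   a single add goes straight into OP0, or the increment is rewritten
   as an assignment of the incremented value.  */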
8051\f
8052/* Expand all function calls contained within EXP, innermost ones first.
8053 But don't look within expressions that have sequence points.
8054 For each CALL_EXPR, record the rtx for its value
8055 in the CALL_EXPR_RTL field. */
bbf6f052 8056
ca695ac9
JB
8057static void
8058preexpand_calls (exp)
8059 tree exp;
8060{
8061 register int nops, i;
8062 int type = TREE_CODE_CLASS (TREE_CODE (exp));
bbf6f052 8063
ca695ac9
JB
8064 if (! do_preexpand_calls)
8065 return;
bbf6f052 8066
ca695ac9 8067 /* Only expressions and references can contain calls. */
bbf6f052 8068
ca695ac9
JB
8069 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8070 return;
bbf6f052 8071
ca695ac9
JB
8072 switch (TREE_CODE (exp))
8073 {
8074 case CALL_EXPR:
8075 /* Do nothing if already expanded. */
8076 if (CALL_EXPR_RTL (exp) != 0)
8077 return;
bbf6f052 8078
ca695ac9
JB
8079 /* Do nothing to built-in functions. */
8080 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
8081 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
8082 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8083 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8084 return;
bbf6f052 8085
ca695ac9
JB
8086 case COMPOUND_EXPR:
8087 case COND_EXPR:
8088 case TRUTH_ANDIF_EXPR:
8089 case TRUTH_ORIF_EXPR:
8090 /* If we find one of these, then we can be sure
8091 the adjust will be done for it (since it makes jumps).
8092 Do it now, so that if this is inside an argument
8093 of a function, we don't get the stack adjustment
8094 after some other args have already been pushed. */
8095 do_pending_stack_adjust ();
8096 return;
bbf6f052 8097
ca695ac9
JB
8098 case BLOCK:
8099 case RTL_EXPR:
8100 case WITH_CLEANUP_EXPR:
8101 return;
bbf6f052 8102
ca695ac9
JB
8103 case SAVE_EXPR:
8104 if (SAVE_EXPR_RTL (exp) != 0)
8105 return;
8106 }
bbf6f052 8107
ca695ac9
JB
8108 nops = tree_code_length[(int) TREE_CODE (exp)];
8109 for (i = 0; i < nops; i++)
8110 if (TREE_OPERAND (exp, i) != 0)
8111 {
8112 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8113 if (type == 'e' || type == '<' || type == '1' || type == '2'
8114 || type == 'r')
8115 preexpand_calls (TREE_OPERAND (exp, i));
8116 }
bbf6f052
RK
8117}
8118\f
ca695ac9
JB
8119/* At the start of a function, record that we have no previously-pushed
8120 arguments waiting to be popped. */
0006469d 8121
ca695ac9
JB
8122void
8123init_pending_stack_adjust ()
8124{
8125 pending_stack_adjust = 0;
8126}
fb2ca25a 8127
ca695ac9
JB
8128/* When exiting from function, if safe, clear out any pending stack adjust
8129 so the adjustment won't get done. */
904762c8 8130
ca695ac9
JB
8131void
8132clear_pending_stack_adjust ()
fb2ca25a 8133{
ca695ac9
JB
8134#ifdef EXIT_IGNORE_STACK
8135 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
8136 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8137 && ! flag_inline_functions)
8138 pending_stack_adjust = 0;
fb2ca25a 8139#endif
fb2ca25a
KKT
8140}
8141
ca695ac9
JB
8142/* Pop any previously-pushed arguments that have not been popped yet. */
8143
8144void
8145do_pending_stack_adjust ()
8146{
8147 if (inhibit_defer_pop == 0)
8148 {
8149 if (pending_stack_adjust != 0)
8150 adjust_stack (GEN_INT (pending_stack_adjust));
8151 pending_stack_adjust = 0;
8152 }
8153}
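/* Illustration: if two earlier calls each left 8 bytes of pushed
   arguments unpopped, pending_stack_adjust is 16 and a single
   adjust_stack (GEN_INT (16)) here pops both at once -- unless
   inhibit_defer_pop is nonzero, in which case nothing happens.  */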
8154
8155/* Expand all cleanups up to OLD_CLEANUPS.
8156 Needed here, and also for language-dependent calls. */
904762c8 8157
ca695ac9
JB
8158void
8159expand_cleanups_to (old_cleanups)
8160 tree old_cleanups;
0006469d 8161{
ca695ac9 8162 while (cleanups_this_call != old_cleanups)
0006469d 8163 {
ca695ac9
JB
8164 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
8165 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
8166 }
8167}
8168\f
8169/* Expand conditional expressions. */
0006469d 8170
ca695ac9
JB
8171/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8172 LABEL is an rtx of code CODE_LABEL, in this function and all the
8173 functions here. */
0006469d 8174
ca695ac9
JB
8175void
8176jumpifnot (exp, label)
8177 tree exp;
8178 rtx label;
8179{
8180 do_jump (exp, label, NULL_RTX);
8181}
0006469d 8182
ca695ac9 8183/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
0006469d 8184
ca695ac9
JB
8185void
8186jumpif (exp, label)
8187 tree exp;
8188 rtx label;
8189{
8190 do_jump (exp, NULL_RTX, label);
8191}
0006469d 8192
ca695ac9
JB
8193/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8194 the result is zero, or IF_TRUE_LABEL if the result is one.
8195 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8196 meaning fall through in that case.
0006469d 8197
ca695ac9
JB
8198 do_jump always does any pending stack adjust except when it does not
8199 actually perform a jump. An example where there is no jump
8200 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
0006469d 8201
ca695ac9
JB
8202 This function is responsible for optimizing cases such as
8203 &&, || and comparison operators in EXP. */
904762c8 8204
ca695ac9
JB
8205void
8206do_jump (exp, if_false_label, if_true_label)
8207 tree exp;
8208 rtx if_false_label, if_true_label;
0006469d 8209{
ca695ac9
JB
8210 register enum tree_code code = TREE_CODE (exp);
8211 /* Some cases need to create a label to jump to
8212 in order to properly fall through.
8213 These cases set DROP_THROUGH_LABEL nonzero. */
8214 rtx drop_through_label = 0;
8215 rtx temp;
8216 rtx comparison = 0;
8217 int i;
8218 tree type;
0006469d 8219
ca695ac9 8220 emit_queue ();
0006469d 8221
ca695ac9
JB
8222 switch (code)
8223 {
8224 case ERROR_MARK:
8225 break;
0006469d 8226
ca695ac9
JB
8227 case INTEGER_CST:
8228 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8229 if (temp)
8230 emit_jump (temp);
8231 break;
0006469d 8232
ca695ac9
JB
8233#if 0
8234 /* This is not true with #pragma weak */
8235 case ADDR_EXPR:
8236 /* The address of something can never be zero. */
8237 if (if_true_label)
8238 emit_jump (if_true_label);
8239 break;
8240#endif
0006469d 8241
ca695ac9
JB
8242 case NOP_EXPR:
8243 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8244 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8245 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8246 goto normal;
8247 case CONVERT_EXPR:
8248 /* If we are narrowing the operand, we have to do the compare in the
8249 narrower mode. */
8250 if ((TYPE_PRECISION (TREE_TYPE (exp))
8251 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8252 goto normal;
8253 case NON_LVALUE_EXPR:
8254 case REFERENCE_EXPR:
8255 case ABS_EXPR:
8256 case NEGATE_EXPR:
8257 case LROTATE_EXPR:
8258 case RROTATE_EXPR:
8259 /* These cannot change zero->non-zero or vice versa. */
8260 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8261 break;
0006469d 8262
ca695ac9
JB
8263#if 0
8264 /* This is never less insns than evaluating the PLUS_EXPR followed by
8265 a test and can be longer if the test is eliminated. */
8266 case PLUS_EXPR:
8267 /* Reduce to minus. */
8268 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8269 TREE_OPERAND (exp, 0),
8270 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8271 TREE_OPERAND (exp, 1))));
8272 /* Process as MINUS. */
0006469d 8273#endif
0006469d 8274
ca695ac9
JB
8275 case MINUS_EXPR:
8276 /* Non-zero iff operands of minus differ. */
8277 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
8278 TREE_OPERAND (exp, 0),
8279 TREE_OPERAND (exp, 1)),
8280 NE, NE);
8281 break;
904762c8 8282
ca695ac9
JB
8283 case BIT_AND_EXPR:
8284 /* If we are AND'ing with a small constant, do this comparison in the
8285 smallest type that fits. If the machine doesn't have comparisons
8286 that small, it will be converted back to the wider comparison.
8287 This helps if we are testing the sign bit of a narrower object.
8288 combine can't do this for us because it can't know whether a
8289 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
0006469d 8290
ca695ac9
JB
8291 if (! SLOW_BYTE_ACCESS
8292 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8293 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8294 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8295 && (type = type_for_size (i + 1, 1)) != 0
8296 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8297 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8298 != CODE_FOR_nothing))
8299 {
8300 do_jump (convert (type, exp), if_false_label, if_true_label);
8301 break;
8302 }
8303 goto normal;
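      /* Editor's note, a worked example of the narrowing above: for
	 `if (x & 0x80)' with 32-bit int, floor_log2 gives i == 7, so
	 TYPE is an 8-bit unsigned type and the jump is done as a
	 QImode test of the low byte instead of a full SImode AND
	 followed by a wide compare.  */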
904762c8 8304
ca695ac9
JB
8305 case TRUTH_NOT_EXPR:
8306 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8307 break;
0006469d 8308
ca695ac9
JB
8309 case TRUTH_ANDIF_EXPR:
8310 if (if_false_label == 0)
8311 if_false_label = drop_through_label = gen_label_rtx ();
8312 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8313 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8314 break;
0006469d 8315
ca695ac9
JB
8316 case TRUTH_ORIF_EXPR:
8317 if (if_true_label == 0)
8318 if_true_label = drop_through_label = gen_label_rtx ();
8319 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8320 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8321 break;
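      /* Editor's sketch of the short-circuit lowering above.  For
	 `a && b' (TRUTH_ANDIF_EXPR) the two do_jump calls produce
	 roughly:

	     if (!a) goto if_false_label;
	     if (b) goto if_true_label;
	   if_false_label: ...

	 and dually for `a || b' (TRUTH_ORIF_EXPR), where the first
	 operand jumps directly to IF_TRUE_LABEL when nonzero.  */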
0006469d 8322
ca695ac9 8323 case COMPOUND_EXPR:
0088fcb1 8324 push_temp_slots ();
ca695ac9
JB
8325 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8326 free_temp_slots ();
0088fcb1 8327 pop_temp_slots ();
ca695ac9
JB
8328 emit_queue ();
8329 do_pending_stack_adjust ();
8330 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8331 break;
0006469d 8332
ca695ac9
JB
8333 case COMPONENT_REF:
8334 case BIT_FIELD_REF:
8335 case ARRAY_REF:
8336 {
8337 int bitsize, bitpos, unsignedp;
8338 enum machine_mode mode;
8339 tree type;
8340 tree offset;
8341 int volatilep = 0;
0006469d 8342
ca695ac9
JB
8343 /* Get description of this reference. We don't actually care
8344 about the underlying object here. */
8345 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8346 &mode, &unsignedp, &volatilep);
0006469d 8347
ca695ac9
JB
8348 type = type_for_size (bitsize, unsignedp);
8349 if (! SLOW_BYTE_ACCESS
8350 && type != 0 && bitsize >= 0
8351 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8352 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8353 != CODE_FOR_nothing))
8354 {
8355 do_jump (convert (type, exp), if_false_label, if_true_label);
8356 break;
8357 }
8358 goto normal;
8359 }
0006469d 8360
ca695ac9
JB
8361 case COND_EXPR:
8362 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8363 if (integer_onep (TREE_OPERAND (exp, 1))
8364 && integer_zerop (TREE_OPERAND (exp, 2)))
8365 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
904762c8 8366
ca695ac9
JB
8367 else if (integer_zerop (TREE_OPERAND (exp, 1))
8368 && integer_onep (TREE_OPERAND (exp, 2)))
8369 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
0006469d 8370
ca695ac9
JB
8371 else
8372 {
8373 register rtx label1 = gen_label_rtx ();
8374 drop_through_label = gen_label_rtx ();
8375 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
8376 /* Now the THEN-expression. */
8377 do_jump (TREE_OPERAND (exp, 1),
8378 if_false_label ? if_false_label : drop_through_label,
8379 if_true_label ? if_true_label : drop_through_label);
8380 /* In case the do_jump just above never jumps. */
8381 do_pending_stack_adjust ();
8382 emit_label (label1);
8383 /* Now the ELSE-expression. */
8384 do_jump (TREE_OPERAND (exp, 2),
8385 if_false_label ? if_false_label : drop_through_label,
8386 if_true_label ? if_true_label : drop_through_label);
8387 }
8388 break;
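      /* Editor's sketch of the general COND_EXPR case above, for
	 `a ? b : c' where neither arm is a literal 0 or 1:

	     if (!a) goto label1;
	     jump on B to the true/false (or drop-through) labels;
	   label1:
	     jump on C to the true/false (or drop-through) labels;

	 DROP_THROUGH_LABEL is emitted at the end of do_jump so either
	 arm can fall out of the expression.  */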
0006469d 8389
ca695ac9
JB
8390 case EQ_EXPR:
8391 if (integer_zerop (TREE_OPERAND (exp, 1)))
8392 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
0766f239
RS
8393 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8394 == MODE_INT)
8395 &&
8396 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8397 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8398 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
ca695ac9
JB
8399 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8400 else
8401 comparison = compare (exp, EQ, EQ);
8402 break;
0006469d 8403
ca695ac9
JB
8404 case NE_EXPR:
8405 if (integer_zerop (TREE_OPERAND (exp, 1)))
8406 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
0766f239
RS
8407 else if (((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8408 == MODE_INT)
8409 &&
8410 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8411 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_FLOAT
8412 || GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))) == MODE_COMPLEX_INT)
ca695ac9
JB
8413 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8414 else
8415 comparison = compare (exp, NE, NE);
8416 break;
0006469d 8417
ca695ac9
JB
8418 case LT_EXPR:
8419 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8420 == MODE_INT)
8421 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8422 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8423 else
8424 comparison = compare (exp, LT, LTU);
8425 break;
0006469d 8426
ca695ac9
JB
8427 case LE_EXPR:
8428 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8429 == MODE_INT)
8430 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8431 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8432 else
8433 comparison = compare (exp, LE, LEU);
8434 break;
0006469d 8435
ca695ac9
JB
8436 case GT_EXPR:
8437 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8438 == MODE_INT)
8439 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8440 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8441 else
8442 comparison = compare (exp, GT, GTU);
8443 break;
0006469d 8444
ca695ac9
JB
8445 case GE_EXPR:
8446 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8447 == MODE_INT)
8448 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8449 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8450 else
8451 comparison = compare (exp, GE, GEU);
8452 break;
0006469d 8453
ca695ac9
JB
8454 default:
8455 normal:
8456 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8457#if 0
8458 /* This is not needed any more and causes poor code since it causes
8459 comparisons and tests from non-SI objects to have different code
8460 sequences. */
8461 /* Copy to register to avoid generating bad insns by cse
8462 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8463 if (!cse_not_expected && GET_CODE (temp) == MEM)
8464 temp = copy_to_reg (temp);
8465#endif
8466 do_pending_stack_adjust ();
8467 if (GET_CODE (temp) == CONST_INT)
8468 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
8469 else if (GET_CODE (temp) == LABEL_REF)
8470 comparison = const_true_rtx;
8471 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
8472 && !can_compare_p (GET_MODE (temp)))
8473 /* Note swapping the labels gives us not-equal. */
8474 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
8475 else if (GET_MODE (temp) != VOIDmode)
8476 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
8477 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
8478 GET_MODE (temp), NULL_RTX, 0);
8479 else
8480 abort ();
8481 }
0006469d 8482
ca695ac9
JB
8483 /* Do any postincrements in the expression that was tested. */
8484 emit_queue ();
0006469d 8485
ca695ac9
JB
8486 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
8487 straight into a conditional jump instruction as the jump condition.
8488 Otherwise, all the work has been done already. */
0006469d 8489
ca695ac9 8490 if (comparison == const_true_rtx)
0006469d 8491 {
ca695ac9
JB
8492 if (if_true_label)
8493 emit_jump (if_true_label);
0006469d 8494 }
ca695ac9
JB
8495 else if (comparison == const0_rtx)
8496 {
8497 if (if_false_label)
8498 emit_jump (if_false_label);
8499 }
8500 else if (comparison)
8501 do_jump_for_compare (comparison, if_false_label, if_true_label);
0006469d 8502
ca695ac9 8503 if (drop_through_label)
0006469d 8504 {
ca695ac9
JB
8505 /* If do_jump produces code that might be jumped around,
8506 do any stack adjusts from that code, before the place
8507 where control merges in. */
8508 do_pending_stack_adjust ();
8509 emit_label (drop_through_label);
8510 }
8511}
8512\f
8513/* Given a comparison expression EXP for values too wide to be compared
8514 with one insn, test the comparison and jump to the appropriate label.
8515 The code of EXP is ignored; we always test GT if SWAP is 0,
8516 and LT if SWAP is 1. */
0006469d 8517
ca695ac9
JB
8518static void
8519do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
8520 tree exp;
8521 int swap;
8522 rtx if_false_label, if_true_label;
8523{
8524 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
8525 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
8526 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8527 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8528 rtx drop_through_label = 0;
8529 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
8530 int i;
0006469d 8531
ca695ac9
JB
8532 if (! if_true_label || ! if_false_label)
8533 drop_through_label = gen_label_rtx ();
8534 if (! if_true_label)
8535 if_true_label = drop_through_label;
8536 if (! if_false_label)
8537 if_false_label = drop_through_label;
0006469d 8538
ca695ac9
JB
8539 /* Compare a word at a time, high order first. */
8540 for (i = 0; i < nwords; i++)
8541 {
8542 rtx comp;
8543 rtx op0_word, op1_word;
0006469d 8544
ca695ac9
JB
8545 if (WORDS_BIG_ENDIAN)
8546 {
8547 op0_word = operand_subword_force (op0, i, mode);
8548 op1_word = operand_subword_force (op1, i, mode);
8549 }
8550 else
8551 {
8552 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8553 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8554 }
0006469d 8555
ca695ac9
JB
8556 /* All but high-order word must be compared as unsigned. */
8557 comp = compare_from_rtx (op0_word, op1_word,
8558 (unsignedp || i > 0) ? GTU : GT,
8559 unsignedp, word_mode, NULL_RTX, 0);
8560 if (comp == const_true_rtx)
8561 emit_jump (if_true_label);
8562 else if (comp != const0_rtx)
8563 do_jump_for_compare (comp, NULL_RTX, if_true_label);
0006469d 8564
ca695ac9
JB
8565 /* Consider lower words only if these are equal. */
8566 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8567 NULL_RTX, 0);
8568 if (comp == const_true_rtx)
8569 emit_jump (if_false_label);
8570 else if (comp != const0_rtx)
8571 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8572 }
0006469d 8573
ca695ac9
JB
8574 if (if_false_label)
8575 emit_jump (if_false_label);
8576 if (drop_through_label)
8577 emit_label (drop_through_label);
0006469d
TW
8578}
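/* Editor's worked example for the word loop above: comparing two
   DImode values on a 32-bit target gives nwords == 2.  The first
   iteration compares the high-order words with GT (GTU if unsigned);
   the second compares the low-order words, always with GTU.  The low
   words are reached only when the NE test on the high words falls
   through, i.e. when the high-order words are equal.  */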
8579
ca695ac9
JB
8580/* Compare OP0 with OP1, word at a time, in mode MODE.
8581 UNSIGNEDP says to do unsigned comparison.
8582 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
904762c8 8583
0006469d 8584static void
ca695ac9
JB
8585do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
8586 enum machine_mode mode;
8587 int unsignedp;
8588 rtx op0, op1;
8589 rtx if_false_label, if_true_label;
0006469d 8590{
ca695ac9
JB
8591 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8592 rtx drop_through_label = 0;
8593 int i;
0006469d 8594
ca695ac9
JB
8595 if (! if_true_label || ! if_false_label)
8596 drop_through_label = gen_label_rtx ();
8597 if (! if_true_label)
8598 if_true_label = drop_through_label;
8599 if (! if_false_label)
8600 if_false_label = drop_through_label;
0006469d 8601
ca695ac9
JB
8602 /* Compare a word at a time, high order first. */
8603 for (i = 0; i < nwords; i++)
0006469d 8604 {
ca695ac9
JB
8605 rtx comp;
8606 rtx op0_word, op1_word;
0006469d 8607
ca695ac9
JB
8608 if (WORDS_BIG_ENDIAN)
8609 {
8610 op0_word = operand_subword_force (op0, i, mode);
8611 op1_word = operand_subword_force (op1, i, mode);
8612 }
8613 else
8614 {
8615 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
8616 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
8617 }
0006469d 8618
ca695ac9
JB
8619 /* All but high-order word must be compared as unsigned. */
8620 comp = compare_from_rtx (op0_word, op1_word,
8621 (unsignedp || i > 0) ? GTU : GT,
8622 unsignedp, word_mode, NULL_RTX, 0);
8623 if (comp == const_true_rtx)
8624 emit_jump (if_true_label);
8625 else if (comp != const0_rtx)
8626 do_jump_for_compare (comp, NULL_RTX, if_true_label);
0006469d 8627
ca695ac9
JB
8628 /* Consider lower words only if these are equal. */
8629 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
8630 NULL_RTX, 0);
8631 if (comp == const_true_rtx)
8632 emit_jump (if_false_label);
8633 else if (comp != const0_rtx)
8634 do_jump_for_compare (comp, NULL_RTX, if_false_label);
8635 }
0006469d 8636
ca695ac9
JB
8637 if (if_false_label)
8638 emit_jump (if_false_label);
8639 if (drop_through_label)
8640 emit_label (drop_through_label);
0006469d 8641}
bbf6f052 8642
ca695ac9
JB
8643/* Given an EQ_EXPR expression EXP for values too wide to be compared
8644 with one insn, test the comparison and jump to the appropriate label. */
8645
8646static void
8647do_jump_by_parts_equality (exp, if_false_label, if_true_label)
8648 tree exp;
8649 rtx if_false_label, if_true_label;
bbf6f052 8650{
ca695ac9
JB
8651 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8652 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8653 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
8654 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
8655 int i;
8656 rtx drop_through_label = 0;
bbf6f052 8657
ca695ac9
JB
8658 if (! if_false_label)
8659 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 8660
ca695ac9
JB
8661 for (i = 0; i < nwords; i++)
8662 {
8663 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
8664 operand_subword_force (op1, i, mode),
8665 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
8666 word_mode, NULL_RTX, 0);
8667 if (comp == const_true_rtx)
8668 emit_jump (if_false_label);
8669 else if (comp != const0_rtx)
8670 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8671 }
1499e0a8 8672
ca695ac9
JB
8673 if (if_true_label)
8674 emit_jump (if_true_label);
8675 if (drop_through_label)
8676 emit_label (drop_through_label);
8677}
8678\f
8679/* Jump according to whether OP0 is 0.
8680 We assume that OP0 has an integer mode that is too wide
8681 for the available compare insns. */
1499e0a8 8682
ca695ac9
JB
8683static void
8684do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
8685 rtx op0;
8686 rtx if_false_label, if_true_label;
8687{
8688 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
8689 int i;
8690 rtx drop_through_label = 0;
1499e0a8 8691
ca695ac9
JB
8692 if (! if_false_label)
8693 drop_through_label = if_false_label = gen_label_rtx ();
1499e0a8 8694
ca695ac9
JB
8695 for (i = 0; i < nwords; i++)
8696 {
8697 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
8698 GET_MODE (op0)),
8699 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
8700 if (comp == const_true_rtx)
8701 emit_jump (if_false_label);
8702 else if (comp != const0_rtx)
8703 do_jump_for_compare (comp, if_false_label, NULL_RTX);
8704 }
1499e0a8 8705
ca695ac9
JB
8706 if (if_true_label)
8707 emit_jump (if_true_label);
8708 if (drop_through_label)
8709 emit_label (drop_through_label);
8710}
bbf6f052 8711
ca695ac9
JB
8712/* Given a comparison expression in rtl form, output conditional branches to
8713 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
bbf6f052 8714
ca695ac9
JB
8715static void
8716do_jump_for_compare (comparison, if_false_label, if_true_label)
8717 rtx comparison, if_false_label, if_true_label;
8718{
8719 if (if_true_label)
a358cee0 8720 {
ca695ac9
JB
8721 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8722 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
8723 else
8724 abort ();
a358cee0 8725
ca695ac9
JB
8726 if (if_false_label)
8727 emit_jump (if_false_label);
c980ac49 8728 }
ca695ac9 8729 else if (if_false_label)
bbf6f052 8730 {
ca695ac9 8731 rtx insn;
f12f485a 8732 rtx prev = get_last_insn ();
ca695ac9 8733 rtx branch = 0;
bbf6f052 8734
f12f485a
RK
8735 if (prev != 0)
8736 prev = PREV_INSN (prev);
8737
ca695ac9
JB
8738 /* Output the branch with the opposite condition. Then try to invert
8739 what is generated. If more than one insn is a branch, or if the
8740 branch is not the last insn written, abort. If we can't invert
 8741 the branch, make a true label, redirect this jump to it,
8742 emit a jump to the false label and define the true label. */
bbf6f052 8743
ca695ac9
JB
8744 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
8745 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
8746 else
8747 abort ();
bbf6f052 8748
ca695ac9
JB
8749 /* Here we get the insn before what was just emitted.
8750 On some machines, emitting the branch can discard
8751 the previous compare insn and emit a replacement. */
8752 if (prev == 0)
8753 /* If there's only one preceding insn... */
8754 insn = get_insns ();
8755 else
8756 insn = NEXT_INSN (prev);
bbf6f052 8757
ca695ac9
JB
8758 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
8759 if (GET_CODE (insn) == JUMP_INSN)
8760 {
8761 if (branch)
8762 abort ();
8763 branch = insn;
8764 }
8765
8766 if (branch != get_last_insn ())
8767 abort ();
8768
127e4d19 8769 JUMP_LABEL (branch) = if_false_label;
ca695ac9
JB
8770 if (! invert_jump (branch, if_false_label))
8771 {
8772 if_true_label = gen_label_rtx ();
8773 redirect_jump (branch, if_true_label);
8774 emit_jump (if_false_label);
8775 emit_label (if_true_label);
bbf6f052
RK
8776 }
8777 }
ca695ac9
JB
8778}
8779\f
8780/* Generate code for a comparison expression EXP
8781 (including code to compute the values to be compared)
8782 and set (CC0) according to the result.
8783 SIGNED_CODE should be the rtx operation for this comparison for
8784 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
8785
8786 We force a stack adjustment unless there are currently
8787 things pushed on the stack that aren't yet used. */
8788
8789static rtx
8790compare (exp, signed_code, unsigned_code)
8791 register tree exp;
8792 enum rtx_code signed_code, unsigned_code;
8793{
8794 register rtx op0
8795 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8796 register rtx op1
8797 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8798 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
8799 register enum machine_mode mode = TYPE_MODE (type);
8800 int unsignedp = TREE_UNSIGNED (type);
8801 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
bbf6f052 8802
ca695ac9
JB
8803 return compare_from_rtx (op0, op1, code, unsignedp, mode,
8804 ((mode == BLKmode)
8805 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
8806 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
8807}
bbf6f052 8808
ca695ac9
JB
8809/* Like compare but expects the values to compare as two rtx's.
8810 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 8811
ca695ac9
JB
8812 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
8813 compared.
bbf6f052 8814
ca695ac9
JB
8815 If ALIGN is non-zero, it is the alignment of this type; if zero, the
8816 size of MODE should be used. */
bbf6f052 8817
ca695ac9
JB
8818rtx
8819compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
8820 register rtx op0, op1;
8821 enum rtx_code code;
8822 int unsignedp;
8823 enum machine_mode mode;
8824 rtx size;
8825 int align;
8826{
8827 rtx tem;
bbf6f052 8828
ca695ac9
JB
8829 /* If one operand is constant, make it the second one. Only do this
8830 if the other operand is not constant as well. */
bbf6f052 8831
ca695ac9
JB
8832 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
8833 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
8834 {
8835 tem = op0;
8836 op0 = op1;
8837 op1 = tem;
8838 code = swap_condition (code);
8839 }
bbf6f052 8840
ca695ac9 8841 if (flag_force_mem)
bbf6f052 8842 {
ca695ac9
JB
8843 op0 = force_not_mem (op0);
8844 op1 = force_not_mem (op1);
8845 }
bbf6f052 8846
ca695ac9 8847 do_pending_stack_adjust ();
bbf6f052 8848
ca695ac9
JB
8849 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
8850 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
8851 return tem;
bbf6f052 8852
ca695ac9
JB
8853#if 0
8854 /* There's no need to do this now that combine.c can eliminate lots of
8855 sign extensions. This can be less efficient in certain cases on other
8856 machines. */
bbf6f052 8857
ca695ac9
JB
8858 /* If this is a signed equality comparison, we can do it as an
8859 unsigned comparison since zero-extension is cheaper than sign
8860 extension and comparisons with zero are done as unsigned. This is
8861 the case even on machines that can do fast sign extension, since
8862 zero-extension is easier to combine with other operations than
8863 sign-extension is. If we are comparing against a constant, we must
8864 convert it to what it would look like unsigned. */
8865 if ((code == EQ || code == NE) && ! unsignedp
8866 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
8867 {
8868 if (GET_CODE (op1) == CONST_INT
8869 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
8870 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
8871 unsignedp = 1;
bbf6f052 8872 }
ca695ac9
JB
8873#endif
8874
8875 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
bbf6f052 8876
ca695ac9 8877 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
bbf6f052
RK
8878}
8879\f
ca695ac9
JB
8880/* Generate code to calculate EXP using a store-flag instruction
8881 and return an rtx for the result. EXP is either a comparison
8882 or a TRUTH_NOT_EXPR whose operand is a comparison.
bbf6f052 8883
ca695ac9 8884 If TARGET is nonzero, store the result there if convenient.
bbf6f052 8885
ca695ac9
JB
8886 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
8887 cheap.
bbf6f052 8888
ca695ac9
JB
8889 Return zero if there is no suitable set-flag instruction
8890 available on this machine.
bbf6f052 8891
ca695ac9
JB
8892 Once expand_expr has been called on the arguments of the comparison,
8893 we are committed to doing the store flag, since it is not safe to
8894 re-evaluate the expression. We emit the store-flag insn by calling
8895 emit_store_flag, but only expand the arguments if we have a reason
8896 to believe that emit_store_flag will be successful. If we think that
8897 it will, but it isn't, we have to simulate the store-flag with a
8898 set/jump/set sequence. */
bbf6f052 8899
ca695ac9
JB
8900static rtx
8901do_store_flag (exp, target, mode, only_cheap)
8902 tree exp;
8903 rtx target;
8904 enum machine_mode mode;
8905 int only_cheap;
bbf6f052 8906{
ca695ac9
JB
8907 enum rtx_code code;
8908 tree arg0, arg1, type;
8909 tree tem;
8910 enum machine_mode operand_mode;
8911 int invert = 0;
8912 int unsignedp;
8913 rtx op0, op1;
8914 enum insn_code icode;
8915 rtx subtarget = target;
8916 rtx result, label, pattern, jump_pat;
bbf6f052 8917
ca695ac9
JB
8918 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
8919 result at the end. We can't simply invert the test since it would
8920 have already been inverted if it were valid. This case occurs for
8921 some floating-point comparisons. */
8922
8923 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
8924 invert = 1, exp = TREE_OPERAND (exp, 0);
8925
8926 arg0 = TREE_OPERAND (exp, 0);
8927 arg1 = TREE_OPERAND (exp, 1);
8928 type = TREE_TYPE (arg0);
8929 operand_mode = TYPE_MODE (type);
8930 unsignedp = TREE_UNSIGNED (type);
8931
8932 /* We won't bother with BLKmode store-flag operations because it would mean
8933 passing a lot of information to emit_store_flag. */
8934 if (operand_mode == BLKmode)
8935 return 0;
8936
8937 STRIP_NOPS (arg0);
8938 STRIP_NOPS (arg1);
8939
8940 /* Get the rtx comparison code to use. We know that EXP is a comparison
8941 operation of some type. Some comparisons against 1 and -1 can be
8942 converted to comparisons with zero. Do so here so that the tests
8943 below will be aware that we have a comparison with zero. These
8944 tests will not catch constants in the first operand, but constants
8945 are rarely passed as the first operand. */
8946
8947 switch (TREE_CODE (exp))
8948 {
8949 case EQ_EXPR:
8950 code = EQ;
8951 break;
8952 case NE_EXPR:
8953 code = NE;
8954 break;
8955 case LT_EXPR:
8956 if (integer_onep (arg1))
8957 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
8958 else
8959 code = unsignedp ? LTU : LT;
8960 break;
8961 case LE_EXPR:
8962 if (! unsignedp && integer_all_onesp (arg1))
8963 arg1 = integer_zero_node, code = LT;
8964 else
8965 code = unsignedp ? LEU : LE;
8966 break;
8967 case GT_EXPR:
8968 if (! unsignedp && integer_all_onesp (arg1))
8969 arg1 = integer_zero_node, code = GE;
8970 else
8971 code = unsignedp ? GTU : GT;
8972 break;
8973 case GE_EXPR:
8974 if (integer_onep (arg1))
8975 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
8976 else
8977 code = unsignedp ? GEU : GE;
8978 break;
8979 default:
8980 abort ();
8981 }
bbf6f052 8982
ca695ac9
JB
8983 /* Put a constant second. */
8984 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
bbf6f052 8985 {
ca695ac9
JB
8986 tem = arg0; arg0 = arg1; arg1 = tem;
8987 code = swap_condition (code);
bbf6f052 8988 }
bbf6f052 8989
ca695ac9
JB
8990 /* If this is an equality or inequality test of a single bit, we can
8991 do this by shifting the bit being tested to the low-order bit and
8992 masking the result with the constant 1. If the condition was EQ,
8993 we xor it with 1. This does not require an scc insn and is faster
8994 than an scc insn even if we have it. */
bbf6f052 8995
ca695ac9
JB
8996 if ((code == NE || code == EQ)
8997 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
8998 && integer_pow2p (TREE_OPERAND (arg0, 1))
8999 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
9000 {
9001 tree inner = TREE_OPERAND (arg0, 0);
9002 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
9003 NULL_RTX, VOIDmode, 0)));
9004 int ops_unsignedp;
bbf6f052 9005
ca695ac9
JB
9006 /* If INNER is a right shift of a constant and it plus BITNUM does
9007 not overflow, adjust BITNUM and INNER. */
bbf6f052 9008
ca695ac9
JB
9009 if (TREE_CODE (inner) == RSHIFT_EXPR
9010 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9011 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9012 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
9013 < TYPE_PRECISION (type)))
9014 {
 9015 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9016 inner = TREE_OPERAND (inner, 0);
9017 }
bbf6f052 9018
ca695ac9
JB
9019 /* If we are going to be able to omit the AND below, we must do our
9020 operations as unsigned. If we must use the AND, we have a choice.
9021 Normally unsigned is faster, but for some machines signed is. */
9022 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
ad92c826
RK
9023#ifdef LOAD_EXTEND_OP
9024 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
ca695ac9
JB
9025#else
9026 : 1
9027#endif
9028 );
bbf6f052 9029
ca695ac9
JB
9030 if (subtarget == 0 || GET_CODE (subtarget) != REG
9031 || GET_MODE (subtarget) != operand_mode
9032 || ! safe_from_p (subtarget, inner))
9033 subtarget = 0;
e7c33f54 9034
ca695ac9 9035 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
bbf6f052 9036
ca695ac9
JB
9037 if (bitnum != 0)
9038 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
0c316b20 9039 size_int (bitnum), subtarget, ops_unsignedp);
bbf6f052 9040
ca695ac9
JB
9041 if (GET_MODE (op0) != mode)
9042 op0 = convert_to_mode (mode, op0, ops_unsignedp);
bbf6f052 9043
ca695ac9 9044 if ((code == EQ && ! invert) || (code == NE && invert))
0c316b20 9045 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
ca695ac9 9046 ops_unsignedp, OPTAB_LIB_WIDEN);
bbf6f052 9047
ca695ac9
JB
9048 /* Put the AND last so it can combine with more things. */
9049 if (bitnum != TYPE_PRECISION (type) - 1)
0c316b20 9050 op0 = expand_and (op0, const1_rtx, subtarget);
bbf6f052 9051
ca695ac9
JB
9052 return op0;
9053 }
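  /* Editor's illustration of the shift/mask path above: for
     `(x & 8) != 0', BITNUM is 3, so the code emitted is roughly

	 op0 = x >> 3;
	 op0 &= 1;

     with an XOR against 1 inserted for the EQ sense -- no store-flag
     (scc) instruction is required.  */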
bbf6f052 9054
ca695ac9
JB
9055 /* Now see if we are likely to be able to do this. Return if not. */
9056 if (! can_compare_p (operand_mode))
9057 return 0;
9058 icode = setcc_gen_code[(int) code];
9059 if (icode == CODE_FOR_nothing
9060 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
9061 {
9062 /* We can only do this if it is one of the special cases that
9063 can be handled without an scc insn. */
9064 if ((code == LT && integer_zerop (arg1))
9065 || (! only_cheap && code == GE && integer_zerop (arg1)))
9066 ;
9067 else if (BRANCH_COST >= 0
9068 && ! only_cheap && (code == NE || code == EQ)
9069 && TREE_CODE (type) != REAL_TYPE
9070 && ((abs_optab->handlers[(int) operand_mode].insn_code
9071 != CODE_FOR_nothing)
9072 || (ffs_optab->handlers[(int) operand_mode].insn_code
9073 != CODE_FOR_nothing)))
9074 ;
9075 else
9076 return 0;
9077 }
9078
9079 preexpand_calls (exp);
9080 if (subtarget == 0 || GET_CODE (subtarget) != REG
9081 || GET_MODE (subtarget) != operand_mode
9082 || ! safe_from_p (subtarget, arg1))
9083 subtarget = 0;
bbf6f052 9084
ca695ac9
JB
9085 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
9086 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
bbf6f052 9087
ca695ac9
JB
9088 if (target == 0)
9089 target = gen_reg_rtx (mode);
bbf6f052 9090
ca695ac9
JB
9091 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
9092 because, if the emit_store_flag does anything it will succeed and
9093 OP0 and OP1 will not be used subsequently. */
bbf6f052 9094
ca695ac9
JB
9095 result = emit_store_flag (target, code,
9096 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
9097 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
9098 operand_mode, unsignedp, 1);
bbf6f052 9099
ca695ac9
JB
9100 if (result)
9101 {
9102 if (invert)
9103 result = expand_binop (mode, xor_optab, result, const1_rtx,
9104 result, 0, OPTAB_LIB_WIDEN);
9105 return result;
9106 }
bbf6f052 9107
ca695ac9
JB
9108 /* If this failed, we have to do this with set/compare/jump/set code. */
9109 if (target == 0 || GET_CODE (target) != REG
9110 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
9111 target = gen_reg_rtx (GET_MODE (target));
bbf6f052 9112
ca695ac9
JB
9113 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
9114 result = compare_from_rtx (op0, op1, code, unsignedp,
9115 operand_mode, NULL_RTX, 0);
9116 if (GET_CODE (result) == CONST_INT)
9117 return (((result == const0_rtx && ! invert)
9118 || (result != const0_rtx && invert))
9119 ? const0_rtx : const1_rtx);
bbf6f052 9120
ca695ac9
JB
9121 label = gen_label_rtx ();
9122 if (bcc_gen_fctn[(int) code] == 0)
9123 abort ();
bbf6f052 9124
ca695ac9
JB
9125 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
9126 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
9127 emit_label (label);
bbf6f052 9128
ca695ac9
JB
9129 return target;
9130}
9131\f
9132/* Generate a tablejump instruction (used for switch statements). */
bbf6f052 9133
ca695ac9 9134#ifdef HAVE_tablejump
bbf6f052 9135
ca695ac9
JB
9136/* INDEX is the value being switched on, with the lowest value
9137 in the table already subtracted.
9138 MODE is its expected mode (needed if INDEX is constant).
9139 RANGE is the length of the jump table.
9140 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
bbf6f052 9141
ca695ac9
JB
9142 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
9143 index value is out of range. */
bbf6f052 9144
ca695ac9
JB
9145void
9146do_tablejump (index, mode, range, table_label, default_label)
9147 rtx index, range, table_label, default_label;
9148 enum machine_mode mode;
9149{
9150 register rtx temp, vector;
bbf6f052 9151
ca695ac9
JB
9152 /* Do an unsigned comparison (in the proper mode) between the index
9153 expression and the value which represents the length of the range.
9154 Since we just finished subtracting the lower bound of the range
9155 from the index expression, this comparison allows us to simultaneously
9156 check that the original index expression value is both greater than
9157 or equal to the minimum value of the range and less than or equal to
9158 the maximum value of the range. */
bbf6f052 9159
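  /* Editor's example of the range trick described above: for case
     values 5..10 the caller has computed INDEX = orig - 5 and
     RANGE = 5.  If orig was below 5, the unsigned subtraction wrapped
     to a very large value, so the single GTU test below rejects
     values on both sides of the table.  */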
bf500664
RK
9160 emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
9161 emit_jump_insn (gen_bgtu (default_label));
bbf6f052 9162
ca695ac9
JB
9163 /* If index is in range, it must fit in Pmode.
9164 Convert to Pmode so we can index with it. */
9165 if (mode != Pmode)
9166 index = convert_to_mode (Pmode, index, 1);
bbf6f052 9167
ca695ac9
JB
9168 /* Don't let a MEM slip thru, because then INDEX that comes
9169 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
9170 and break_out_memory_refs will go to work on it and mess it up. */
9171#ifdef PIC_CASE_VECTOR_ADDRESS
9172 if (flag_pic && GET_CODE (index) != REG)
9173 index = copy_to_mode_reg (Pmode, index);
9174#endif
bbf6f052 9175
ca695ac9
JB
9176 /* If flag_force_addr were to affect this address
9177 it could interfere with the tricky assumptions made
9178 about addresses that contain label-refs,
9179 which may be valid only very near the tablejump itself. */
9180 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
9181 GET_MODE_SIZE, because this indicates how large insns are. The other
9182 uses should all be Pmode, because they are addresses. This code
9183 could fail if addresses and insns are not the same size. */
9184 index = gen_rtx (PLUS, Pmode,
9185 gen_rtx (MULT, Pmode, index,
9186 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
9187 gen_rtx (LABEL_REF, Pmode, table_label));
9188#ifdef PIC_CASE_VECTOR_ADDRESS
9189 if (flag_pic)
9190 index = PIC_CASE_VECTOR_ADDRESS (index);
9191 else
9192#endif
9193 index = memory_address_noforce (CASE_VECTOR_MODE, index);
9194 temp = gen_reg_rtx (CASE_VECTOR_MODE);
9195 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
9196 RTX_UNCHANGING_P (vector) = 1;
9197 convert_move (temp, vector, 0);
bbf6f052 9198
ca695ac9 9199 emit_jump_insn (gen_tablejump (temp, table_label));
bbf6f052 9200
ca695ac9
JB
9201#ifndef CASE_VECTOR_PC_RELATIVE
9202 /* If we are generating PIC code or if the table is PC-relative, the
9203 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
9204 if (! flag_pic)
9205 emit_barrier ();
bbf6f052 9206#endif
ca695ac9 9207}
bbf6f052 9208
ca695ac9 9209#endif /* HAVE_tablejump */
bbf6f052 9210
bbf6f052 9211
ca695ac9
JB
9212/* Emit a suitable bytecode to load a value from memory, assuming a pointer
9213 to that value is on the top of the stack. The resulting type is TYPE, and
9214 the source declaration is DECL. */
bbf6f052 9215
ca695ac9
JB
9216void
9217bc_load_memory (type, decl)
9218 tree type, decl;
9219{
9220 enum bytecode_opcode opcode;
9221
9222
9223 /* Bit fields are special. We only know about signed and
9224 unsigned ints, and enums. The latter are treated as
9225 signed integers. */
9226
9227 if (DECL_BIT_FIELD (decl))
9228 if (TREE_CODE (type) == ENUMERAL_TYPE
9229 || TREE_CODE (type) == INTEGER_TYPE)
9230 opcode = TREE_UNSIGNED (type) ? zxloadBI : sxloadBI;
9231 else
9232 abort ();
9233 else
9234 /* See corresponding comment in bc_store_memory(). */
9235 if (TYPE_MODE (type) == BLKmode
9236 || TYPE_MODE (type) == VOIDmode)
9237 return;
9238 else
6bd6178d 9239 opcode = mode_to_load_map [(int) TYPE_MODE (type)];
bbf6f052 9240
ca695ac9
JB
9241 if (opcode == neverneverland)
9242 abort ();
9243
9244 bc_emit_bytecode (opcode);
9245
9246#ifdef DEBUG_PRINT_CODE
9247 fputc ('\n', stderr);
9248#endif
bbf6f052 9249}
bbf6f052 9250
bbf6f052 9251
ca695ac9
JB
9252/* Store the contents of the second stack slot to the address in the
9253 top stack slot. DECL is the declaration of the destination and is used
9254 to determine whether we're dealing with a bitfield. */
bbf6f052 9255
ca695ac9
JB
9256void
9257bc_store_memory (type, decl)
9258 tree type, decl;
9259{
9260 enum bytecode_opcode opcode;
9261
9262
9263 if (DECL_BIT_FIELD (decl))
f81497d9 9264 {
ca695ac9
JB
9265 if (TREE_CODE (type) == ENUMERAL_TYPE
9266 || TREE_CODE (type) == INTEGER_TYPE)
9267 opcode = sstoreBI;
f81497d9 9268 else
ca695ac9 9269 abort ();
f81497d9 9270 }
ca695ac9
JB
9271 else
9272 if (TYPE_MODE (type) == BLKmode)
9273 {
9274 /* Copy structure. This expands to a block copy instruction, storeBLK.
9275 In addition to the arguments expected by the other store instructions,
9276 it also expects a type size (SImode) on top of the stack, which is the
 9277 structure size in size units (usually bytes). The first two arguments
9278 are already on the stack; so we just put the size on level 1. For some
9279 other languages, the size may be variable, this is why we don't encode
9280 it as a storeBLK literal, but rather treat it as a full-fledged expression. */
9281
9282 bc_expand_expr (TYPE_SIZE (type));
9283 opcode = storeBLK;
9284 }
9285 else
6bd6178d 9286 opcode = mode_to_store_map [(int) TYPE_MODE (type)];
f81497d9 9287
ca695ac9
JB
9288 if (opcode == neverneverland)
9289 abort ();
9290
9291 bc_emit_bytecode (opcode);
9292
9293#ifdef DEBUG_PRINT_CODE
9294 fputc ('\n', stderr);
9295#endif
f81497d9
RS
9296}
9297
f81497d9 9298
ca695ac9
JB
9299/* Allocate local stack space sufficient to hold a value of the given
9300 SIZE at alignment boundary ALIGNMENT bits. ALIGNMENT must be an
9301 integral power of 2. A special case is locals of type VOID, which
 9302 have size 0 and alignment 1; any "voidish" SIZE or ALIGNMENT is
9303 remapped into the corresponding attribute of SI. */
9304
9305rtx
9306bc_allocate_local (size, alignment)
9307 int size, alignment;
f81497d9 9308{
ca695ac9
JB
9309 rtx retval;
9310 int byte_alignment;
f81497d9 9311
ca695ac9
JB
9312 if (size < 0)
9313 abort ();
f81497d9 9314
ca695ac9
JB
9315 /* Normalize size and alignment */
9316 if (!size)
9317 size = UNITS_PER_WORD;
bbf6f052 9318
ca695ac9
JB
9319 if (alignment < BITS_PER_UNIT)
9320 byte_alignment = 1 << (INT_ALIGN - 1);
9321 else
9322 /* Align */
9323 byte_alignment = alignment / BITS_PER_UNIT;
bbf6f052 9324
ca695ac9
JB
9325 if (local_vars_size & (byte_alignment - 1))
9326 local_vars_size += byte_alignment - (local_vars_size & (byte_alignment - 1));
bbf6f052 9327
ca695ac9
JB
9328 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9329 local_vars_size += size;
bbf6f052 9330
ca695ac9 9331 return retval;
bbf6f052
RK
9332}
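/* Editor's note on the rounding in bc_allocate_local: with
   local_vars_size == 13 and byte_alignment == 4, the adjustment adds
   4 - (13 & 3) == 3, so the new local lands at offset 16.  */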
9333
bbf6f052 9334
ca695ac9
JB
9335/* Allocate variable-sized local array. Variable-sized arrays are
9336 actually pointers to the address in memory where they are stored. */
9337
9338rtx
9339bc_allocate_variable_array (size)
9340 tree size;
bbf6f052 9341{
ca695ac9
JB
9342 rtx retval;
9343 const int ptralign = (1 << (PTR_ALIGN - 1));
bbf6f052 9344
ca695ac9
JB
9345 /* Align pointer */
9346 if (local_vars_size & ptralign)
9347 local_vars_size += ptralign - (local_vars_size & ptralign);
bbf6f052 9348
ca695ac9
JB
9349 /* Note down local space needed: pointer to block; also return
9350 dummy rtx */
bbf6f052 9351
ca695ac9
JB
9352 retval = bc_gen_rtx ((char *) 0, local_vars_size, (struct bc_label *) 0);
9353 local_vars_size += POINTER_SIZE / BITS_PER_UNIT;
9354 return retval;
bbf6f052 9355}
bbf6f052 9356
bbf6f052 9357
ca695ac9
JB
9358/* Push the machine address for the given external variable offset. */
9359void
9360bc_load_externaddr (externaddr)
9361 rtx externaddr;
9362{
9363 bc_emit_bytecode (constP);
e7a42772
JB
9364 bc_emit_code_labelref (BYTECODE_LABEL (externaddr),
9365 BYTECODE_BC_LABEL (externaddr)->offset);
bbf6f052 9366
ca695ac9
JB
9367#ifdef DEBUG_PRINT_CODE
9368 fputc ('\n', stderr);
9369#endif
bbf6f052
RK
9370}
9371
bbf6f052 9372
ca695ac9
JB
9373static char *
9374bc_strdup (s)
9375 char *s;
bbf6f052 9376{
5e70898c
RS
9377 char *new = (char *) xmalloc ((strlen (s) + 1) * sizeof *s);
9378 strcpy (new, s);
9379 return new;
ca695ac9 9380}
bbf6f052 9381
bbf6f052 9382
ca695ac9
JB
9383/* Like above, but expects an IDENTIFIER. */
9384void
9385bc_load_externaddr_id (id, offset)
9386 tree id;
9387 int offset;
9388{
9389 if (!IDENTIFIER_POINTER (id))
9390 abort ();
bbf6f052 9391
ca695ac9
JB
9392 bc_emit_bytecode (constP);
 9393 bc_emit_code_labelref (bc_strdup (IDENTIFIER_POINTER (id)), offset);
bbf6f052 9394
ca695ac9
JB
9395#ifdef DEBUG_PRINT_CODE
9396 fputc ('\n', stderr);
9397#endif
9398}
bbf6f052 9399
bbf6f052 9400
ca695ac9
JB
9401/* Push the machine address for the given local variable offset. */
9402void
9403bc_load_localaddr (localaddr)
9404 rtx localaddr;
9405{
e7a42772 9406 bc_emit_instruction (localP, (HOST_WIDE_INT) BYTECODE_BC_LABEL (localaddr)->offset);
bbf6f052 9407}
bbf6f052 9408
bbf6f052 9409
ca695ac9
JB
9410/* Push the machine address for the given parameter offset.
9411 NOTE: offset is in bits. */
9412void
9413bc_load_parmaddr (parmaddr)
9414 rtx parmaddr;
bbf6f052 9415{
e7a42772
JB
9416 bc_emit_instruction (argP, ((HOST_WIDE_INT) BYTECODE_BC_LABEL (parmaddr)->offset
9417 / BITS_PER_UNIT));
ca695ac9 9418}
bbf6f052 9419
ca695ac9
JB
9420
9421/* Convert a[i] into *(a + i). */
9422tree
9423bc_canonicalize_array_ref (exp)
9424 tree exp;
9425{
9426 tree type = TREE_TYPE (exp);
9427 tree array_adr = build1 (ADDR_EXPR, TYPE_POINTER_TO (type),
9428 TREE_OPERAND (exp, 0));
9429 tree index = TREE_OPERAND (exp, 1);
9430
9431
9432 /* Convert the integer argument to a type the same size as a pointer
9433 so the multiply won't overflow spuriously. */
9434
9435 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
9436 index = convert (type_for_size (POINTER_SIZE, 0), index);
9437
9438 /* The array address isn't volatile even if the array is.
9439 (Of course this isn't terribly relevant since the bytecode
9440 translator treats nearly everything as volatile anyway.) */
9441 TREE_THIS_VOLATILE (array_adr) = 0;
9442
9443 return build1 (INDIRECT_REF, type,
9444 fold (build (PLUS_EXPR,
9445 TYPE_POINTER_TO (type),
9446 array_adr,
9447 fold (build (MULT_EXPR,
9448 TYPE_POINTER_TO (type),
9449 index,
9450 size_in_bytes (type))))));
bbf6f052
RK
9451}
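/* Editor's example of the rewrite above: for `int a[10]', the
   reference a[i] becomes roughly *(&a + i * sizeof (int)), with the
   index first converted to pointer width so the multiplication
   cannot overflow spuriously.  */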
9452
bbf6f052 9453
ca695ac9
JB
9454/* Load the address of the component referenced by the given
9455 COMPONENT_REF expression.
bbf6f052 9456
ca695ac9 9457 Returns innermost lvalue. */
bbf6f052 9458
ca695ac9
JB
9459tree
9460bc_expand_component_address (exp)
9461 tree exp;
bbf6f052 9462{
ca695ac9
JB
9463 tree tem, chain;
9464 enum machine_mode mode;
9465 int bitpos = 0;
9466 HOST_WIDE_INT SIval;
a7c5971a 9467
bbf6f052 9468
ca695ac9
JB
9469 tem = TREE_OPERAND (exp, 1);
9470 mode = DECL_MODE (tem);
bbf6f052 9471
ca695ac9
JB
9472
9473 /* Compute cumulative bit offset for nested component refs
9474 and array refs, and find the ultimate containing object. */
9475
9476 for (tem = exp;; tem = TREE_OPERAND (tem, 0))
bbf6f052 9477 {
ca695ac9
JB
9478 if (TREE_CODE (tem) == COMPONENT_REF)
9479 bitpos += TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (tem, 1)));
9480 else
9481 if (TREE_CODE (tem) == ARRAY_REF
9482 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
9483 && TREE_CODE (TYPE_SIZE (TREE_TYPE (tem))) == INTEGER_CST)
bbf6f052 9484
ca695ac9
JB
9485 bitpos += (TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))
9486 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (tem)))
9487 /* * TYPE_SIZE_UNIT (TREE_TYPE (tem)) */);
9488 else
9489 break;
9490 }
bbf6f052 9491
c02bd5d9 9492 bc_expand_expr (tem);
bbf6f052 9493
cd1b4b44 9494
ca695ac9
JB
9495 /* For bitfields also push their offset and size */
9496 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
9497 bc_push_offset_and_size (bitpos, /* DECL_SIZE_UNIT */ (TREE_OPERAND (exp, 1)));
9498 else
 9499 if ((SIval = bitpos / BITS_PER_UNIT) != 0)
9500 bc_emit_instruction (addconstPSI, SIval);
bbf6f052 9501
ca695ac9 9502 return (TREE_OPERAND (exp, 1));
bbf6f052 9503}
e7c33f54 9504
bbf6f052 9505
ca695ac9
JB
9506/* Emit code to push two SI constants */
9507void
9508bc_push_offset_and_size (offset, size)
9509 HOST_WIDE_INT offset, size;
9510{
9511 bc_emit_instruction (constSI, offset);
9512 bc_emit_instruction (constSI, size);
9513}
bbf6f052 9514
bbf6f052 9515
ca695ac9
JB
9516/* Emit byte code to push the address of the given lvalue expression to
9517 the stack. If it's a bit field, we also push offset and size info.
bbf6f052 9518
ca695ac9
JB
9519 Returns innermost component, which allows us to determine not only
9520 its type, but also whether it's a bitfield. */
9521
9522tree
9523bc_expand_address (exp)
bbf6f052 9524 tree exp;
bbf6f052 9525{
ca695ac9
JB
9526 /* Safeguard */
9527 if (!exp || TREE_CODE (exp) == ERROR_MARK)
9528 return (exp);
bbf6f052 9529
e7c33f54 9530
ca695ac9
JB
9531 switch (TREE_CODE (exp))
9532 {
9533 case ARRAY_REF:
e7c33f54 9534
ca695ac9 9535 return (bc_expand_address (bc_canonicalize_array_ref (exp)));
e7c33f54 9536
ca695ac9 9537 case COMPONENT_REF:
bbf6f052 9538
ca695ac9 9539 return (bc_expand_component_address (exp));
bbf6f052 9540
ca695ac9 9541 case INDIRECT_REF:
bbf6f052 9542
ca695ac9
JB
9543 bc_expand_expr (TREE_OPERAND (exp, 0));
9544
9545 /* For variable-sized types: retrieve pointer. Sometimes the
9546 TYPE_SIZE tree is NULL. Is this a bug or a feature? Let's
9547 also make sure we have an operand, just in case... */
9548
9549 if (TREE_OPERAND (exp, 0)
9550 && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))
9551 && TREE_CODE (TYPE_SIZE (TREE_TYPE (TREE_OPERAND (exp, 0)))) != INTEGER_CST)
9552 bc_emit_instruction (loadP);
9553
9554 /* If packed, also return offset and size */
9555 if (DECL_BIT_FIELD (TREE_OPERAND (exp, 0)))
9556
9557 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (TREE_OPERAND (exp, 0))),
9558 TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (exp, 0))));
9559
9560 return (TREE_OPERAND (exp, 0));
9561
9562 case FUNCTION_DECL:
9563
e7a42772
JB
9564 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
9565 BYTECODE_BC_LABEL (DECL_RTL (exp))->offset);
bbf6f052 9566 break;
ca695ac9
JB
9567
9568 case PARM_DECL:
9569
9570 bc_load_parmaddr (DECL_RTL (exp));
9571
9572 /* For variable-sized types: retrieve pointer */
9573 if (TYPE_SIZE (TREE_TYPE (exp))
9574 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9575 bc_emit_instruction (loadP);
9576
9577 /* If packed, also return offset and size */
9578 if (DECL_BIT_FIELD (exp))
9579 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
9580 TREE_INT_CST_LOW (DECL_SIZE (exp)));
9581
bbf6f052 9582 break;
ca695ac9
JB
9583
9584 case RESULT_DECL:
9585
9586 bc_emit_instruction (returnP);
bbf6f052 9587 break;
ca695ac9
JB
9588
9589 case VAR_DECL:
9590
9591#if 0
e7a42772 9592 if (BYTECODE_LABEL (DECL_RTL (exp)))
ca695ac9
JB
9593 bc_load_externaddr (DECL_RTL (exp));
9594#endif
9595
9596 if (DECL_EXTERNAL (exp))
e7a42772 9597 bc_load_externaddr_id (DECL_ASSEMBLER_NAME (exp),
eb862a37 9598 (BYTECODE_BC_LABEL (DECL_RTL (exp)))->offset);
bbf6f052 9599 else
ca695ac9
JB
9600 bc_load_localaddr (DECL_RTL (exp));
9601
9602 /* For variable-sized types: retrieve pointer */
9603 if (TYPE_SIZE (TREE_TYPE (exp))
9604 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST)
9605 bc_emit_instruction (loadP);
9606
9607 /* If packed, also return offset and size */
9608 if (DECL_BIT_FIELD (exp))
9609 bc_push_offset_and_size (TREE_INT_CST_LOW (DECL_FIELD_BITPOS (exp)),
9610 TREE_INT_CST_LOW (DECL_SIZE (exp)));
9611
bbf6f052 9612 break;
ca695ac9
JB
9613
9614 case STRING_CST:
9615 {
9616 rtx r;
9617
9618 bc_emit_bytecode (constP);
9619 r = output_constant_def (exp);
e7a42772 9620 bc_emit_code_labelref (BYTECODE_LABEL (r), BYTECODE_BC_LABEL (r)->offset);
ca695ac9
JB
9621
9622#ifdef DEBUG_PRINT_CODE
9623 fputc ('\n', stderr);
9624#endif
9625 }
bbf6f052 9626 break;
ca695ac9 9627
bbf6f052 9628 default:
bbf6f052 9629
ca695ac9
JB
9630 abort();
9631 break;
bbf6f052
RK
9632 }
9633
ca695ac9
JB
9634 /* Most lvalues don't have components. */
9635 return (exp);
9636}
bbf6f052 9637
ca695ac9
JB
9638
9639/* Emit a type code to be used by the runtime support in handling
9640 parameter passing. The type code consists of the machine mode
9641 plus the minimal alignment shifted left 8 bits. */
9642
9643tree
9644bc_runtime_type_code (type)
9645 tree type;
9646{
9647 int val;
9648
9649 switch (TREE_CODE (type))
bbf6f052 9650 {
ca695ac9
JB
9651 case VOID_TYPE:
9652 case INTEGER_TYPE:
9653 case REAL_TYPE:
9654 case COMPLEX_TYPE:
9655 case ENUMERAL_TYPE:
9656 case POINTER_TYPE:
9657 case RECORD_TYPE:
9658
6bd6178d 9659 val = (int) TYPE_MODE (type) | TYPE_ALIGN (type) << 8;
ca695ac9
JB
9660 break;
9661
9662 case ERROR_MARK:
9663
9664 val = 0;
9665 break;
9666
9667 default:
af508edd 9668
ca695ac9
JB
9669 abort ();
9670 }
9671 return build_int_2 (val, 0);
9672}
af508edd 9673
af508edd 9674
ca695ac9
JB
9675/* Generate constructor label */
9676char *
9677bc_gen_constr_label ()
9678{
9679 static int label_counter;
9680 static char label[20];
bbf6f052 9681
ca695ac9 9682 sprintf (label, "*LR%d", label_counter++);
bbf6f052 9683
ca695ac9
JB
9684 return (obstack_copy0 (&permanent_obstack, label, strlen (label)));
9685}
bbf6f052 9686
bbf6f052 9687
ca695ac9
JB
9688/* Evaluate constructor CONSTR and return pointer to it on level one. We
9689 expand the constructor data as static data, and push a pointer to it.
9690 The pointer is put in the pointer table and is retrieved by a constP
9691 bytecode instruction. We then loop and store each constructor member in
9692 the corresponding component. Finally, we return the original pointer on
9693 the stack. */
af508edd 9694
ca695ac9
JB
9695void
9696bc_expand_constructor (constr)
9697 tree constr;
9698{
9699 char *l;
9700 HOST_WIDE_INT ptroffs;
9701 rtx constr_rtx;
bbf6f052 9702
ca695ac9
JB
9703
9704 /* Literal constructors are handled as constants, whereas
9705 non-literals are evaluated and stored element by element
9706 into the data segment. */
9707
9708 /* Allocate space in proper segment and push pointer to space on stack.
9709 */
bbf6f052 9710
ca695ac9 9711 l = bc_gen_constr_label ();
bbf6f052 9712
ca695ac9 9713 if (TREE_CONSTANT (constr))
bbf6f052 9714 {
ca695ac9
JB
9715 text_section ();
9716
9717 bc_emit_const_labeldef (l);
9718 bc_output_constructor (constr, int_size_in_bytes (TREE_TYPE (constr)));
bbf6f052 9719 }
ca695ac9
JB
9720 else
9721 {
9722 data_section ();
bbf6f052 9723
ca695ac9
JB
9724 bc_emit_data_labeldef (l);
9725 bc_output_data_constructor (constr);
9726 }
bbf6f052 9727
ca695ac9
JB
9728
9729 /* Add reference to pointer table and recall pointer to stack;
9730 this code is common for both types of constructors: literals
9731 and non-literals. */
bbf6f052 9732
de7d9320
JB
9733 ptroffs = bc_define_pointer (l);
9734 bc_emit_instruction (constP, ptroffs);
d39985fa 9735
ca695ac9
JB
9736 /* This is all that has to be done if it's a literal. */
9737 if (TREE_CONSTANT (constr))
9738 return;
bbf6f052 9739
ca695ac9
JB
9740
9741 /* At this point, we have the pointer to the structure on top of the stack.
9742 Generate sequences of store_memory calls for the constructor. */
9743
9744 /* constructor type is structure */
9745 if (TREE_CODE (TREE_TYPE (constr)) == RECORD_TYPE)
e7c33f54 9746 {
ca695ac9
JB
9747 register tree elt;
9748
9749 /* If the constructor has fewer fields than the structure,
9750 clear the whole structure first. */
9751
9752 if (list_length (CONSTRUCTOR_ELTS (constr))
9753 != list_length (TYPE_FIELDS (TREE_TYPE (constr))))
9754 {
6d6e61ce 9755 bc_emit_instruction (duplicate);
ca695ac9
JB
9756 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
9757 bc_emit_instruction (clearBLK);
9758 }
9759
9760 /* Store each element of the constructor into the corresponding
9761 field of TARGET. */
9762
9763 for (elt = CONSTRUCTOR_ELTS (constr); elt; elt = TREE_CHAIN (elt))
9764 {
9765 register tree field = TREE_PURPOSE (elt);
9766 register enum machine_mode mode;
9767 int bitsize;
9768 int bitpos;
9769 int unsignedp;
9770
9771 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field)) /* * DECL_SIZE_UNIT (field) */;
9772 mode = DECL_MODE (field);
9773 unsignedp = TREE_UNSIGNED (field);
9774
9775 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
9776
9777 bc_store_field (elt, bitsize, bitpos, mode, TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
9778 /* The alignment of TARGET is
9779 at least what its type requires. */
9780 VOIDmode, 0,
9781 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
9782 int_size_in_bytes (TREE_TYPE (constr)));
9783 }
e7c33f54 9784 }
ca695ac9
JB
9785 else
9786
9787 /* Constructor type is array */
9788 if (TREE_CODE (TREE_TYPE (constr)) == ARRAY_TYPE)
9789 {
9790 register tree elt;
9791 register int i;
9792 tree domain = TYPE_DOMAIN (TREE_TYPE (constr));
9793 int minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
9794 int maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
9795 tree elttype = TREE_TYPE (TREE_TYPE (constr));
9796
9797 /* If the constructor has fewer fields than the structure,
9798 clear the whole structure first. */
9799
9800 if (list_length (CONSTRUCTOR_ELTS (constr)) < maxelt - minelt + 1)
9801 {
6d6e61ce 9802 bc_emit_instruction (duplicate);
ca695ac9
JB
9803 bc_emit_instruction (constSI, (HOST_WIDE_INT) int_size_in_bytes (TREE_TYPE (constr)));
9804 bc_emit_instruction (clearBLK);
9805 }
9806
9807
9808 /* Store each element of the constructor into the corresponding
9809 element of TARGET, determined by counting the elements. */
9810
9811 for (elt = CONSTRUCTOR_ELTS (constr), i = 0;
9812 elt;
9813 elt = TREE_CHAIN (elt), i++)
9814 {
9815 register enum machine_mode mode;
9816 int bitsize;
9817 int bitpos;
9818 int unsignedp;
9819
9820 mode = TYPE_MODE (elttype);
9821 bitsize = GET_MODE_BITSIZE (mode);
9822 unsignedp = TREE_UNSIGNED (elttype);
9823
9824 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype))
9825 /* * TYPE_SIZE_UNIT (elttype) */ );
9826
9827 bc_store_field (elt, bitsize, bitpos, mode,
9828 TREE_VALUE (elt), TREE_TYPE (TREE_VALUE (elt)),
9829 /* The alignment of TARGET is
9830 at least what its type requires. */
9831 VOIDmode, 0,
9832 TYPE_ALIGN (TREE_TYPE (constr)) / BITS_PER_UNIT,
9833 int_size_in_bytes (TREE_TYPE (constr)));
9834 }
9835
9836 }
9837}
bbf6f052 9838
bbf6f052 9839
ca695ac9
JB
9840/* Store the value of EXP (an expression tree) into member FIELD of
9841 structure at address on stack, which has type TYPE, mode MODE and
9842 occupies BITSIZE bits, starting BITPOS bits from the beginning of the
9843 structure.
bbf6f052 9844
ca695ac9
JB
9845 ALIGN is the alignment that TARGET is known to have, measured in bytes.
9846 TOTAL_SIZE is its size in bytes, or -1 if variable. */
bbf6f052 9847
ca695ac9
JB
9848void
9849bc_store_field (field, bitsize, bitpos, mode, exp, type,
9850 value_mode, unsignedp, align, total_size)
9851 int bitsize, bitpos;
9852 enum machine_mode mode;
9853 tree field, exp, type;
9854 enum machine_mode value_mode;
9855 int unsignedp;
9856 int align;
9857 int total_size;
9858{
bbf6f052 9859
ca695ac9
JB
9860 /* Expand expression and copy pointer */
9861 bc_expand_expr (exp);
9862 bc_emit_instruction (over);
bbf6f052 9863
bbf6f052 9864
ca695ac9
JB
9865 /* If the component is a bit field, we cannot use addressing to access
9866 it. Use bit-field techniques to store in it. */
bbf6f052 9867
ca695ac9
JB
9868 if (DECL_BIT_FIELD (field))
9869 {
9870 bc_store_bit_field (bitpos, bitsize, unsignedp);
9871 return;
9872 }
9873 else
9874 /* Not bit field */
9875 {
9876 HOST_WIDE_INT offset = bitpos / BITS_PER_UNIT;
9877
9878 /* Advance pointer to the desired member */
9879 if (offset)
9880 bc_emit_instruction (addconstPSI, offset);
9881
9882 /* Store */
9883 bc_store_memory (type, field);
9884 }
9885}
bbf6f052 9886
ca695ac9
JB
9887
9888/* Store SI/SU in bitfield */
bbf6f052 9889void
ca695ac9
JB
9890bc_store_bit_field (offset, size, unsignedp)
9891 int offset, size, unsignedp;
bbf6f052 9892{
ca695ac9
JB
9893 /* Push bitfield offset and size */
9894 bc_push_offset_and_size (offset, size);
bbf6f052 9895
ca695ac9
JB
9896 /* Store */
9897 bc_emit_instruction (sstoreBI);
9898}
e87b4f3f 9899
88d3b7f0 9900
ca695ac9
JB
9901/* Load SI/SU from bitfield */
9902void
9903bc_load_bit_field (offset, size, unsignedp)
9904 int offset, size, unsignedp;
9905{
9906 /* Push bitfield offset and size */
9907 bc_push_offset_and_size (offset, size);
88d3b7f0 9908
ca695ac9
JB
9909 /* Load: sign-extend if signed, else zero-extend */
9910 bc_emit_instruction (unsignedp ? zxloadBI : sxloadBI);
9911}
709f5be1 9912
bbf6f052 9913
ca695ac9
JB
9914/* Adjust interpreter stack by NLEVELS. Positive means drop NLEVELS
9915 (adjust stack pointer upwards), negative means add that number of
9916 levels (adjust the stack pointer downwards). Only positive values
9917 normally make sense. */
bbf6f052 9918
ca695ac9
JB
9919void
9920bc_adjust_stack (nlevels)
9921 int nlevels;
9922{
9923 switch (nlevels)
9924 {
9925 case 0:
9926 break;
9927
9928 case 2:
9929 bc_emit_instruction (drop);
9930
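      /* Editor's note: the missing `break' is deliberate -- a
	 two-level drop falls through and is emitted as two single
	 drops.  */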
9931 case 1:
9932 bc_emit_instruction (drop);
9933 break;
9934
9935 default:
9936
9937 bc_emit_instruction (adjstackSI, (HOST_WIDE_INT) nlevels);
9938 stack_depth -= nlevels;
9939 }
9940
a68c7608
RS
9941#if defined (VALIDATE_STACK_FOR_BC)
9942 VALIDATE_STACK_FOR_BC ();
bbf6f052
RK
9943#endif
9944}