]> gcc.gnu.org Git - gcc.git/blame - gcc/expr.c
(basic_induction_var): New arg MODE.
[gcc.git] / gcc / expr.c
CommitLineData
bbf6f052 1/* Convert tree expression to rtl instructions, for GNU compiler.
4be204f0 2 Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.
bbf6f052
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21#include "config.h"
22#include "rtl.h"
23#include "tree.h"
24#include "flags.h"
25#include "function.h"
26#include "insn-flags.h"
27#include "insn-codes.h"
28#include "expr.h"
29#include "insn-config.h"
30#include "recog.h"
31#include "output.h"
bbf6f052
RK
32#include "typeclass.h"
33
34#define CEIL(x,y) (((x) + (y) - 1) / (y))
35
36/* Decide whether a function's arguments should be processed
bbc8a071
RK
37 from first to last or from last to first.
38
39 They should if the stack and args grow in opposite directions, but
40 only if we have push insns. */
bbf6f052 41
bbf6f052 42#ifdef PUSH_ROUNDING
bbc8a071 43
3319a347 44#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
bbf6f052
RK
45#define PUSH_ARGS_REVERSED /* If it's last to first */
46#endif
bbc8a071 47
bbf6f052
RK
48#endif
49
50#ifndef STACK_PUSH_CODE
51#ifdef STACK_GROWS_DOWNWARD
52#define STACK_PUSH_CODE PRE_DEC
53#else
54#define STACK_PUSH_CODE PRE_INC
55#endif
56#endif
57
58/* Like STACK_BOUNDARY but in units of bytes, not bits. */
59#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
60
61/* If this is nonzero, we do not bother generating VOLATILE
62 around volatile memory references, and we are willing to
63 output indirect addresses. If cse is to follow, we reject
64 indirect addresses so a useful potential cse is generated;
65 if it is used only once, instruction combination will produce
66 the same indirect address eventually. */
67int cse_not_expected;
68
69/* Nonzero to generate code for all the subroutines within an
70 expression before generating the upper levels of the expression.
71 Nowadays this is never zero. */
72int do_preexpand_calls = 1;
73
74/* Number of units that we should eventually pop off the stack.
75 These are the arguments to function calls that have already returned. */
76int pending_stack_adjust;
77
78/* Nonzero means stack pops must not be deferred, and deferred stack
79 pops must not be output. It is nonzero inside a function call,
80 inside a conditional expression, inside a statement expression,
81 and in other cases as well. */
82int inhibit_defer_pop;
83
84/* A list of all cleanups which belong to the arguments of
85 function calls being expanded by expand_call. */
86tree cleanups_this_call;
87
88/* Nonzero means __builtin_saveregs has already been done in this function.
89 The value is the pseudoreg containing the value __builtin_saveregs
90 returned. */
91static rtx saveregs_value;
92
dcf76fff
TW
93/* Similarly for __builtin_apply_args. */
94static rtx apply_args_value;
95
4969d05d
RK
96/* This structure is used by move_by_pieces to describe the move to
97 be performed. */
98
99struct move_by_pieces
100{
101 rtx to;
102 rtx to_addr;
103 int autinc_to;
104 int explicit_inc_to;
105 rtx from;
106 rtx from_addr;
107 int autinc_from;
108 int explicit_inc_from;
109 int len;
110 int offset;
111 int reverse;
112};
113
114static rtx enqueue_insn PROTO((rtx, rtx));
115static int queued_subexp_p PROTO((rtx));
116static void init_queue PROTO((void));
117static void move_by_pieces PROTO((rtx, rtx, int, int));
118static int move_by_pieces_ninsns PROTO((unsigned int, int));
119static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
120 struct move_by_pieces *));
121static void group_insns PROTO((rtx));
122static void store_constructor PROTO((tree, rtx));
123static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
124 enum machine_mode, int, int, int));
125static tree save_noncopied_parts PROTO((tree, tree));
126static tree init_noncopied_parts PROTO((tree, tree));
127static int safe_from_p PROTO((rtx, tree));
128static int fixed_type_p PROTO((tree));
129static int get_pointer_alignment PROTO((tree, unsigned));
130static tree string_constant PROTO((tree, tree *));
131static tree c_strlen PROTO((tree));
132static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
0006469d
TW
133static int apply_args_size PROTO((void));
134static int apply_result_size PROTO((void));
135static rtx result_vector PROTO((int, rtx));
136static rtx expand_builtin_apply_args PROTO((void));
137static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
138static void expand_builtin_return PROTO((rtx));
4969d05d
RK
139static rtx expand_increment PROTO((tree, int));
140static void preexpand_calls PROTO((tree));
141static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
f81497d9 142static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
4969d05d
RK
143static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
144static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
145static void do_jump_for_compare PROTO((rtx, rtx, rtx));
146static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
147static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
bbf6f052 148
4fa52007
RK
149/* Record for each mode whether we can move a register directly to or
150 from an object of that mode in memory. If we can't, we won't try
151 to use that mode directly when accessing a field of that mode. */
152
153static char direct_load[NUM_MACHINE_MODES];
154static char direct_store[NUM_MACHINE_MODES];
155
bbf6f052
RK
156/* MOVE_RATIO is the number of move instructions that is better than
157 a block move. */
158
159#ifndef MOVE_RATIO
266007a7 160#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
bbf6f052
RK
161#define MOVE_RATIO 2
162#else
163/* A value of around 6 would minimize code size; infinity would minimize
164 execution time. */
165#define MOVE_RATIO 15
166#endif
167#endif
e87b4f3f 168
266007a7 169/* This array records the insn_code of insns to perform block moves. */
e6677db3 170enum insn_code movstr_optab[NUM_MACHINE_MODES];
266007a7 171
e87b4f3f
RS
172/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
173
174#ifndef SLOW_UNALIGNED_ACCESS
175#define SLOW_UNALIGNED_ACCESS 0
176#endif
0006469d
TW
177
178/* Register mappings for target machines without register windows. */
179#ifndef INCOMING_REGNO
180#define INCOMING_REGNO(OUT) (OUT)
181#endif
182#ifndef OUTGOING_REGNO
183#define OUTGOING_REGNO(IN) (IN)
184#endif
bbf6f052 185\f
4fa52007 186/* This is run once per compilation to set up which modes can be used
266007a7 187 directly in memory and to initialize the block move optab. */
4fa52007
RK
188
189void
190init_expr_once ()
191{
192 rtx insn, pat;
193 enum machine_mode mode;
e2549997
RS
194 /* Try indexing by frame ptr and try by stack ptr.
195 It is known that on the Convex the stack ptr isn't a valid index.
196 With luck, one or the other is valid on any machine. */
4fa52007 197 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
e2549997 198 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
4fa52007
RK
199
200 start_sequence ();
201 insn = emit_insn (gen_rtx (SET, 0, 0));
202 pat = PATTERN (insn);
203
204 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
205 mode = (enum machine_mode) ((int) mode + 1))
206 {
207 int regno;
208 rtx reg;
209 int num_clobbers;
210
211 direct_load[(int) mode] = direct_store[(int) mode] = 0;
212 PUT_MODE (mem, mode);
e2549997 213 PUT_MODE (mem1, mode);
4fa52007 214
e6fe56a4
RK
215 /* See if there is some register that can be used in this mode and
216 directly loaded or stored from memory. */
217
7308a047
RS
218 if (mode != VOIDmode && mode != BLKmode)
219 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
220 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
221 regno++)
222 {
223 if (! HARD_REGNO_MODE_OK (regno, mode))
224 continue;
e6fe56a4 225
7308a047 226 reg = gen_rtx (REG, mode, regno);
e6fe56a4 227
7308a047
RS
228 SET_SRC (pat) = mem;
229 SET_DEST (pat) = reg;
230 if (recog (pat, insn, &num_clobbers) >= 0)
231 direct_load[(int) mode] = 1;
e6fe56a4 232
e2549997
RS
233 SET_SRC (pat) = mem1;
234 SET_DEST (pat) = reg;
235 if (recog (pat, insn, &num_clobbers) >= 0)
236 direct_load[(int) mode] = 1;
237
7308a047
RS
238 SET_SRC (pat) = reg;
239 SET_DEST (pat) = mem;
240 if (recog (pat, insn, &num_clobbers) >= 0)
241 direct_store[(int) mode] = 1;
e2549997
RS
242
243 SET_SRC (pat) = reg;
244 SET_DEST (pat) = mem1;
245 if (recog (pat, insn, &num_clobbers) >= 0)
246 direct_store[(int) mode] = 1;
7308a047 247 }
4fa52007
RK
248 }
249
250 end_sequence ();
251}
252
bbf6f052
RK
253/* This is run at the start of compiling a function. */
254
255void
256init_expr ()
257{
258 init_queue ();
259
260 pending_stack_adjust = 0;
261 inhibit_defer_pop = 0;
262 cleanups_this_call = 0;
263 saveregs_value = 0;
0006469d 264 apply_args_value = 0;
e87b4f3f 265 forced_labels = 0;
bbf6f052
RK
266}
267
268/* Save all variables describing the current status into the structure *P.
269 This is used before starting a nested function. */
270
271void
272save_expr_status (p)
273 struct function *p;
274{
275 /* Instead of saving the postincrement queue, empty it. */
276 emit_queue ();
277
278 p->pending_stack_adjust = pending_stack_adjust;
279 p->inhibit_defer_pop = inhibit_defer_pop;
280 p->cleanups_this_call = cleanups_this_call;
281 p->saveregs_value = saveregs_value;
0006469d 282 p->apply_args_value = apply_args_value;
e87b4f3f 283 p->forced_labels = forced_labels;
bbf6f052
RK
284
285 pending_stack_adjust = 0;
286 inhibit_defer_pop = 0;
287 cleanups_this_call = 0;
288 saveregs_value = 0;
0006469d 289 apply_args_value = 0;
e87b4f3f 290 forced_labels = 0;
bbf6f052
RK
291}
292
293/* Restore all variables describing the current status from the structure *P.
294 This is used after a nested function. */
295
296void
297restore_expr_status (p)
298 struct function *p;
299{
300 pending_stack_adjust = p->pending_stack_adjust;
301 inhibit_defer_pop = p->inhibit_defer_pop;
302 cleanups_this_call = p->cleanups_this_call;
303 saveregs_value = p->saveregs_value;
0006469d 304 apply_args_value = p->apply_args_value;
e87b4f3f 305 forced_labels = p->forced_labels;
bbf6f052
RK
306}
307\f
308/* Manage the queue of increment instructions to be output
309 for POSTINCREMENT_EXPR expressions, etc. */
310
311static rtx pending_chain;
312
313/* Queue up to increment (or change) VAR later. BODY says how:
314 BODY should be the same thing you would pass to emit_insn
315 to increment right away. It will go to emit_insn later on.
316
317 The value is a QUEUED expression to be used in place of VAR
318 where you want to guarantee the pre-incrementation value of VAR. */
319
320static rtx
321enqueue_insn (var, body)
322 rtx var, body;
323{
324 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
906c4e36 325 var, NULL_RTX, NULL_RTX, body, pending_chain);
bbf6f052
RK
326 return pending_chain;
327}
328
329/* Use protect_from_queue to convert a QUEUED expression
330 into something that you can put immediately into an instruction.
331 If the queued incrementation has not happened yet,
332 protect_from_queue returns the variable itself.
333 If the incrementation has happened, protect_from_queue returns a temp
334 that contains a copy of the old value of the variable.
335
336 Any time an rtx which might possibly be a QUEUED is to be put
337 into an instruction, it must be passed through protect_from_queue first.
338 QUEUED expressions are not meaningful in instructions.
339
340 Do not pass a value through protect_from_queue and then hold
341 on to it for a while before putting it in an instruction!
342 If the queue is flushed in between, incorrect code will result. */
343
344rtx
345protect_from_queue (x, modify)
346 register rtx x;
347 int modify;
348{
349 register RTX_CODE code = GET_CODE (x);
350
351#if 0 /* A QUEUED can hang around after the queue is forced out. */
352 /* Shortcut for most common case. */
353 if (pending_chain == 0)
354 return x;
355#endif
356
357 if (code != QUEUED)
358 {
359 /* A special hack for read access to (MEM (QUEUED ...))
360 to facilitate use of autoincrement.
361 Make a copy of the contents of the memory location
362 rather than a copy of the address, but not
363 if the value is of mode BLKmode. */
364 if (code == MEM && GET_MODE (x) != BLKmode
365 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
366 {
367 register rtx y = XEXP (x, 0);
368 XEXP (x, 0) = QUEUED_VAR (y);
369 if (QUEUED_INSN (y))
370 {
371 register rtx temp = gen_reg_rtx (GET_MODE (x));
372 emit_insn_before (gen_move_insn (temp, x),
373 QUEUED_INSN (y));
374 return temp;
375 }
376 return x;
377 }
378 /* Otherwise, recursively protect the subexpressions of all
379 the kinds of rtx's that can contain a QUEUED. */
380 if (code == MEM)
381 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
382 else if (code == PLUS || code == MULT)
383 {
384 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
385 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
386 }
387 return x;
388 }
389 /* If the increment has not happened, use the variable itself. */
390 if (QUEUED_INSN (x) == 0)
391 return QUEUED_VAR (x);
392 /* If the increment has happened and a pre-increment copy exists,
393 use that copy. */
394 if (QUEUED_COPY (x) != 0)
395 return QUEUED_COPY (x);
396 /* The increment has happened but we haven't set up a pre-increment copy.
397 Set one up now, and use it. */
398 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
399 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
400 QUEUED_INSN (x));
401 return QUEUED_COPY (x);
402}
403
404/* Return nonzero if X contains a QUEUED expression:
405 if it contains anything that will be altered by a queued increment.
406 We handle only combinations of MEM, PLUS, MINUS and MULT operators
407 since memory addresses generally contain only those. */
408
409static int
410queued_subexp_p (x)
411 rtx x;
412{
413 register enum rtx_code code = GET_CODE (x);
414 switch (code)
415 {
416 case QUEUED:
417 return 1;
418 case MEM:
419 return queued_subexp_p (XEXP (x, 0));
420 case MULT:
421 case PLUS:
422 case MINUS:
423 return queued_subexp_p (XEXP (x, 0))
424 || queued_subexp_p (XEXP (x, 1));
425 }
426 return 0;
427}
428
429/* Perform all the pending incrementations. */
430
431void
432emit_queue ()
433{
434 register rtx p;
435 while (p = pending_chain)
436 {
437 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
438 pending_chain = QUEUED_NEXT (p);
439 }
440}
441
442static void
443init_queue ()
444{
445 if (pending_chain)
446 abort ();
447}
448\f
449/* Copy data from FROM to TO, where the machine modes are not the same.
450 Both modes may be integer, or both may be floating.
451 UNSIGNEDP should be nonzero if FROM is an unsigned type.
452 This causes zero-extension instead of sign-extension. */
453
454void
455convert_move (to, from, unsignedp)
456 register rtx to, from;
457 int unsignedp;
458{
459 enum machine_mode to_mode = GET_MODE (to);
460 enum machine_mode from_mode = GET_MODE (from);
461 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
462 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
463 enum insn_code code;
464 rtx libcall;
465
466 /* rtx code for making an equivalent value. */
467 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
468
469 to = protect_from_queue (to, 1);
470 from = protect_from_queue (from, 0);
471
472 if (to_real != from_real)
473 abort ();
474
1499e0a8
RK
475 /* If FROM is a SUBREG that indicates that we have already done at least
476 the required extension, strip it. We don't handle such SUBREGs as
477 TO here. */
478
479 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
480 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
481 >= GET_MODE_SIZE (to_mode))
482 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
483 from = gen_lowpart (to_mode, from), from_mode = to_mode;
484
485 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
486 abort ();
487
bbf6f052
RK
488 if (to_mode == from_mode
489 || (from_mode == VOIDmode && CONSTANT_P (from)))
490 {
491 emit_move_insn (to, from);
492 return;
493 }
494
495 if (to_real)
496 {
b424402e
RS
497#ifdef HAVE_extendqfhf2
498 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == HFmode)
499 {
500 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
501 return;
502 }
503#endif
504#ifdef HAVE_extendqfsf2
505 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
506 {
507 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
508 return;
509 }
510#endif
511#ifdef HAVE_extendqfdf2
512 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
513 {
514 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
515 return;
516 }
517#endif
518#ifdef HAVE_extendqfxf2
519 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
520 {
521 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
522 return;
523 }
524#endif
525#ifdef HAVE_extendqftf2
526 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
527 {
528 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
529 return;
530 }
531#endif
532
533#ifdef HAVE_extendhfsf2
534 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
535 {
536 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
537 return;
538 }
539#endif
540#ifdef HAVE_extendhfdf2
541 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
542 {
543 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
544 return;
545 }
546#endif
547#ifdef HAVE_extendhfxf2
548 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
549 {
550 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
551 return;
552 }
553#endif
554#ifdef HAVE_extendhftf2
555 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
556 {
557 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
558 return;
559 }
560#endif
561
bbf6f052
RK
562#ifdef HAVE_extendsfdf2
563 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
564 {
565 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
566 return;
567 }
568#endif
b092b471
JW
569#ifdef HAVE_extendsfxf2
570 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
571 {
572 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
573 return;
574 }
575#endif
bbf6f052
RK
576#ifdef HAVE_extendsftf2
577 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
578 {
579 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
580 return;
581 }
582#endif
b092b471
JW
583#ifdef HAVE_extenddfxf2
584 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
585 {
586 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
587 return;
588 }
589#endif
bbf6f052
RK
590#ifdef HAVE_extenddftf2
591 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
592 {
593 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
594 return;
595 }
596#endif
b424402e
RS
597
598#ifdef HAVE_trunchfqf2
599 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
600 {
601 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
602 return;
603 }
604#endif
605#ifdef HAVE_truncsfqf2
606 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
607 {
608 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
609 return;
610 }
611#endif
612#ifdef HAVE_truncdfqf2
613 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
614 {
615 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
616 return;
617 }
618#endif
619#ifdef HAVE_truncxfqf2
620 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
621 {
622 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
623 return;
624 }
625#endif
626#ifdef HAVE_trunctfqf2
627 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
628 {
629 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
630 return;
631 }
632#endif
633#ifdef HAVE_truncsfhf2
634 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
635 {
636 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
637 return;
638 }
639#endif
640#ifdef HAVE_truncdfhf2
641 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
642 {
643 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
644 return;
645 }
646#endif
647#ifdef HAVE_truncxfhf2
648 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
649 {
650 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
651 return;
652 }
653#endif
654#ifdef HAVE_trunctfhf2
655 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
656 {
657 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
658 return;
659 }
660#endif
bbf6f052
RK
661#ifdef HAVE_truncdfsf2
662 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
663 {
664 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
665 return;
666 }
667#endif
b092b471
JW
668#ifdef HAVE_truncxfsf2
669 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
670 {
671 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
672 return;
673 }
674#endif
bbf6f052
RK
675#ifdef HAVE_trunctfsf2
676 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
677 {
678 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
679 return;
680 }
681#endif
b092b471
JW
682#ifdef HAVE_truncxfdf2
683 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
684 {
685 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
686 return;
687 }
688#endif
bbf6f052
RK
689#ifdef HAVE_trunctfdf2
690 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
691 {
692 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
693 return;
694 }
695#endif
696
b092b471
JW
697 libcall = (rtx) 0;
698 switch (from_mode)
699 {
700 case SFmode:
701 switch (to_mode)
702 {
703 case DFmode:
704 libcall = extendsfdf2_libfunc;
705 break;
706
707 case XFmode:
708 libcall = extendsfxf2_libfunc;
709 break;
710
711 case TFmode:
712 libcall = extendsftf2_libfunc;
713 break;
714 }
715 break;
716
717 case DFmode:
718 switch (to_mode)
719 {
720 case SFmode:
721 libcall = truncdfsf2_libfunc;
722 break;
723
724 case XFmode:
725 libcall = extenddfxf2_libfunc;
726 break;
727
728 case TFmode:
729 libcall = extenddftf2_libfunc;
730 break;
731 }
732 break;
733
734 case XFmode:
735 switch (to_mode)
736 {
737 case SFmode:
738 libcall = truncxfsf2_libfunc;
739 break;
740
741 case DFmode:
742 libcall = truncxfdf2_libfunc;
743 break;
744 }
745 break;
746
747 case TFmode:
748 switch (to_mode)
749 {
750 case SFmode:
751 libcall = trunctfsf2_libfunc;
752 break;
753
754 case DFmode:
755 libcall = trunctfdf2_libfunc;
756 break;
757 }
758 break;
759 }
760
761 if (libcall == (rtx) 0)
762 /* This conversion is not implemented yet. */
bbf6f052
RK
763 abort ();
764
e87b4f3f 765 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
bbf6f052
RK
766 emit_move_insn (to, hard_libcall_value (to_mode));
767 return;
768 }
769
770 /* Now both modes are integers. */
771
772 /* Handle expanding beyond a word. */
773 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
774 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
775 {
776 rtx insns;
777 rtx lowpart;
778 rtx fill_value;
779 rtx lowfrom;
780 int i;
781 enum machine_mode lowpart_mode;
782 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
783
784 /* Try converting directly if the insn is supported. */
785 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
786 != CODE_FOR_nothing)
787 {
cd1b4b44
RK
788 /* If FROM is a SUBREG, put it into a register. Do this
789 so that we always generate the same set of insns for
790 better cse'ing; if an intermediate assignment occurred,
791 we won't be doing the operation directly on the SUBREG. */
792 if (optimize > 0 && GET_CODE (from) == SUBREG)
793 from = force_reg (from_mode, from);
bbf6f052
RK
794 emit_unop_insn (code, to, from, equiv_code);
795 return;
796 }
797 /* Next, try converting via full word. */
798 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
799 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
800 != CODE_FOR_nothing))
801 {
802 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
803 emit_unop_insn (code, to,
804 gen_lowpart (word_mode, to), equiv_code);
805 return;
806 }
807
808 /* No special multiword conversion insn; do it by hand. */
809 start_sequence ();
810
811 /* Get a copy of FROM widened to a word, if necessary. */
812 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
813 lowpart_mode = word_mode;
814 else
815 lowpart_mode = from_mode;
816
817 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
818
819 lowpart = gen_lowpart (lowpart_mode, to);
820 emit_move_insn (lowpart, lowfrom);
821
822 /* Compute the value to put in each remaining word. */
823 if (unsignedp)
824 fill_value = const0_rtx;
825 else
826 {
827#ifdef HAVE_slt
828 if (HAVE_slt
829 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
830 && STORE_FLAG_VALUE == -1)
831 {
906c4e36
RK
832 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
833 lowpart_mode, 0, 0);
bbf6f052
RK
834 fill_value = gen_reg_rtx (word_mode);
835 emit_insn (gen_slt (fill_value));
836 }
837 else
838#endif
839 {
840 fill_value
841 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
842 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
906c4e36 843 NULL_RTX, 0);
bbf6f052
RK
844 fill_value = convert_to_mode (word_mode, fill_value, 1);
845 }
846 }
847
848 /* Fill the remaining words. */
849 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
850 {
851 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
852 rtx subword = operand_subword (to, index, 1, to_mode);
853
854 if (subword == 0)
855 abort ();
856
857 if (fill_value != subword)
858 emit_move_insn (subword, fill_value);
859 }
860
861 insns = get_insns ();
862 end_sequence ();
863
906c4e36 864 emit_no_conflict_block (insns, to, from, NULL_RTX,
2abec1b7 865 gen_rtx (equiv_code, to_mode, copy_rtx (from)));
bbf6f052
RK
866 return;
867 }
868
d3c64ee3
RS
869 /* Truncating multi-word to a word or less. */
870 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
871 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
bbf6f052 872 {
431a6eca
JW
873 if (!((GET_CODE (from) == MEM
874 && ! MEM_VOLATILE_P (from)
875 && direct_load[(int) to_mode]
876 && ! mode_dependent_address_p (XEXP (from, 0)))
877 || GET_CODE (from) == REG
878 || GET_CODE (from) == SUBREG))
879 from = force_reg (from_mode, from);
bbf6f052
RK
880 convert_move (to, gen_lowpart (word_mode, from), 0);
881 return;
882 }
883
884 /* Handle pointer conversion */ /* SPEE 900220 */
885 if (to_mode == PSImode)
886 {
887 if (from_mode != SImode)
888 from = convert_to_mode (SImode, from, unsignedp);
889
890#ifdef HAVE_truncsipsi
891 if (HAVE_truncsipsi)
892 {
893 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
894 return;
895 }
896#endif /* HAVE_truncsipsi */
897 abort ();
898 }
899
900 if (from_mode == PSImode)
901 {
902 if (to_mode != SImode)
903 {
904 from = convert_to_mode (SImode, from, unsignedp);
905 from_mode = SImode;
906 }
907 else
908 {
909#ifdef HAVE_extendpsisi
910 if (HAVE_extendpsisi)
911 {
912 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
913 return;
914 }
915#endif /* HAVE_extendpsisi */
916 abort ();
917 }
918 }
919
920 /* Now follow all the conversions between integers
921 no more than a word long. */
922
923 /* For truncation, usually we can just refer to FROM in a narrower mode. */
924 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
925 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
d3c64ee3 926 GET_MODE_BITSIZE (from_mode)))
bbf6f052 927 {
d3c64ee3
RS
928 if (!((GET_CODE (from) == MEM
929 && ! MEM_VOLATILE_P (from)
930 && direct_load[(int) to_mode]
931 && ! mode_dependent_address_p (XEXP (from, 0)))
932 || GET_CODE (from) == REG
933 || GET_CODE (from) == SUBREG))
934 from = force_reg (from_mode, from);
bbf6f052
RK
935 emit_move_insn (to, gen_lowpart (to_mode, from));
936 return;
937 }
938
d3c64ee3 939 /* Handle extension. */
bbf6f052
RK
940 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
941 {
942 /* Convert directly if that works. */
943 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
944 != CODE_FOR_nothing)
945 {
3dc4195c
RK
946 /* If FROM is a SUBREG, put it into a register. Do this
947 so that we always generate the same set of insns for
948 better cse'ing; if an intermediate assignment occurred,
949 we won't be doing the operation directly on the SUBREG. */
950 if (optimize > 0 && GET_CODE (from) == SUBREG)
951 from = force_reg (from_mode, from);
bbf6f052
RK
952 emit_unop_insn (code, to, from, equiv_code);
953 return;
954 }
955 else
956 {
957 enum machine_mode intermediate;
958
959 /* Search for a mode to convert via. */
960 for (intermediate = from_mode; intermediate != VOIDmode;
961 intermediate = GET_MODE_WIDER_MODE (intermediate))
962 if ((can_extend_p (to_mode, intermediate, unsignedp)
963 != CODE_FOR_nothing)
964 && (can_extend_p (intermediate, from_mode, unsignedp)
965 != CODE_FOR_nothing))
966 {
967 convert_move (to, convert_to_mode (intermediate, from,
968 unsignedp), unsignedp);
969 return;
970 }
971
972 /* No suitable intermediate mode. */
973 abort ();
974 }
975 }
976
977 /* Support special truncate insns for certain modes. */
978
979 if (from_mode == DImode && to_mode == SImode)
980 {
981#ifdef HAVE_truncdisi2
982 if (HAVE_truncdisi2)
983 {
984 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
985 return;
986 }
987#endif
988 convert_move (to, force_reg (from_mode, from), unsignedp);
989 return;
990 }
991
992 if (from_mode == DImode && to_mode == HImode)
993 {
994#ifdef HAVE_truncdihi2
995 if (HAVE_truncdihi2)
996 {
997 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
998 return;
999 }
1000#endif
1001 convert_move (to, force_reg (from_mode, from), unsignedp);
1002 return;
1003 }
1004
1005 if (from_mode == DImode && to_mode == QImode)
1006 {
1007#ifdef HAVE_truncdiqi2
1008 if (HAVE_truncdiqi2)
1009 {
1010 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1011 return;
1012 }
1013#endif
1014 convert_move (to, force_reg (from_mode, from), unsignedp);
1015 return;
1016 }
1017
1018 if (from_mode == SImode && to_mode == HImode)
1019 {
1020#ifdef HAVE_truncsihi2
1021 if (HAVE_truncsihi2)
1022 {
1023 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1024 return;
1025 }
1026#endif
1027 convert_move (to, force_reg (from_mode, from), unsignedp);
1028 return;
1029 }
1030
1031 if (from_mode == SImode && to_mode == QImode)
1032 {
1033#ifdef HAVE_truncsiqi2
1034 if (HAVE_truncsiqi2)
1035 {
1036 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1037 return;
1038 }
1039#endif
1040 convert_move (to, force_reg (from_mode, from), unsignedp);
1041 return;
1042 }
1043
1044 if (from_mode == HImode && to_mode == QImode)
1045 {
1046#ifdef HAVE_trunchiqi2
1047 if (HAVE_trunchiqi2)
1048 {
1049 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1050 return;
1051 }
1052#endif
1053 convert_move (to, force_reg (from_mode, from), unsignedp);
1054 return;
1055 }
1056
1057 /* Handle truncation of volatile memrefs, and so on;
1058 the things that couldn't be truncated directly,
1059 and for which there was no special instruction. */
1060 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1061 {
1062 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1063 emit_move_insn (to, temp);
1064 return;
1065 }
1066
1067 /* Mode combination is not recognized. */
1068 abort ();
1069}
1070
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  /* Convenience wrapper: passing VOIDmode as OLDMODE tells convert_modes
     to take the old mode from X itself.  */
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
1089
1090/* Return an rtx for a value that would result
1091 from converting X from mode OLDMODE to mode MODE.
1092 Both modes may be floating, or both integer.
1093 UNSIGNEDP is nonzero if X is an unsigned value.
1094
1095 This can be done by referring to a part of X in place
1096 or by copying to a new temporary with conversion.
1097
1098 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1099
1100 This function *must not* call protect_from_queue
1101 except when putting X into an insn (in which case convert_move does it). */
1102
1103rtx
1104convert_modes (mode, oldmode, x, unsignedp)
1105 enum machine_mode mode, oldmode;
1106 rtx x;
1107 int unsignedp;
bbf6f052
RK
1108{
1109 register rtx temp;
5ffe63ed
RS
1110
1111 if (GET_MODE (x) != mode)
1112 oldmode = GET_MODE (x);
1113 /* If X doesnt have a mode, and we didn't specify one,
1114 we have a potential bug, so crash now and get it fixed. */
1115 if (oldmode == VOIDmode)
1116 abort ();
1499e0a8
RK
1117
1118 /* If FROM is a SUBREG that indicates that we have already done at least
1119 the required extension, strip it. */
1120
1121 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1122 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1123 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1124 x = gen_lowpart (mode, x);
bbf6f052 1125
5ffe63ed 1126 if (mode == oldmode)
bbf6f052
RK
1127 return x;
1128
1129 /* There is one case that we must handle specially: If we are converting
906c4e36 1130 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
bbf6f052
RK
1131 we are to interpret the constant as unsigned, gen_lowpart will do
1132 the wrong if the constant appears negative. What we want to do is
1133 make the high-order word of the constant zero, not all ones. */
1134
1135 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
906c4e36 1136 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
bbf6f052 1137 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
906c4e36 1138 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
bbf6f052
RK
1139
1140 /* We can do this with a gen_lowpart if both desired and current modes
1141 are integer, and this is either a constant integer, a register, or a
1142 non-volatile MEM. Except for the constant case, we must be narrowing
1143 the operand. */
1144
1145 if (GET_CODE (x) == CONST_INT
1146 || (GET_MODE_CLASS (mode) == MODE_INT
5ffe63ed 1147 && GET_MODE_CLASS (oldmode) == MODE_INT
bbf6f052 1148 && (GET_CODE (x) == CONST_DOUBLE
5ffe63ed 1149 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
d57c66da
JW
1150 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1151 && direct_load[(int) mode])
bbf6f052
RK
1152 || GET_CODE (x) == REG)))))
1153 return gen_lowpart (mode, x);
1154
1155 temp = gen_reg_rtx (mode);
1156 convert_move (temp, x, unsignedp);
1157 return temp;
1158}
1159\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  /* Widest piece we will try is MOVE_MAX bytes; start one past it so the
     first iteration of the loop below picks MOVE_MAX itself.  */
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  /* Note whether the addresses are already auto-increment/decrement;
     in that case each emitted move advances the address by itself.  */
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  /* A decrementing destination means we copy from high addresses down.  */
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  /* Start one past the end; each move pre-decrements.  */
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  /* If unaligned access is cheap (or alignment is already maximal),
     pretend the blocks are fully aligned so we can use the widest mode.  */
  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      /* Find the widest integer mode strictly narrower than max_size.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
1272
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.
   NOTE(review): this must mirror the mode-selection logic of
   move_by_pieces exactly, or the MOVE_RATIO heuristics that call it
   will misestimate the cost.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  /* Same alignment adjustment as in move_by_pieces: treat the block as
     fully aligned when unaligned access is not penalized.  */
  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      /* Find the widest integer mode strictly narrower than max_size.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	/* Count the full-size pieces; the remainder is handled by
	   narrower modes on later iterations.  */
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
1312
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  /* Emit one MODE-sized move per iteration until fewer than SIZE bytes
     remain; the leftover is handled by the caller with narrower modes.  */
  while (data->len >= size)
    {
      /* When copying backwards, step the offset down before each move.  */
      if (data->reverse) data->offset -= size;

      /* With auto-increment addressing the address register already
	 advances; otherwise address the piece at the current offset.  */
      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      /* Explicit pre-decrement: adjust the address regs before the move.  */
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      /* Explicit post-increment: adjust the address regs after the move.  */
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
1360\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* For small constant sizes, open-coded piecewise moves beat both the
     movstr patterns and a library call (threshold: MOVE_RATIO).  */
  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
	      /* A null predicate means "any operand accepted".  */
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		/* The pattern declined at expansion time; discard any
		   insns it emitted and try a wider mode.  */
		delete_insns_since (last);
	    }
	}

      /* No movstr pattern worked: fall back to a library call.
	 Note the argument order differs between memcpy and bcopy.  */
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif
    }
}
1462\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.
   MODE is the mode of X, used to pick apart its words.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  /* A constant the target can't handle directly must go through memory.  */
  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			   GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    /* The pattern declined; drop anything it emitted and fall back
       to one move per word.  */
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
1497
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      /* Shift the register value left so its SIZE significant bytes land
	 at the low addresses of the word when stored.  */
      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx (REG, word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
			    GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    /* The pattern declined; drop anything it emitted and fall back
       to one move per word.  */
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}
1555
1556/* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1557
1558void
1559use_regs (regno, nregs)
1560 int regno;
1561 int nregs;
1562{
1563 int i;
1564
1565 for (i = 0; i < nregs; i++)
1566 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1567}
7308a047
RS
1568
1569/* Mark the instructions since PREV as a libcall block.
1570 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
1571
f76a70d5 1572static void
7308a047
RS
1573group_insns (prev)
1574 rtx prev;
1575{
1576 rtx insn_first;
1577 rtx insn_last;
1578
1579 /* Find the instructions to mark */
1580 if (prev)
1581 insn_first = NEXT_INSN (prev);
1582 else
1583 insn_first = get_insns ();
1584
1585 insn_last = get_last_insn ();
1586
1587 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1588 REG_NOTES (insn_last));
1589
1590 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1591 REG_NOTES (insn_first));
1592}
bbf6f052
RK
1593\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.
   For non-BLK modes SIZE is ignored and a single zero store is emitted.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
      /* Block clears go through the C library; which entry point exists
	 depends on the target.  Note the differing argument counts:
	 memset takes (ptr, value, len), bzero takes (ptr, len).  */
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
1619
1620/* Generate code to copy Y into X.
1621 Both Y and X must have the same mode, except that
1622 Y can be a constant with VOIDmode.
1623 This mode cannot be BLKmode; use emit_block_move for that.
1624
1625 Return the last instruction emitted. */
1626
1627rtx
1628emit_move_insn (x, y)
1629 rtx x, y;
1630{
1631 enum machine_mode mode = GET_MODE (x);
7308a047
RS
1632 enum machine_mode submode;
1633 enum mode_class class = GET_MODE_CLASS (mode);
bbf6f052
RK
1634 int i;
1635
1636 x = protect_from_queue (x, 1);
1637 y = protect_from_queue (y, 0);
1638
1639 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1640 abort ();
1641
1642 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1643 y = force_const_mem (mode, y);
1644
1645 /* If X or Y are memory references, verify that their addresses are valid
1646 for the machine. */
1647 if (GET_CODE (x) == MEM
1648 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1649 && ! push_operand (x, GET_MODE (x)))
1650 || (flag_force_addr
1651 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1652 x = change_address (x, VOIDmode, XEXP (x, 0));
1653
1654 if (GET_CODE (y) == MEM
1655 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1656 || (flag_force_addr
1657 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1658 y = change_address (y, VOIDmode, XEXP (y, 0));
1659
1660 if (mode == BLKmode)
1661 abort ();
1662
261c4230
RS
1663 return emit_move_insn_1 (x, y);
1664}
1665
1666/* Low level part of emit_move_insn.
1667 Called just like emit_move_insn, but assumes X and Y
1668 are basically valid. */
1669
1670rtx
1671emit_move_insn_1 (x, y)
1672 rtx x, y;
1673{
1674 enum machine_mode mode = GET_MODE (x);
1675 enum machine_mode submode;
1676 enum mode_class class = GET_MODE_CLASS (mode);
1677 int i;
1678
7308a047
RS
1679 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1680 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1681 (class == MODE_COMPLEX_INT
1682 ? MODE_INT : MODE_FLOAT),
1683 0);
1684
bbf6f052
RK
1685 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1686 return
1687 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1688
89742723 1689 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047
RS
1690 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1691 && submode != BLKmode
1692 && (mov_optab->handlers[(int) submode].insn_code
1693 != CODE_FOR_nothing))
1694 {
1695 /* Don't split destination if it is a stack push. */
1696 int stack = push_operand (x, GET_MODE (x));
1697 rtx prev = get_last_insn ();
1698
1699 /* Tell flow that the whole of the destination is being set. */
1700 if (GET_CODE (x) == REG)
1701 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1702
1703 /* If this is a stack, push the highpart first, so it
1704 will be in the argument order.
1705
1706 In that case, change_address is used only to convert
1707 the mode, not to change the address. */
c937357e
RS
1708 if (stack)
1709 {
1710#ifdef STACK_GROWS_DOWNWARD
1711 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1712 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1713 gen_highpart (submode, y)));
1714 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1715 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1716 gen_lowpart (submode, y)));
1717#else
1718 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1719 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1720 gen_lowpart (submode, y)));
1721 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1722 (gen_rtx (MEM, submode, (XEXP (x, 0))),
1723 gen_highpart (submode, y)));
1724#endif
1725 }
1726 else
1727 {
1728 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1729 (gen_highpart (submode, x), gen_highpart (submode, y)));
1730 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1731 (gen_lowpart (submode, x), gen_lowpart (submode, y)));
1732 }
7308a047
RS
1733
1734 group_insns (prev);
7a1ab50a
RS
1735
1736 return get_last_insn ();
7308a047
RS
1737 }
1738
bbf6f052
RK
1739 /* This will handle any multi-word mode that lacks a move_insn pattern.
1740 However, you will get better code if you define such patterns,
1741 even if they must turn into multiple assembler instructions. */
a4320483 1742 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
bbf6f052
RK
1743 {
1744 rtx last_insn = 0;
7308a047 1745 rtx prev_insn = get_last_insn ();
bbf6f052
RK
1746
1747 for (i = 0;
1748 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1749 i++)
1750 {
1751 rtx xpart = operand_subword (x, i, 1, mode);
1752 rtx ypart = operand_subword (y, i, 1, mode);
1753
1754 /* If we can't get a part of Y, put Y into memory if it is a
1755 constant. Otherwise, force it into a register. If we still
1756 can't get a part of Y, abort. */
1757 if (ypart == 0 && CONSTANT_P (y))
1758 {
1759 y = force_const_mem (mode, y);
1760 ypart = operand_subword (y, i, 1, mode);
1761 }
1762 else if (ypart == 0)
1763 ypart = operand_subword_force (y, i, mode);
1764
1765 if (xpart == 0 || ypart == 0)
1766 abort ();
1767
1768 last_insn = emit_move_insn (xpart, ypart);
1769 }
7308a047
RS
1770 /* Mark these insns as a libcall block. */
1771 group_insns (prev_insn);
1772
bbf6f052
RK
1773 return last_insn;
1774 }
1775 else
1776 abort ();
1777}
1778\f
1779/* Pushing data onto the stack. */
1780
1781/* Push a block of length SIZE (perhaps variable)
1782 and return an rtx to address the beginning of the block.
1783 Note that it is not possible for the value returned to be a QUEUED.
1784 The value may be virtual_outgoing_args_rtx.
1785
1786 EXTRA is the number of bytes of padding to push in addition to SIZE.
1787 BELOW nonzero means this padding comes at low addresses;
1788 otherwise, the padding comes at high addresses. */
1789
1790rtx
1791push_block (size, extra, below)
1792 rtx size;
1793 int extra, below;
1794{
1795 register rtx temp;
1796 if (CONSTANT_P (size))
1797 anti_adjust_stack (plus_constant (size, extra));
1798 else if (GET_CODE (size) == REG && extra == 0)
1799 anti_adjust_stack (size);
1800 else
1801 {
1802 rtx temp = copy_to_mode_reg (Pmode, size);
1803 if (extra != 0)
906c4e36 1804 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
1805 temp, 0, OPTAB_LIB_WIDEN);
1806 anti_adjust_stack (temp);
1807 }
1808
1809#ifdef STACK_GROWS_DOWNWARD
1810 temp = virtual_outgoing_args_rtx;
1811 if (extra != 0 && below)
1812 temp = plus_constant (temp, extra);
1813#else
1814 if (GET_CODE (size) == CONST_INT)
1815 temp = plus_constant (virtual_outgoing_args_rtx,
1816 - INTVAL (size) - (below ? 0 : extra));
1817 else if (extra != 0 && !below)
1818 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1819 negate_rtx (Pmode, plus_constant (size, extra)));
1820 else
1821 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1822 negate_rtx (Pmode, size));
1823#endif
1824
1825 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1826}
1827
87e38d84 1828rtx
bbf6f052
RK
1829gen_push_operand ()
1830{
1831 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1832}
1833
1834/* Generate code to push X onto the stack, assuming it has mode MODE and
1835 type TYPE.
1836 MODE is redundant except when X is a CONST_INT (since they don't
1837 carry mode info).
1838 SIZE is an rtx for the size of data to be copied (in bytes),
1839 needed only if X is BLKmode.
1840
1841 ALIGN (in bytes) is maximum alignment we can assume.
1842
cd048831
RK
1843 If PARTIAL and REG are both nonzero, then copy that many of the first
1844 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
1845 The amount of space pushed is decreased by PARTIAL words,
1846 rounded *down* to a multiple of PARM_BOUNDARY.
1847 REG must be a hard register in this case.
cd048831
RK
 1848 If REG is zero but PARTIAL is not, take all other actions for an
1849 argument partially in registers, but do not actually load any
1850 registers.
bbf6f052
RK
1851
1852 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 1853 This is ignored if an argument block has already been allocated.
bbf6f052
RK
1854
1855 On a machine that lacks real push insns, ARGS_ADDR is the address of
1856 the bottom of the argument block for this call. We use indexing off there
 1857 to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
1858 argument block has not been preallocated.
1859
1860 ARGS_SO_FAR is the size of args previously pushed for this call. */
1861
1862void
1863emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1864 args_addr, args_so_far)
1865 register rtx x;
1866 enum machine_mode mode;
1867 tree type;
1868 rtx size;
1869 int align;
1870 int partial;
1871 rtx reg;
1872 int extra;
1873 rtx args_addr;
1874 rtx args_so_far;
1875{
1876 rtx xinner;
1877 enum direction stack_direction
1878#ifdef STACK_GROWS_DOWNWARD
1879 = downward;
1880#else
1881 = upward;
1882#endif
1883
1884 /* Decide where to pad the argument: `downward' for below,
1885 `upward' for above, or `none' for don't pad it.
1886 Default is below for small data on big-endian machines; else above. */
1887 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1888
1889 /* Invert direction if stack is post-update. */
1890 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1891 if (where_pad != none)
1892 where_pad = (where_pad == downward ? upward : downward);
1893
1894 xinner = x = protect_from_queue (x, 0);
1895
1896 if (mode == BLKmode)
1897 {
1898 /* Copy a block into the stack, entirely or partially. */
1899
1900 register rtx temp;
1901 int used = partial * UNITS_PER_WORD;
1902 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1903 int skip;
1904
1905 if (size == 0)
1906 abort ();
1907
1908 used -= offset;
1909
1910 /* USED is now the # of bytes we need not copy to the stack
1911 because registers will take care of them. */
1912
1913 if (partial != 0)
1914 xinner = change_address (xinner, BLKmode,
1915 plus_constant (XEXP (xinner, 0), used));
1916
1917 /* If the partial register-part of the arg counts in its stack size,
1918 skip the part of stack space corresponding to the registers.
1919 Otherwise, start copying to the beginning of the stack space,
1920 by setting SKIP to 0. */
1921#ifndef REG_PARM_STACK_SPACE
1922 skip = 0;
1923#else
1924 skip = used;
1925#endif
1926
1927#ifdef PUSH_ROUNDING
1928 /* Do it with several push insns if that doesn't take lots of insns
1929 and if there is no difficulty with push insns that skip bytes
1930 on the stack for alignment purposes. */
1931 if (args_addr == 0
1932 && GET_CODE (size) == CONST_INT
1933 && skip == 0
1934 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1935 < MOVE_RATIO)
bbf6f052
RK
1936 /* Here we avoid the case of a structure whose weak alignment
1937 forces many pushes of a small amount of data,
1938 and such small pushes do rounding that causes trouble. */
e87b4f3f
RS
1939 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1940 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 1941 || PUSH_ROUNDING (align) == align)
bbf6f052
RK
1942 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1943 {
1944 /* Push padding now if padding above and stack grows down,
1945 or if padding below and stack grows up.
1946 But if space already allocated, this has already been done. */
1947 if (extra && args_addr == 0
1948 && where_pad != none && where_pad != stack_direction)
906c4e36 1949 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
1950
1951 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1952 INTVAL (size) - used, align);
1953 }
1954 else
1955#endif /* PUSH_ROUNDING */
1956 {
1957 /* Otherwise make space on the stack and copy the data
1958 to the address of that space. */
1959
1960 /* Deduct words put into registers from the size we must copy. */
1961 if (partial != 0)
1962 {
1963 if (GET_CODE (size) == CONST_INT)
906c4e36 1964 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
1965 else
1966 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
1967 GEN_INT (used), NULL_RTX, 0,
1968 OPTAB_LIB_WIDEN);
bbf6f052
RK
1969 }
1970
1971 /* Get the address of the stack space.
1972 In this case, we do not deal with EXTRA separately.
1973 A single stack adjust will do. */
1974 if (! args_addr)
1975 {
1976 temp = push_block (size, extra, where_pad == downward);
1977 extra = 0;
1978 }
1979 else if (GET_CODE (args_so_far) == CONST_INT)
1980 temp = memory_address (BLKmode,
1981 plus_constant (args_addr,
1982 skip + INTVAL (args_so_far)));
1983 else
1984 temp = memory_address (BLKmode,
1985 plus_constant (gen_rtx (PLUS, Pmode,
1986 args_addr, args_so_far),
1987 skip));
1988
1989 /* TEMP is the address of the block. Copy the data there. */
1990 if (GET_CODE (size) == CONST_INT
1991 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1992 < MOVE_RATIO))
1993 {
1994 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1995 INTVAL (size), align);
1996 goto ret;
1997 }
1998 /* Try the most limited insn first, because there's no point
1999 including more than one in the machine description unless
2000 the more limited one has some advantage. */
2001#ifdef HAVE_movstrqi
2002 if (HAVE_movstrqi
2003 && GET_CODE (size) == CONST_INT
2004 && ((unsigned) INTVAL (size)
2005 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
2006 {
c841050e
RS
2007 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
2008 xinner, size, GEN_INT (align));
2009 if (pat != 0)
2010 {
2011 emit_insn (pat);
2012 goto ret;
2013 }
bbf6f052
RK
2014 }
2015#endif
2016#ifdef HAVE_movstrhi
2017 if (HAVE_movstrhi
2018 && GET_CODE (size) == CONST_INT
2019 && ((unsigned) INTVAL (size)
2020 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
2021 {
c841050e
RS
2022 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
2023 xinner, size, GEN_INT (align));
2024 if (pat != 0)
2025 {
2026 emit_insn (pat);
2027 goto ret;
2028 }
bbf6f052
RK
2029 }
2030#endif
2031#ifdef HAVE_movstrsi
2032 if (HAVE_movstrsi)
2033 {
c841050e
RS
2034 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
2035 xinner, size, GEN_INT (align));
2036 if (pat != 0)
2037 {
2038 emit_insn (pat);
2039 goto ret;
2040 }
bbf6f052
RK
2041 }
2042#endif
2043#ifdef HAVE_movstrdi
2044 if (HAVE_movstrdi)
2045 {
c841050e
RS
2046 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
2047 xinner, size, GEN_INT (align));
2048 if (pat != 0)
2049 {
2050 emit_insn (pat);
2051 goto ret;
2052 }
bbf6f052
RK
2053 }
2054#endif
2055
2056#ifndef ACCUMULATE_OUTGOING_ARGS
2057 /* If the source is referenced relative to the stack pointer,
2058 copy it to another register to stabilize it. We do not need
2059 to do this if we know that we won't be changing sp. */
2060
2061 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2062 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2063 temp = copy_to_reg (temp);
2064#endif
2065
2066 /* Make inhibit_defer_pop nonzero around the library call
2067 to force it to pop the bcopy-arguments right away. */
2068 NO_DEFER_POP;
2069#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2070 emit_library_call (memcpy_libfunc, 0,
bbf6f052 2071 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
0fa83258
RK
2072 convert_to_mode (TYPE_MODE (sizetype),
2073 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2074 TYPE_MODE (sizetype));
bbf6f052 2075#else
d562e42e 2076 emit_library_call (bcopy_libfunc, 0,
bbf6f052 2077 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
0fa83258
RK
2078 convert_to_mode (TYPE_MODE (sizetype),
2079 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2080 TYPE_MODE (sizetype));
bbf6f052
RK
2081#endif
2082 OK_DEFER_POP;
2083 }
2084 }
2085 else if (partial > 0)
2086 {
2087 /* Scalar partly in registers. */
2088
2089 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2090 int i;
2091 int not_stack;
2092 /* # words of start of argument
2093 that we must make space for but need not store. */
2094 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2095 int args_offset = INTVAL (args_so_far);
2096 int skip;
2097
2098 /* Push padding now if padding above and stack grows down,
2099 or if padding below and stack grows up.
2100 But if space already allocated, this has already been done. */
2101 if (extra && args_addr == 0
2102 && where_pad != none && where_pad != stack_direction)
906c4e36 2103 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2104
2105 /* If we make space by pushing it, we might as well push
2106 the real data. Otherwise, we can leave OFFSET nonzero
2107 and leave the space uninitialized. */
2108 if (args_addr == 0)
2109 offset = 0;
2110
2111 /* Now NOT_STACK gets the number of words that we don't need to
2112 allocate on the stack. */
2113 not_stack = partial - offset;
2114
2115 /* If the partial register-part of the arg counts in its stack size,
2116 skip the part of stack space corresponding to the registers.
2117 Otherwise, start copying to the beginning of the stack space,
2118 by setting SKIP to 0. */
2119#ifndef REG_PARM_STACK_SPACE
2120 skip = 0;
2121#else
2122 skip = not_stack;
2123#endif
2124
2125 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2126 x = validize_mem (force_const_mem (mode, x));
2127
2128 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2129 SUBREGs of such registers are not allowed. */
2130 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2131 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2132 x = copy_to_reg (x);
2133
2134 /* Loop over all the words allocated on the stack for this arg. */
2135 /* We can do it by words, because any scalar bigger than a word
2136 has a size a multiple of a word. */
2137#ifndef PUSH_ARGS_REVERSED
2138 for (i = not_stack; i < size; i++)
2139#else
2140 for (i = size - 1; i >= not_stack; i--)
2141#endif
2142 if (i >= not_stack + offset)
2143 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
2144 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2145 0, args_addr,
2146 GEN_INT (args_offset + ((i - not_stack + skip)
bbf6f052
RK
2147 * UNITS_PER_WORD)));
2148 }
2149 else
2150 {
2151 rtx addr;
2152
2153 /* Push padding now if padding above and stack grows down,
2154 or if padding below and stack grows up.
2155 But if space already allocated, this has already been done. */
2156 if (extra && args_addr == 0
2157 && where_pad != none && where_pad != stack_direction)
906c4e36 2158 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2159
2160#ifdef PUSH_ROUNDING
2161 if (args_addr == 0)
2162 addr = gen_push_operand ();
2163 else
2164#endif
2165 if (GET_CODE (args_so_far) == CONST_INT)
2166 addr
2167 = memory_address (mode,
2168 plus_constant (args_addr, INTVAL (args_so_far)));
2169 else
2170 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2171 args_so_far));
2172
2173 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2174 }
2175
2176 ret:
2177 /* If part should go in registers, copy that part
2178 into the appropriate registers. Do this now, at the end,
2179 since mem-to-mem copies above may do function calls. */
cd048831 2180 if (partial > 0 && reg != 0)
bbf6f052
RK
2181 move_block_to_reg (REGNO (reg), x, partial, mode);
2182
2183 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 2184 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2185}
2186\f
bbf6f052
RK
2187/* Expand an assignment that stores the value of FROM into TO.
2188 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
2189 (This may contain a QUEUED rtx;
2190 if the value is constant, this rtx is a constant.)
2191 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
2192
2193 SUGGEST_REG is no longer actually used.
2194 It used to mean, copy the value through a register
2195 and return that register, if that is possible.
709f5be1 2196 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
2197
2198rtx
2199expand_assignment (to, from, want_value, suggest_reg)
2200 tree to, from;
2201 int want_value;
2202 int suggest_reg;
2203{
2204 register rtx to_rtx = 0;
2205 rtx result;
2206
2207 /* Don't crash if the lhs of the assignment was erroneous. */
2208
2209 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
2210 {
2211 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2212 return want_value ? result : NULL_RTX;
2213 }
bbf6f052
RK
2214
2215 /* Assignment of a structure component needs special treatment
2216 if the structure component's rtx is not simply a MEM.
2217 Assignment of an array element at a constant index
2218 has the same problem. */
2219
2220 if (TREE_CODE (to) == COMPONENT_REF
2221 || TREE_CODE (to) == BIT_FIELD_REF
2222 || (TREE_CODE (to) == ARRAY_REF
2223 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2224 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2225 {
2226 enum machine_mode mode1;
2227 int bitsize;
2228 int bitpos;
7bb0943f 2229 tree offset;
bbf6f052
RK
2230 int unsignedp;
2231 int volatilep = 0;
7bb0943f 2232 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
bbf6f052
RK
2233 &mode1, &unsignedp, &volatilep);
2234
2235 /* If we are going to use store_bit_field and extract_bit_field,
2236 make sure to_rtx will be safe for multiple use. */
2237
2238 if (mode1 == VOIDmode && want_value)
2239 tem = stabilize_reference (tem);
2240
906c4e36 2241 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
2242 if (offset != 0)
2243 {
906c4e36 2244 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
2245
	  /* A variable offset is only meaningful when the containing
	     object is in memory; anything else is an internal error.  */
2246 if (GET_CODE (to_rtx) != MEM)
2247 abort ();
2248 to_rtx = change_address (to_rtx, VOIDmode,
2249 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2250 force_reg (Pmode, offset_rtx)));
2251 }
bbf6f052
RK
2252 if (volatilep)
2253 {
2254 if (GET_CODE (to_rtx) == MEM)
2255 MEM_VOLATILE_P (to_rtx) = 1;
2256#if 0 /* This was turned off because, when a field is volatile
2257 in an object which is not volatile, the object may be in a register,
2258 and then we would abort over here. */
2259 else
2260 abort ();
2261#endif
2262 }
2263
2264 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2265 (want_value
2266 /* Spurious cast makes HPUX compiler happy. */
2267 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2268 : VOIDmode),
2269 unsignedp,
2270 /* Required alignment of containing datum. */
2271 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2272 int_size_in_bytes (TREE_TYPE (tem)));
2273 preserve_temp_slots (result);
2274 free_temp_slots ();
2275
709f5be1
RS
2276 /* If the value is meaningful, convert RESULT to the proper mode.
2277 Otherwise, return nothing. */
5ffe63ed
RS
2278 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2279 TYPE_MODE (TREE_TYPE (from)),
2280 result,
2281 TREE_UNSIGNED (TREE_TYPE (to)))
709f5be1 2282 : NULL_RTX);
bbf6f052
RK
2283 }
2284
cd1db108
RS
2285 /* If the rhs is a function call and its value is not an aggregate,
2286 call the function before we start to compute the lhs.
2287 This is needed for correct code for cases such as
2288 val = setjmp (buf) on machines where reference to val
2289 requires loading up part of an address in a separate insn. */
2290 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from))
2291 {
2292 rtx value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2293 if (to_rtx == 0)
2294 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2295 emit_move_insn (to_rtx, value);
2296 preserve_temp_slots (to_rtx);
2297 free_temp_slots ();
709f5be1 2298 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
2299 }
2300
bbf6f052
RK
2301 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2302 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2303
2304 if (to_rtx == 0)
906c4e36 2305 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
bbf6f052 2306
86d38d25
RS
2307 /* Don't move directly into a return register. */
2308 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2309 {
66538193 2310 rtx temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
86d38d25
RS
2311 emit_move_insn (to_rtx, temp);
2312 preserve_temp_slots (to_rtx);
2313 free_temp_slots ();
709f5be1 2314 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
2315 }
2316
bbf6f052
RK
2317 /* In case we are returning the contents of an object which overlaps
2318 the place the value is being stored, use a safe function when copying
2319 a value through a pointer into a structure value return block. */
2320 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2321 && current_function_returns_struct
2322 && !current_function_returns_pcc_struct)
2323 {
906c4e36 2324 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
2325 rtx size = expr_size (from);
2326
      /* memcpy takes (dest, src, size) while bcopy takes (src, dest, size);
	 hence the differing argument order in the two calls below.  */
2327#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2328 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
2329 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2330 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
2331 convert_to_mode (TYPE_MODE (sizetype),
2332 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2333 TYPE_MODE (sizetype));
bbf6f052 2334#else
d562e42e 2335 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
2336 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2337 XEXP (to_rtx, 0), Pmode,
0fa83258
RK
2338 convert_to_mode (TYPE_MODE (sizetype),
2339 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2340 TYPE_MODE (sizetype));
bbf6f052
RK
2341#endif
2342
2343 preserve_temp_slots (to_rtx);
2344 free_temp_slots ();
709f5be1 2345 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
2346 }
2347
2348 /* Compute FROM and store the value in the rtx we got. */
2349
2350 result = store_expr (from, to_rtx, want_value);
2351 preserve_temp_slots (result);
2352 free_temp_slots ();
709f5be1 2353 return want_value ? result : NULL_RTX;
bbf6f052
RK
2354}
2355
2356/* Generate code for computing expression EXP,
2357 and storing the value into TARGET.
bbf6f052
RK
2358 TARGET may contain a QUEUED rtx.
2359
709f5be1
RS
2360 If WANT_VALUE is nonzero, return a copy of the value
2361 not in TARGET, so that we can be sure to use the proper
2362 value in a containing expression even if TARGET has something
2363 else stored in it. If possible, we copy the value through a pseudo
2364 and return that pseudo. Or, if the value is constant, we try to
2365 return the constant. In some cases, we return a pseudo
2366 copied *from* TARGET.
2367
2368 If the mode is BLKmode then we may return TARGET itself.
 2369 It turns out that in BLKmode it doesn't cause a problem,
2370 because C has no operators that could combine two different
2371 assignments into the same BLKmode object with different values
2372 with no sequence point. Will other languages need this to
2373 be more thorough?
2374
2375 If WANT_VALUE is 0, we return NULL, to make sure
2376 to catch quickly any cases where the caller uses the value
2377 and fails to set WANT_VALUE. */
bbf6f052
RK
2378
2379rtx
709f5be1 2380store_expr (exp, target, want_value)
bbf6f052
RK
2381 register tree exp;
2382 register rtx target;
709f5be1 2383 int want_value;
bbf6f052
RK
2384{
2385 register rtx temp;
  /* Nonzero means TARGET itself must not be handed back as the value;
     TEMP is returned instead (see the final return sequence below).  */
2386 int dont_return_target = 0;
2387
2388 if (TREE_CODE (exp) == COMPOUND_EXPR)
2389 {
2390 /* Perform first part of compound expression, then assign from second
2391 part. */
2392 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2393 emit_queue ();
709f5be1 2394 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
2395 }
2396 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2397 {
2398 /* For conditional expression, get safe form of the target. Then
2399 test the condition, doing the appropriate assignment on either
2400 side. This avoids the creation of unnecessary temporaries.
2401 For non-BLKmode, it is more efficient not to do this. */
2402
2403 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2404
2405 emit_queue ();
2406 target = protect_from_queue (target, 1);
2407
2408 NO_DEFER_POP;
2409 jumpifnot (TREE_OPERAND (exp, 0), lab1);
709f5be1 2410 store_expr (TREE_OPERAND (exp, 1), target, 0);
bbf6f052
RK
2411 emit_queue ();
2412 emit_jump_insn (gen_jump (lab2));
2413 emit_barrier ();
2414 emit_label (lab1);
709f5be1 2415 store_expr (TREE_OPERAND (exp, 2), target, 0);
bbf6f052
RK
2416 emit_queue ();
2417 emit_label (lab2);
2418 OK_DEFER_POP;
709f5be1 2419 return want_value ? target : NULL_RTX;
bbf6f052 2420 }
709f5be1 2421 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
bbf6f052
RK
2422 && GET_MODE (target) != BLKmode)
2423 /* If target is in memory and caller wants value in a register instead,
2424 arrange that. Pass TARGET as target for expand_expr so that,
709f5be1 2425 if EXP is another assignment, WANT_VALUE will be nonzero for it.
c2e6aff6
RS
2426 We know expand_expr will not use the target in that case.
2427 Don't do this if TARGET is volatile because we are supposed
2428 to write it and then read it. */
bbf6f052 2429 {
906c4e36 2430 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
bbf6f052
RK
2431 GET_MODE (target), 0);
2432 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2433 temp = copy_to_reg (temp);
2434 dont_return_target = 1;
2435 }
2436 else if (queued_subexp_p (target))
709f5be1
RS
2437 /* If target contains a postincrement, let's not risk
2438 using it as the place to generate the rhs. */
bbf6f052
RK
2439 {
2440 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2441 {
2442 /* Expand EXP into a new pseudo. */
2443 temp = gen_reg_rtx (GET_MODE (target));
2444 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2445 }
2446 else
906c4e36 2447 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
2448
2449 /* If target is volatile, ANSI requires accessing the value
2450 *from* the target, if it is accessed. So make that happen.
2451 In no case return the target itself. */
2452 if (! MEM_VOLATILE_P (target) && want_value)
2453 dont_return_target = 1;
bbf6f052 2454 }
1499e0a8
RK
2455 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2456 /* If this is a scalar in a register that is stored in a wider mode
2457 than the declared mode, compute the result into its declared mode
2458 and then convert to the wider mode. Our value is the computed
2459 expression. */
2460 {
2461 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2462 convert_move (SUBREG_REG (target), temp,
2463 SUBREG_PROMOTED_UNSIGNED_P (target));
709f5be1 2464 return want_value ? temp : NULL_RTX;
1499e0a8 2465 }
bbf6f052
RK
2466 else
2467 {
2468 temp = expand_expr (exp, target, GET_MODE (target), 0);
2469 /* DO return TARGET if it's a specified hardware register.
c2e6aff6 2470 expand_return relies on this.
709f5be1
RS
2471 If TARGET is a volatile mem ref, either return TARGET
2472 or return a reg copied *from* TARGET; ANSI requires this.
2473
2474 Otherwise, if TEMP is not TARGET, return TEMP
2475 if it is constant (for efficiency),
2476 or if we really want the correct value. */
bbf6f052
RK
2477 if (!(target && GET_CODE (target) == REG
2478 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1
RS
2479 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
2480 && temp != target
2481 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
2482 dont_return_target = 1;
2483 }
2484
2485 /* If value was not generated in the target, store it there.
2486 Convert the value to TARGET's type first if necessary. */
2487
2488 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2489 {
2490 target = protect_from_queue (target, 1);
2491 if (GET_MODE (temp) != GET_MODE (target)
2492 && GET_MODE (temp) != VOIDmode)
2493 {
2494 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2495 if (dont_return_target)
2496 {
2497 /* In this case, we will return TEMP,
2498 so make sure it has the proper mode.
2499 But don't forget to store the value into TARGET. */
2500 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2501 emit_move_insn (target, temp);
2502 }
2503 else
2504 convert_move (target, temp, unsignedp);
2505 }
2506
2507 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2508 {
2509 /* Handle copying a string constant into an array.
2510 The string constant may be shorter than the array.
2511 So copy just the string's actual length, and clear the rest. */
2512 rtx size;
2513
e87b4f3f
RS
2514 /* Get the size of the data type of the string,
2515 which is actually the size of the target. */
2516 size = expr_size (exp);
2517 if (GET_CODE (size) == CONST_INT
2518 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2519 emit_block_move (target, temp, size,
2520 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2521 else
bbf6f052 2522 {
e87b4f3f
RS
2523 /* Compute the size of the data to copy from the string. */
2524 tree copy_size
c03b7665
RK
2525 = size_binop (MIN_EXPR,
2526 size_binop (CEIL_DIV_EXPR,
2527 TYPE_SIZE (TREE_TYPE (exp)),
2528 size_int (BITS_PER_UNIT)),
2529 convert (sizetype,
2530 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
906c4e36
RK
2531 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2532 VOIDmode, 0);
e87b4f3f
RS
2533 rtx label = 0;
2534
2535 /* Copy that much. */
2536 emit_block_move (target, temp, copy_size_rtx,
2537 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2538
2539 /* Figure out how much is left in TARGET
2540 that we have to clear. */
2541 if (GET_CODE (copy_size_rtx) == CONST_INT)
2542 {
2543 temp = plus_constant (XEXP (target, 0),
2544 TREE_STRING_LENGTH (exp));
2545 size = plus_constant (size,
2546 - TREE_STRING_LENGTH (exp));
2547 }
2548 else
2549 {
	      /* The amount copied is not a compile-time constant:
		 compute the remaining address and count at run time,
		 and skip the clearing entirely if nothing remains.  */
2550 enum machine_mode size_mode = Pmode;
2551
2552 temp = force_reg (Pmode, XEXP (target, 0));
2553 temp = expand_binop (size_mode, add_optab, temp,
906c4e36
RK
2554 copy_size_rtx, NULL_RTX, 0,
2555 OPTAB_LIB_WIDEN);
e87b4f3f
RS
2556
2557 size = expand_binop (size_mode, sub_optab, size,
906c4e36
RK
2558 copy_size_rtx, NULL_RTX, 0,
2559 OPTAB_LIB_WIDEN);
e87b4f3f 2560
906c4e36 2561 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
e87b4f3f
RS
2562 GET_MODE (size), 0, 0);
2563 label = gen_label_rtx ();
2564 emit_jump_insn (gen_blt (label));
2565 }
2566
2567 if (size != const0_rtx)
2568 {
bbf6f052 2569#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2570 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
e87b4f3f 2571 temp, Pmode, const0_rtx, Pmode, size, Pmode);
bbf6f052 2572#else
d562e42e 2573 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
e87b4f3f 2574 temp, Pmode, size, Pmode);
bbf6f052 2575#endif
e87b4f3f
RS
2576 }
2577 if (label)
2578 emit_label (label);
bbf6f052
RK
2579 }
2580 }
2581 else if (GET_MODE (temp) == BLKmode)
2582 emit_block_move (target, temp, expr_size (exp),
2583 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2584 else
2585 emit_move_insn (target, temp);
2586 }
709f5be1 2587
7d26fec6 2588 if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 2589 return temp;
709f5be1
RS
2590 if (want_value && GET_MODE (target) != BLKmode)
2591 return copy_to_reg (target);
2592 if (want_value)
2593 return target;
2594 return NULL_RTX;
bbf6f052
RK
2595}
2596\f
2597/* Store the value of constructor EXP into the rtx TARGET.
2598 TARGET is either a REG or a MEM. */
2599
2600static void
2601store_constructor (exp, target)
2602 tree exp;
2603 rtx target;
2604{
4af3895e
JVA
2605 tree type = TREE_TYPE (exp);
2606
bbf6f052
RK
2607 /* We know our target cannot conflict, since safe_from_p has been called. */
2608#if 0
2609 /* Don't try copying piece by piece into a hard register
2610 since that is vulnerable to being clobbered by EXP.
2611 Instead, construct in a pseudo register and then copy it all. */
2612 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2613 {
2614 rtx temp = gen_reg_rtx (GET_MODE (target));
2615 store_constructor (exp, temp);
2616 emit_move_insn (target, temp);
2617 return;
2618 }
2619#endif
2620
e44842fe
RK
2621 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2622 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
2623 {
2624 register tree elt;
2625
4af3895e 2626 /* Inform later passes that the whole union value is dead. */
e44842fe
RK
2627 if (TREE_CODE (type) == UNION_TYPE
2628 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 2629 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
4af3895e
JVA
2630
2631 /* If we are building a static constructor into a register,
2632 set the initial value as zero so we can fold the value into
2633 a constant. */
2634 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2635 emit_move_insn (target, const0_rtx);
2636
bbf6f052
RK
2637 /* If the constructor has fewer fields than the structure,
2638 clear the whole structure first. */
2639 else if (list_length (CONSTRUCTOR_ELTS (exp))
4af3895e
JVA
2640 != list_length (TYPE_FIELDS (type)))
2641 clear_storage (target, int_size_in_bytes (type));
bbf6f052
RK
2642 else
2643 /* Inform later passes that the old value is dead. */
2644 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2645
2646 /* Store each element of the constructor into
2647 the corresponding field of TARGET. */
2648
2649 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2650 {
2651 register tree field = TREE_PURPOSE (elt);
2652 register enum machine_mode mode;
2653 int bitsize;
2654 int bitpos;
2655 int unsignedp;
2656
f32fd778
RS
2657 /* Just ignore missing fields.
2658 We cleared the whole structure, above,
2659 if any fields are missing. */
2660 if (field == 0)
2661 continue;
2662
bbf6f052
RK
2663 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2664 unsignedp = TREE_UNSIGNED (field);
2665 mode = DECL_MODE (field);
	  /* VOIDmode tells store_field to use bit-field techniques.  */
2666 if (DECL_BIT_FIELD (field))
2667 mode = VOIDmode;
2668
2669 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2670 /* ??? This case remains to be written. */
2671 abort ();
2672
2673 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2674
2675 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2676 /* The alignment of TARGET is
2677 at least what its type requires. */
2678 VOIDmode, 0,
4af3895e
JVA
2679 TYPE_ALIGN (type) / BITS_PER_UNIT,
2680 int_size_in_bytes (type));
bbf6f052
RK
2681 }
2682 }
4af3895e 2683 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
2684 {
2685 register tree elt;
2686 register int i;
4af3895e 2687 tree domain = TYPE_DOMAIN (type);
      /* Index range of the array; used below to decide whether the
	 constructor initializes every element.  */
906c4e36
RK
2688 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2689 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 2690 tree elttype = TREE_TYPE (type);
bbf6f052
RK
2691
2692 /* If the constructor has fewer fields than the structure,
4af3895e
JVA
2693 clear the whole structure first. Similarly if this is a
2694 static constructor of a non-BLKmode object. */
bbf6f052 2695
4af3895e
JVA
2696 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2697 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
597bb7f1 2698 clear_storage (target, int_size_in_bytes (type));
bbf6f052
RK
2699 else
2700 /* Inform later passes that the old value is dead. */
2701 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2702
2703 /* Store each element of the constructor into
2704 the corresponding element of TARGET, determined
2705 by counting the elements. */
2706 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2707 elt;
2708 elt = TREE_CHAIN (elt), i++)
2709 {
2710 register enum machine_mode mode;
2711 int bitsize;
2712 int bitpos;
2713 int unsignedp;
2714
2715 mode = TYPE_MODE (elttype);
2716 bitsize = GET_MODE_BITSIZE (mode);
2717 unsignedp = TREE_UNSIGNED (elttype);
2718
2719 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2720
2721 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2722 /* The alignment of TARGET is
2723 at least what its type requires. */
2724 VOIDmode, 0,
4af3895e
JVA
2725 TYPE_ALIGN (type) / BITS_PER_UNIT,
2726 int_size_in_bytes (type));
bbf6f052
RK
2727 }
2728 }
2729
2730 else
2731 abort ();
2732}
2733
2734/* Store the value of EXP (an expression tree)
2735 into a subfield of TARGET which has mode MODE and occupies
2736 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2737 If MODE is VOIDmode, it means that we are storing into a bit-field.
2738
2739 If VALUE_MODE is VOIDmode, return nothing in particular.
2740 UNSIGNEDP is not used in this case.
2741
2742 Otherwise, return an rtx for the value stored. This rtx
2743 has mode VALUE_MODE if that is convenient to do.
2744 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2745
2746 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2747 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2748
2749static rtx
2750store_field (target, bitsize, bitpos, mode, exp, value_mode,
2751 unsignedp, align, total_size)
2752 rtx target;
2753 int bitsize, bitpos;
2754 enum machine_mode mode;
2755 tree exp;
2756 enum machine_mode value_mode;
2757 int unsignedp;
2758 int align;
2759 int total_size;
2760{
  /* Mask covering the low BITSIZE bits of the stored value;
     left zero when the field is a full host word or wider.  */
906c4e36 2761 HOST_WIDE_INT width_mask = 0;
bbf6f052 2762
906c4e36
RK
2763 if (bitsize < HOST_BITS_PER_WIDE_INT)
2764 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
2765
2766 /* If we are storing into an unaligned field of an aligned union that is
2767 in a register, we may have the mode of TARGET being an integer mode but
2768 MODE == BLKmode. In that case, get an aligned object whose size and
2769 alignment are the same as TARGET and store TARGET into it (we can avoid
2770 the store if the field being stored is the entire width of TARGET). Then
2771 call ourselves recursively to store the field into a BLKmode version of
2772 that object. Finally, load from the object into TARGET. This is not
2773 very efficient in general, but should only be slightly more expensive
2774 than the otherwise-required unaligned accesses. Perhaps this can be
2775 cleaned up later. */
2776
2777 if (mode == BLKmode
2778 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2779 {
2780 rtx object = assign_stack_temp (GET_MODE (target),
2781 GET_MODE_SIZE (GET_MODE (target)), 0);
2782 rtx blk_object = copy_rtx (object);
2783
2784 PUT_MODE (blk_object, BLKmode);
2785
2786 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2787 emit_move_insn (object, target);
2788
2789 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2790 align, total_size);
2791
2792 emit_move_insn (target, object);
2793
2794 return target;
2795 }
2796
2797 /* If the structure is in a register or if the component
2798 is a bit field, we cannot use addressing to access it.
2799 Use bit-field techniques or SUBREG to store in it. */
2800
4fa52007
RK
2801 if (mode == VOIDmode
2802 || (mode != BLKmode && ! direct_store[(int) mode])
2803 || GET_CODE (target) == REG
c980ac49 2804 || GET_CODE (target) == SUBREG
ccc98036
RS
2805 /* If the field isn't aligned enough to store as an ordinary memref,
2806 store it as a bit field. */
2807 || (STRICT_ALIGNMENT
2808 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
2809 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 2810 {
906c4e36 2811 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
2812 /* Store the value in the bitfield. */
2813 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2814 if (value_mode != VOIDmode)
2815 {
2816 /* The caller wants an rtx for the value. */
2817 /* If possible, avoid refetching from the bitfield itself. */
2818 if (width_mask != 0
2819 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 2820 {
9074de27 2821 tree count;
5c4d7cfb 2822 enum machine_mode tmode;
86a2c12a 2823
	      /* For an unsigned field, masking off the upper bits of TEMP
		 yields the stored value.  For a signed field, shift the
		 field's sign bit up to the top of the word and back down
		 — NOTE(review): this relies on the RSHIFT_EXPR expansion
		 sign-extending; confirm against expand_shift.  */
5c4d7cfb
RS
2824 if (unsignedp)
2825 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2826 tmode = GET_MODE (temp);
86a2c12a
RS
2827 if (tmode == VOIDmode)
2828 tmode = value_mode;
5c4d7cfb
RS
2829 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2830 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2831 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2832 }
bbf6f052 2833 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
2834 NULL_RTX, value_mode, 0, align,
2835 total_size);
bbf6f052
RK
2836 }
2837 return const0_rtx;
2838 }
2839 else
2840 {
2841 rtx addr = XEXP (target, 0);
2842 rtx to_rtx;
2843
2844 /* If a value is wanted, it must be the lhs;
2845 so make the address stable for multiple use. */
2846
2847 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2848 && ! CONSTANT_ADDRESS_P (addr)
2849 /* A frame-pointer reference is already stable. */
2850 && ! (GET_CODE (addr) == PLUS
2851 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2852 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2853 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2854 addr = copy_to_reg (addr);
2855
2856 /* Now build a reference to just the desired component. */
2857
2858 to_rtx = change_address (target, mode,
2859 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2860 MEM_IN_STRUCT_P (to_rtx) = 1;
2861
2862 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2863 }
2864}
2865\f
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep)
     tree exp;
     int *pbitsize;
     int *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  /* Accumulated variable part of the offset, in units; stays
     integer_zero_node (and is returned as 0) if the position is fully
     constant.  */
  tree offset = integer_zero_node;

  /* First determine the size, mode and signedness of the field itself,
     from the outermost reference.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      /* Only a non-bit-field has a mode usable for direct access.  */
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));
      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *pbitsize = GET_MODE_BITSIZE (mode);
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
    }

  if (size_tree)
    {
      /* A non-constant size means a variable-sized object: signal it
	 with BLKmode (turned into VOIDmode below) and *PBITSIZE == -1.  */
      if (TREE_CODE (size_tree) != INTEGER_CST)
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */

  *pbitpos = 0;

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
	{
	  /* POS is the bit position of this field within its container.  */
	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
		      : TREE_OPERAND (exp, 2));

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (pos == 0)
	    break;

	  if (TREE_CODE (pos) == PLUS_EXPR)
	    {
	      /* Split a constant+variable position: the constant goes into
		 *PBITPOS, the variable part into OFFSET (converted from
		 bits to units).  */
	      tree constant, var;
	      if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
		{
		  constant = TREE_OPERAND (pos, 0);
		  var = TREE_OPERAND (pos, 1);
		}
	      else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
		{
		  constant = TREE_OPERAND (pos, 1);
		  var = TREE_OPERAND (pos, 0);
		}
	      else
		abort ();

	      *pbitpos += TREE_INT_CST_LOW (constant);
	      offset = size_binop (PLUS_EXPR, offset,
				   size_binop (FLOOR_DIV_EXPR, var,
					       size_int (BITS_PER_UNIT)));
	    }
	  else if (TREE_CODE (pos) == INTEGER_CST)
	    *pbitpos += TREE_INT_CST_LOW (pos);
	  else
	    {
	      /* Assume here that the offset is a multiple of a unit.
		 If not, there should be an explicitly added constant.  */
	      offset = size_binop (PLUS_EXPR, offset,
				   size_binop (FLOOR_DIV_EXPR, pos,
					       size_int (BITS_PER_UNIT)));
	    }
	}

      else if (TREE_CODE (exp) == ARRAY_REF)
	{
	  /* This code is based on the code in case ARRAY_REF in expand_expr
	     below.  We assume here that the size of an array element is
	     always an integral multiple of BITS_PER_UNIT.  */

	  tree index = TREE_OPERAND (exp, 1);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  tree low_bound
	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	  tree index_type = TREE_TYPE (index);

	  /* Rebase the index so that the array's low bound counts as 0.  */
	  if (! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, index_type, index, low_bound));

	  if (TYPE_PRECISION (index_type) != POINTER_SIZE)
	    {
	      index = convert (type_for_size (POINTER_SIZE, 0), index);
	      index_type = TREE_TYPE (index);
	    }

	  /* INDEX becomes the element offset in bits.  */
	  index = fold (build (MULT_EXPR, index_type, index,
			       TYPE_SIZE (TREE_TYPE (exp))));

	  if (TREE_CODE (index) == INTEGER_CST
	      && TREE_INT_CST_HIGH (index) == 0)
	    *pbitpos += TREE_INT_CST_LOW (index);
	  else
	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (FLOOR_DIV_EXPR, index,
					     size_int (BITS_PER_UNIT)));
	}
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;
      exp = TREE_OPERAND (exp, 0);
    }

  /* If this was a bit-field, see if there is a mode that allows direct
     access in case EXP is in memory.  */
  if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
    {
      mode = mode_for_size (*pbitsize, MODE_INT, 0);
      if (mode == BLKmode)
	mode = VOIDmode;
    }

  /* A constant position is reported as a null *POFFSET.  */
  if (integer_zerop (offset))
    offset = 0;

  *pmode = mode;
  *poffset = offset;
#if 0
  /* We aren't finished fixing the callers to really handle nonzero offset.  */
  if (offset != 0)
    abort ();
#endif

  return exp;
}
3049\f
3050/* Given an rtx VALUE that may contain additions and multiplications,
3051 return an equivalent value that just refers to a register or memory.
3052 This is done by generating instructions to perform the arithmetic
c45a13a6
RK
3053 and returning a pseudo-register containing the value.
3054
3055 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
3056
3057rtx
3058force_operand (value, target)
3059 rtx value, target;
3060{
3061 register optab binoptab = 0;
3062 /* Use a temporary to force order of execution of calls to
3063 `force_operand'. */
3064 rtx tmp;
3065 register rtx op2;
3066 /* Use subtarget as the target for operand 0 of a binary operation. */
3067 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3068
3069 if (GET_CODE (value) == PLUS)
3070 binoptab = add_optab;
3071 else if (GET_CODE (value) == MINUS)
3072 binoptab = sub_optab;
3073 else if (GET_CODE (value) == MULT)
3074 {
3075 op2 = XEXP (value, 1);
3076 if (!CONSTANT_P (op2)
3077 && !(GET_CODE (op2) == REG && op2 != subtarget))
3078 subtarget = 0;
3079 tmp = force_operand (XEXP (value, 0), subtarget);
3080 return expand_mult (GET_MODE (value), tmp,
906c4e36 3081 force_operand (op2, NULL_RTX),
bbf6f052
RK
3082 target, 0);
3083 }
3084
3085 if (binoptab)
3086 {
3087 op2 = XEXP (value, 1);
3088 if (!CONSTANT_P (op2)
3089 && !(GET_CODE (op2) == REG && op2 != subtarget))
3090 subtarget = 0;
3091 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3092 {
3093 binoptab = add_optab;
3094 op2 = negate_rtx (GET_MODE (value), op2);
3095 }
3096
3097 /* Check for an addition with OP2 a constant integer and our first
3098 operand a PLUS of a virtual register and something else. In that
3099 case, we want to emit the sum of the virtual register and the
3100 constant first and then add the other value. This allows virtual
3101 register instantiation to simply modify the constant rather than
3102 creating another one around this addition. */
3103 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3104 && GET_CODE (XEXP (value, 0)) == PLUS
3105 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3106 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3107 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3108 {
3109 rtx temp = expand_binop (GET_MODE (value), binoptab,
3110 XEXP (XEXP (value, 0), 0), op2,
3111 subtarget, 0, OPTAB_LIB_WIDEN);
3112 return expand_binop (GET_MODE (value), binoptab, temp,
3113 force_operand (XEXP (XEXP (value, 0), 1), 0),
3114 target, 0, OPTAB_LIB_WIDEN);
3115 }
3116
3117 tmp = force_operand (XEXP (value, 0), subtarget);
3118 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 3119 force_operand (op2, NULL_RTX),
bbf6f052 3120 target, 0, OPTAB_LIB_WIDEN);
8008b228 3121 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
3122 because the only operations we are expanding here are signed ones. */
3123 }
3124 return value;
3125}
3126\f
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      /* A sublist describes parts of a subobject; flatten it in.  */
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	/* Build a reference LHS.PART to the part to be saved.  */
	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
	/* Allocate a stack temporary to hold the saved value.  */
	rtx target = assign_stack_temp (TYPE_MODE (part_type),
					int_size_in_bytes (part_type), 0);
	/* Legitimize the temporary's address if it isn't valid as-is.  */
	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
	/* Record the pair (reference, RTL_EXPR wrapping the temporary).  */
	parts = tree_cons (to_be_saved,
			   build (RTL_EXPR, part_type, NULL_TREE,
				  (tree) target),
			   parts);
	/* Copy the part's current value into the temporary now; the new
	   pair is at the head of PARTS.  */
	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}
3160
3161/* Subroutine of expand_expr:
3162 record the non-copied parts (LIST) of an expr (LHS), and return a list
3163 which specifies the initial values of these parts. */
3164
3165static tree
3166init_noncopied_parts (lhs, list)
3167 tree lhs;
3168 tree list;
3169{
3170 tree tail;
3171 tree parts = 0;
3172
3173 for (tail = list; tail; tail = TREE_CHAIN (tail))
3174 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3175 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3176 else
3177 {
3178 tree part = TREE_VALUE (tail);
3179 tree part_type = TREE_TYPE (part);
906c4e36 3180 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
3181 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3182 }
3183 return parts;
3184}
3185
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  */

static int
safe_from_p (x, exp)
     rtx x;
     tree exp;
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0)
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
	  || (GET_CODE (XEXP (x, 0)) == PLUS
	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  /* Dispatch on the class of tree code: declarations, constants,
     miscellaneous nodes, then unary/binary/comparison/expression codes.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      /* Constants can never reference X.  */
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	/* A list is safe iff both its value and the rest of the chain
	   are safe (a null entry is trivially safe).  */
	return ((TREE_VALUE (exp) == 0
		 || safe_from_p (x, TREE_VALUE (exp)))
		&& (TREE_CHAIN (exp) == 0
		    || safe_from_p (x, TREE_CHAIN (exp))));
      else
	/* Any other 'x' node: assume the worst.  */
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0));

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0))
	      && safe_from_p (x, TREE_OPERAND (exp, 1)));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  return (staticp (TREE_OPERAND (exp, 0))
		  || safe_from_p (x, TREE_OPERAND (exp, 0)));

	case INDIRECT_REF:
	  /* An indirection could alias any memory location.  */
	  if (GET_CODE (x) == MEM)
	    return 0;
	  break;

	case CALL_EXPR:
	  exp_rtl = CALL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    {
	      /* Assume that the call will clobber all hard registers and
		 all of memory.  */
	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
		  || GET_CODE (x) == MEM)
		return 0;
	    }

	  break;

	case RTL_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    /* We don't know what this can modify.  */
	    return 0;

	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  break;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1));

	case METHOD_CALL_EXPR:
	  /* This takes a rtx argument, but shouldn't appear here.  */
	  abort ();
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      /* Otherwise recurse over every operand of the expression.  */
      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i)))
	  return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
3332
3333/* Subroutine of expand_expr: return nonzero iff EXP is an
3334 expression whose type is statically determinable. */
3335
3336static int
3337fixed_type_p (exp)
3338 tree exp;
3339{
3340 if (TREE_CODE (exp) == PARM_DECL
3341 || TREE_CODE (exp) == VAR_DECL
3342 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3343 || TREE_CODE (exp) == COMPONENT_REF
3344 || TREE_CODE (exp) == ARRAY_REF)
3345 return 1;
3346 return 0;
3347}
3348\f
3349/* expand_expr: generate code for computing expression EXP.
3350 An rtx for the computed value is returned. The value is never null.
3351 In the case of a void EXP, const0_rtx is returned.
3352
3353 The value may be stored in TARGET if TARGET is nonzero.
3354 TARGET is just a suggestion; callers must assume that
3355 the rtx returned may not be the same as TARGET.
3356
3357 If TARGET is CONST0_RTX, it means that the value will be ignored.
3358
3359 If TMODE is not VOIDmode, it suggests generating the
3360 result in mode TMODE. But this is done only when convenient.
3361 Otherwise, TMODE is ignored and the value generated in its natural mode.
3362 TMODE is just a suggestion; callers must assume that
3363 the rtx returned may not have mode TMODE.
3364
3365 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3366 with a constant address even if that address is not normally legitimate.
3367 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3368
3369 If MODIFIER is EXPAND_SUM then when EXP is an addition
3370 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3371 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3372 products as above, or REG or MEM, or constant.
3373 Ordinarily in such cases we would output mul or add instructions
3374 and then return a pseudo reg containing the sum.
3375
3376 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3377 it also marks a label as absolutely required (it can't be dead).
26fcb35a 3378 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6dc42e49 3379 This is used for outputting expressions used in initializers. */
bbf6f052
RK
3380
3381rtx
3382expand_expr (exp, target, tmode, modifier)
3383 register tree exp;
3384 rtx target;
3385 enum machine_mode tmode;
3386 enum expand_modifier modifier;
3387{
3388 register rtx op0, op1, temp;
3389 tree type = TREE_TYPE (exp);
3390 int unsignedp = TREE_UNSIGNED (type);
3391 register enum machine_mode mode = TYPE_MODE (type);
3392 register enum tree_code code = TREE_CODE (exp);
3393 optab this_optab;
3394 /* Use subtarget as the target for operand 0 of a binary operation. */
3395 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3396 rtx original_target = target;
dd27116b
RK
3397 int ignore = (target == const0_rtx
3398 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4d87de75
RS
3399 || code == CONVERT_EXPR || code == REFERENCE_EXPR
3400 || code == COND_EXPR)
dd27116b 3401 && TREE_CODE (type) == VOID_TYPE));
bbf6f052
RK
3402 tree context;
3403
3404 /* Don't use hard regs as subtargets, because the combiner
3405 can only handle pseudo regs. */
3406 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3407 subtarget = 0;
3408 /* Avoid subtargets inside loops,
3409 since they hide some invariant expressions. */
3410 if (preserve_subexpressions_p ())
3411 subtarget = 0;
3412
dd27116b
RK
3413 /* If we are going to ignore this result, we need only do something
3414 if there is a side-effect somewhere in the expression. If there
3415 is, short-circuit the most common cases here. */
bbf6f052 3416
dd27116b
RK
3417 if (ignore)
3418 {
3419 if (! TREE_SIDE_EFFECTS (exp))
3420 return const0_rtx;
3421
3422 /* Ensure we reference a volatile object even if value is ignored. */
3423 if (TREE_THIS_VOLATILE (exp)
3424 && TREE_CODE (exp) != FUNCTION_DECL
3425 && mode != VOIDmode && mode != BLKmode)
3426 {
3427 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3428 if (GET_CODE (temp) == MEM)
3429 temp = copy_to_reg (temp);
3430 return const0_rtx;
3431 }
3432
3433 if (TREE_CODE_CLASS (code) == '1')
3434 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3435 VOIDmode, modifier);
3436 else if (TREE_CODE_CLASS (code) == '2'
3437 || TREE_CODE_CLASS (code) == '<')
3438 {
3439 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3440 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3441 return const0_rtx;
3442 }
3443 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3444 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3445 /* If the second operand has no side effects, just evaluate
3446 the first. */
3447 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3448 VOIDmode, modifier);
dd27116b
RK
3449
3450 target = 0, original_target = 0;
3451 }
bbf6f052 3452
e44842fe
RK
3453 /* If will do cse, generate all results into pseudo registers
3454 since 1) that allows cse to find more things
3455 and 2) otherwise cse could produce an insn the machine
3456 cannot support. */
3457
bbf6f052
RK
3458 if (! cse_not_expected && mode != BLKmode && target
3459 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3460 target = subtarget;
3461
bbf6f052
RK
3462 switch (code)
3463 {
3464 case LABEL_DECL:
b552441b
RS
3465 {
3466 tree function = decl_function_context (exp);
3467 /* Handle using a label in a containing function. */
3468 if (function != current_function_decl && function != 0)
3469 {
3470 struct function *p = find_function_data (function);
3471 /* Allocate in the memory associated with the function
3472 that the label is in. */
3473 push_obstacks (p->function_obstack,
3474 p->function_maybepermanent_obstack);
3475
3476 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3477 label_rtx (exp), p->forced_labels);
3478 pop_obstacks ();
3479 }
3480 else if (modifier == EXPAND_INITIALIZER)
3481 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3482 label_rtx (exp), forced_labels);
26fcb35a 3483 temp = gen_rtx (MEM, FUNCTION_MODE,
b552441b 3484 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
26fcb35a
RS
3485 if (function != current_function_decl && function != 0)
3486 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3487 return temp;
b552441b 3488 }
bbf6f052
RK
3489
3490 case PARM_DECL:
3491 if (DECL_RTL (exp) == 0)
3492 {
3493 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 3494 return CONST0_RTX (mode);
bbf6f052
RK
3495 }
3496
3497 case FUNCTION_DECL:
3498 case VAR_DECL:
3499 case RESULT_DECL:
3500 if (DECL_RTL (exp) == 0)
3501 abort ();
e44842fe
RK
3502 /* Ensure variable marked as used even if it doesn't go through
3503 a parser. If it hasn't be used yet, write out an external
3504 definition. */
3505 if (! TREE_USED (exp))
3506 {
3507 assemble_external (exp);
3508 TREE_USED (exp) = 1;
3509 }
3510
bbf6f052
RK
3511 /* Handle variables inherited from containing functions. */
3512 context = decl_function_context (exp);
3513
3514 /* We treat inline_function_decl as an alias for the current function
3515 because that is the inline function whose vars, types, etc.
3516 are being merged into the current function.
3517 See expand_inline_function. */
3518 if (context != 0 && context != current_function_decl
3519 && context != inline_function_decl
3520 /* If var is static, we don't need a static chain to access it. */
3521 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3522 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3523 {
3524 rtx addr;
3525
3526 /* Mark as non-local and addressable. */
81feeecb 3527 DECL_NONLOCAL (exp) = 1;
bbf6f052
RK
3528 mark_addressable (exp);
3529 if (GET_CODE (DECL_RTL (exp)) != MEM)
3530 abort ();
3531 addr = XEXP (DECL_RTL (exp), 0);
3532 if (GET_CODE (addr) == MEM)
3533 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3534 else
3535 addr = fix_lexical_addr (addr, exp);
3536 return change_address (DECL_RTL (exp), mode, addr);
3537 }
4af3895e 3538
bbf6f052
RK
3539 /* This is the case of an array whose size is to be determined
3540 from its initializer, while the initializer is still being parsed.
3541 See expand_decl. */
3542 if (GET_CODE (DECL_RTL (exp)) == MEM
3543 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3544 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3545 XEXP (DECL_RTL (exp), 0));
3546 if (GET_CODE (DECL_RTL (exp)) == MEM
3547 && modifier != EXPAND_CONST_ADDRESS
3548 && modifier != EXPAND_SUM
3549 && modifier != EXPAND_INITIALIZER)
3550 {
3551 /* DECL_RTL probably contains a constant address.
3552 On RISC machines where a constant address isn't valid,
3553 make some insns to get that address into a register. */
3554 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3555 || (flag_force_addr
3556 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3557 return change_address (DECL_RTL (exp), VOIDmode,
3558 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3559 }
1499e0a8
RK
3560
3561 /* If the mode of DECL_RTL does not match that of the decl, it
3562 must be a promoted value. We return a SUBREG of the wanted mode,
3563 but mark it so that we know that it was already extended. */
3564
3565 if (GET_CODE (DECL_RTL (exp)) == REG
3566 && GET_MODE (DECL_RTL (exp)) != mode)
3567 {
3568 enum machine_mode decl_mode = DECL_MODE (exp);
3569
3570 /* Get the signedness used for this variable. Ensure we get the
3571 same mode we got when the variable was declared. */
3572
3573 PROMOTE_MODE (decl_mode, unsignedp, type);
3574
3575 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3576 abort ();
3577
3578 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3579 SUBREG_PROMOTED_VAR_P (temp) = 1;
3580 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3581 return temp;
3582 }
3583
bbf6f052
RK
3584 return DECL_RTL (exp);
3585
3586 case INTEGER_CST:
3587 return immed_double_const (TREE_INT_CST_LOW (exp),
3588 TREE_INT_CST_HIGH (exp),
3589 mode);
3590
3591 case CONST_DECL:
3592 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3593
3594 case REAL_CST:
3595 /* If optimized, generate immediate CONST_DOUBLE
3596 which will be turned into memory by reload if necessary.
3597
3598 We used to force a register so that loop.c could see it. But
3599 this does not allow gen_* patterns to perform optimizations with
3600 the constants. It also produces two insns in cases like "x = 1.0;".
3601 On most machines, floating-point constants are not permitted in
3602 many insns, so we'd end up copying it to a register in any case.
3603
3604 Now, we do the copying in expand_binop, if appropriate. */
3605 return immed_real_const (exp);
3606
3607 case COMPLEX_CST:
3608 case STRING_CST:
3609 if (! TREE_CST_RTL (exp))
3610 output_constant_def (exp);
3611
3612 /* TREE_CST_RTL probably contains a constant address.
3613 On RISC machines where a constant address isn't valid,
3614 make some insns to get that address into a register. */
3615 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3616 && modifier != EXPAND_CONST_ADDRESS
3617 && modifier != EXPAND_INITIALIZER
3618 && modifier != EXPAND_SUM
3619 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3620 return change_address (TREE_CST_RTL (exp), VOIDmode,
3621 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3622 return TREE_CST_RTL (exp);
3623
3624 case SAVE_EXPR:
3625 context = decl_function_context (exp);
3626 /* We treat inline_function_decl as an alias for the current function
3627 because that is the inline function whose vars, types, etc.
3628 are being merged into the current function.
3629 See expand_inline_function. */
3630 if (context == current_function_decl || context == inline_function_decl)
3631 context = 0;
3632
3633 /* If this is non-local, handle it. */
3634 if (context)
3635 {
3636 temp = SAVE_EXPR_RTL (exp);
3637 if (temp && GET_CODE (temp) == REG)
3638 {
3639 put_var_into_stack (exp);
3640 temp = SAVE_EXPR_RTL (exp);
3641 }
3642 if (temp == 0 || GET_CODE (temp) != MEM)
3643 abort ();
3644 return change_address (temp, mode,
3645 fix_lexical_addr (XEXP (temp, 0), exp));
3646 }
3647 if (SAVE_EXPR_RTL (exp) == 0)
3648 {
3649 if (mode == BLKmode)
34a25822
RK
3650 {
3651 temp
3652 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3653 MEM_IN_STRUCT_P (temp)
3654 = (TREE_CODE (type) == RECORD_TYPE
3655 || TREE_CODE (type) == UNION_TYPE
3656 || TREE_CODE (type) == QUAL_UNION_TYPE
3657 || TREE_CODE (type) == ARRAY_TYPE);
3658 }
bbf6f052 3659 else
1499e0a8
RK
3660 {
3661 enum machine_mode var_mode = mode;
3662
3663 if (TREE_CODE (type) == INTEGER_TYPE
3664 || TREE_CODE (type) == ENUMERAL_TYPE
3665 || TREE_CODE (type) == BOOLEAN_TYPE
3666 || TREE_CODE (type) == CHAR_TYPE
3667 || TREE_CODE (type) == REAL_TYPE
3668 || TREE_CODE (type) == POINTER_TYPE
3669 || TREE_CODE (type) == OFFSET_TYPE)
3670 {
3671 PROMOTE_MODE (var_mode, unsignedp, type);
3672 }
3673
3674 temp = gen_reg_rtx (var_mode);
3675 }
3676
bbf6f052 3677 SAVE_EXPR_RTL (exp) = temp;
bbf6f052
RK
3678 if (!optimize && GET_CODE (temp) == REG)
3679 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3680 save_expr_regs);
ff78f773
RK
3681
3682 /* If the mode of TEMP does not match that of the expression, it
3683 must be a promoted value. We pass store_expr a SUBREG of the
3684 wanted mode but mark it so that we know that it was already
3685 extended. Note that `unsignedp' was modified above in
3686 this case. */
3687
3688 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3689 {
3690 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3691 SUBREG_PROMOTED_VAR_P (temp) = 1;
3692 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3693 }
3694
3695 store_expr (TREE_OPERAND (exp, 0), temp, 0);
bbf6f052 3696 }
1499e0a8
RK
3697
3698 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3699 must be a promoted value. We return a SUBREG of the wanted mode,
3700 but mark it so that we know that it was already extended. Note
3701 that `unsignedp' was modified above in this case. */
3702
3703 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3704 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3705 {
3706 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3707 SUBREG_PROMOTED_VAR_P (temp) = 1;
3708 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3709 return temp;
3710 }
3711
bbf6f052
RK
3712 return SAVE_EXPR_RTL (exp);
3713
3714 case EXIT_EXPR:
e44842fe
RK
3715 expand_exit_loop_if_false (NULL_PTR,
3716 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
3717 return const0_rtx;
3718
3719 case LOOP_EXPR:
3720 expand_start_loop (1);
3721 expand_expr_stmt (TREE_OPERAND (exp, 0));
3722 expand_end_loop ();
3723
3724 return const0_rtx;
3725
3726 case BIND_EXPR:
3727 {
3728 tree vars = TREE_OPERAND (exp, 0);
3729 int vars_need_expansion = 0;
3730
3731 /* Need to open a binding contour here because
3732 if there are any cleanups they most be contained here. */
3733 expand_start_bindings (0);
3734
2df53c0b
RS
3735 /* Mark the corresponding BLOCK for output in its proper place. */
3736 if (TREE_OPERAND (exp, 2) != 0
3737 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3738 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
3739
3740 /* If VARS have not yet been expanded, expand them now. */
3741 while (vars)
3742 {
3743 if (DECL_RTL (vars) == 0)
3744 {
3745 vars_need_expansion = 1;
3746 expand_decl (vars);
3747 }
3748 expand_decl_init (vars);
3749 vars = TREE_CHAIN (vars);
3750 }
3751
3752 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3753
3754 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3755
3756 return temp;
3757 }
3758
3759 case RTL_EXPR:
3760 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3761 abort ();
3762 emit_insns (RTL_EXPR_SEQUENCE (exp));
3763 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3764 return RTL_EXPR_RTL (exp);
3765
3766 case CONSTRUCTOR:
dd27116b
RK
3767 /* If we don't need the result, just ensure we evaluate any
3768 subexpressions. */
3769 if (ignore)
3770 {
3771 tree elt;
3772 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3773 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3774 return const0_rtx;
3775 }
4af3895e
JVA
3776 /* All elts simple constants => refer to a constant in memory. But
3777 if this is a non-BLKmode mode, let it store a field at a time
3778 since that should make a CONST_INT or CONST_DOUBLE when we
dd27116b
RK
3779 fold. If we are making an initializer and all operands are
3780 constant, put it in memory as well. */
3781 else if ((TREE_STATIC (exp)
3782 && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3783 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052
RK
3784 {
3785 rtx constructor = output_constant_def (exp);
b552441b
RS
3786 if (modifier != EXPAND_CONST_ADDRESS
3787 && modifier != EXPAND_INITIALIZER
3788 && modifier != EXPAND_SUM
3789 && !memory_address_p (GET_MODE (constructor),
3790 XEXP (constructor, 0)))
bbf6f052
RK
3791 constructor = change_address (constructor, VOIDmode,
3792 XEXP (constructor, 0));
3793 return constructor;
3794 }
3795
bbf6f052
RK
3796 else
3797 {
3798 if (target == 0 || ! safe_from_p (target, exp))
3799 {
3800 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3801 target = gen_reg_rtx (mode);
3802 else
3803 {
3b94d087
RS
3804 enum tree_code c = TREE_CODE (type);
3805 target
3806 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
e7f3c83f
RK
3807 if (c == RECORD_TYPE || c == UNION_TYPE
3808 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
3b94d087 3809 MEM_IN_STRUCT_P (target) = 1;
bbf6f052
RK
3810 }
3811 }
3812 store_constructor (exp, target);
3813 return target;
3814 }
3815
3816 case INDIRECT_REF:
3817 {
3818 tree exp1 = TREE_OPERAND (exp, 0);
3819 tree exp2;
3820
3821 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3822 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3823 This code has the same general effect as simply doing
3824 expand_expr on the save expr, except that the expression PTR
3825 is computed for use as a memory address. This means different
3826 code, suitable for indexing, may be generated. */
3827 if (TREE_CODE (exp1) == SAVE_EXPR
3828 && SAVE_EXPR_RTL (exp1) == 0
3829 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3830 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3831 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3832 {
906c4e36
RK
3833 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3834 VOIDmode, EXPAND_SUM);
bbf6f052
RK
3835 op0 = memory_address (mode, temp);
3836 op0 = copy_all_regs (op0);
3837 SAVE_EXPR_RTL (exp1) = op0;
3838 }
3839 else
3840 {
906c4e36 3841 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052
RK
3842 op0 = memory_address (mode, op0);
3843 }
8c8a8e34
JW
3844
3845 temp = gen_rtx (MEM, mode, op0);
3846 /* If address was computed by addition,
3847 mark this as an element of an aggregate. */
3848 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3849 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3850 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3851 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3852 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3853 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
e7f3c83f 3854 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
8c8a8e34
JW
3855 || (TREE_CODE (exp1) == ADDR_EXPR
3856 && (exp2 = TREE_OPERAND (exp1, 0))
3857 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3858 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
e7f3c83f
RK
3859 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
3860 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
8c8a8e34 3861 MEM_IN_STRUCT_P (temp) = 1;
f94cc92f 3862 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
89742723 3863#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
bbf6f052
RK
3864 a location is accessed through a pointer to const does not mean
3865 that the value there can never change. */
8c8a8e34 3866 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
bbf6f052 3867#endif
8c8a8e34
JW
3868 return temp;
3869 }
bbf6f052
RK
3870
3871 case ARRAY_REF:
742920c7
RK
3872 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
3873 abort ();
bbf6f052 3874
bbf6f052 3875 {
742920c7
RK
3876 tree array = TREE_OPERAND (exp, 0);
3877 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
3878 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3879 tree index = TREE_OPERAND (exp, 1);
3880 tree index_type = TREE_TYPE (index);
bbf6f052 3881 int i;
bbf6f052 3882
742920c7
RK
3883 /* Optimize the special-case of a zero lower bound. */
3884 if (! integer_zerop (low_bound))
3885 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3886
3887 if (TREE_CODE (index) != INTEGER_CST
3888 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3889 {
3890 /* Nonconstant array index or nonconstant element size.
3891 Generate the tree for *(&array+index) and expand that,
3892 except do it in a language-independent way
3893 and don't complain about non-lvalue arrays.
3894 `mark_addressable' should already have been called
3895 for any array for which this case will be reached. */
3896
3897 /* Don't forget the const or volatile flag from the array
3898 element. */
3899 tree variant_type = build_type_variant (type,
3900 TREE_READONLY (exp),
3901 TREE_THIS_VOLATILE (exp));
3902 tree array_adr = build1 (ADDR_EXPR,
3903 build_pointer_type (variant_type), array);
3904 tree elt;
3905
3906 /* Convert the integer argument to a type the same size as a
3907 pointer so the multiply won't overflow spuriously. */
3908 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3909 index = convert (type_for_size (POINTER_SIZE, 0), index);
3910
3911 /* Don't think the address has side effects
3912 just because the array does.
3913 (In some cases the address might have side effects,
3914 and we fail to record that fact here. However, it should not
3915 matter, since expand_expr should not care.) */
3916 TREE_SIDE_EFFECTS (array_adr) = 0;
3917
3918 elt = build1 (INDIRECT_REF, type,
3919 fold (build (PLUS_EXPR,
3920 TYPE_POINTER_TO (variant_type),
3921 array_adr,
3922 fold (build (MULT_EXPR,
3923 TYPE_POINTER_TO (variant_type),
3924 index,
3925 size_in_bytes (type))))));
3926
3927 /* Volatility, etc., of new expression is same as old
3928 expression. */
3929 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3930 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3931 TREE_READONLY (elt) = TREE_READONLY (exp);
3932
3933 return expand_expr (elt, target, tmode, modifier);
3934 }
3935
3936 /* Fold an expression like: "foo"[2].
3937 This is not done in fold so it won't happen inside &. */
3938
3939 if (TREE_CODE (array) == STRING_CST
3940 && TREE_CODE (index) == INTEGER_CST
3941 && !TREE_INT_CST_HIGH (index)
3942 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
bbf6f052 3943 {
742920c7 3944 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
bbf6f052 3945 {
742920c7 3946 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
bbf6f052
RK
3947 TREE_TYPE (exp) = integer_type_node;
3948 return expand_expr (exp, target, tmode, modifier);
3949 }
742920c7 3950 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
bbf6f052 3951 {
742920c7 3952 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
bbf6f052 3953 TREE_TYPE (exp) = integer_type_node;
742920c7
RK
3954 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
3955 exp),
3956 target, tmode, modifier);
bbf6f052
RK
3957 }
3958 }
bbf6f052 3959
742920c7
RK
3960 /* If this is a constant index into a constant array,
3961 just get the value from the array. Handle both the cases when
3962 we have an explicit constructor and when our operand is a variable
3963 that was declared const. */
4af3895e 3964
742920c7
RK
3965 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
3966 {
3967 if (TREE_CODE (index) == INTEGER_CST
3968 && TREE_INT_CST_HIGH (index) == 0)
3969 {
3970 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3971
3972 i = TREE_INT_CST_LOW (index);
3973 while (elem && i--)
3974 elem = TREE_CHAIN (elem);
3975 if (elem)
3976 return expand_expr (fold (TREE_VALUE (elem)), target,
3977 tmode, modifier);
3978 }
3979 }
4af3895e 3980
742920c7
RK
3981 else if (optimize >= 1
3982 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
3983 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
3984 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
3985 {
3986 if (TREE_CODE (index) == INTEGER_CST
3987 && TREE_INT_CST_HIGH (index) == 0)
3988 {
3989 tree init = DECL_INITIAL (array);
3990
3991 i = TREE_INT_CST_LOW (index);
3992 if (TREE_CODE (init) == CONSTRUCTOR)
3993 {
3994 tree elem = CONSTRUCTOR_ELTS (init);
3995
3996 while (elem && i--)
3997 elem = TREE_CHAIN (elem);
3998 if (elem)
3999 return expand_expr (fold (TREE_VALUE (elem)), target,
4000 tmode, modifier);
4001 }
4002 else if (TREE_CODE (init) == STRING_CST
4003 && i < TREE_STRING_LENGTH (init))
4004 {
4005 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
4006 return convert_to_mode (mode, temp, 0);
4007 }
4008 }
4009 }
4010 }
8c8a8e34 4011
bbf6f052
RK
4012 /* Treat array-ref with constant index as a component-ref. */
4013
4014 case COMPONENT_REF:
4015 case BIT_FIELD_REF:
4af3895e
JVA
4016 /* If the operand is a CONSTRUCTOR, we can just extract the
4017 appropriate field if it is present. */
4018 if (code != ARRAY_REF
4019 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4020 {
4021 tree elt;
4022
4023 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4024 elt = TREE_CHAIN (elt))
4025 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4026 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4027 }
4028
bbf6f052
RK
4029 {
4030 enum machine_mode mode1;
4031 int bitsize;
4032 int bitpos;
7bb0943f 4033 tree offset;
bbf6f052 4034 int volatilep = 0;
7bb0943f 4035 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
bbf6f052
RK
4036 &mode1, &unsignedp, &volatilep);
4037
e7f3c83f
RK
4038 /* If we got back the original object, something is wrong. Perhaps
4039 we are evaluating an expression too early. In any event, don't
4040 infinitely recurse. */
4041 if (tem == exp)
4042 abort ();
4043
bbf6f052
RK
4044 /* In some cases, we will be offsetting OP0's address by a constant.
4045 So get it as a sum, if possible. If we will be using it
4046 directly in an insn, we validate it. */
906c4e36 4047 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052 4048
8c8a8e34 4049 /* If this is a constant, put it into a register if it is a
8008b228 4050 legitimate constant and memory if it isn't. */
8c8a8e34
JW
4051 if (CONSTANT_P (op0))
4052 {
4053 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
f2878c6b 4054 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
8c8a8e34
JW
4055 op0 = force_reg (mode, op0);
4056 else
4057 op0 = validize_mem (force_const_mem (mode, op0));
4058 }
4059
7bb0943f
RS
4060 if (offset != 0)
4061 {
906c4e36 4062 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
4063
4064 if (GET_CODE (op0) != MEM)
4065 abort ();
4066 op0 = change_address (op0, VOIDmode,
4067 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4068 force_reg (Pmode, offset_rtx)));
4069 }
4070
bbf6f052
RK
4071 /* Don't forget about volatility even if this is a bitfield. */
4072 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4073 {
4074 op0 = copy_rtx (op0);
4075 MEM_VOLATILE_P (op0) = 1;
4076 }
4077
ccc98036
RS
4078 /* In cases where an aligned union has an unaligned object
4079 as a field, we might be extracting a BLKmode value from
4080 an integer-mode (e.g., SImode) object. Handle this case
4081 by doing the extract into an object as wide as the field
4082 (which we know to be the width of a basic mode), then
4083 storing into memory, and changing the mode to BLKmode. */
bbf6f052 4084 if (mode1 == VOIDmode
0bba3f6f
RK
4085 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4086 && modifier != EXPAND_CONST_ADDRESS
4087 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
ccc98036
RS
4088 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
4089 /* If the field isn't aligned enough to fetch as a memref,
4090 fetch it as a bit field. */
4091 || (STRICT_ALIGNMENT
4092 && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
4093 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 4094 {
bbf6f052
RK
4095 enum machine_mode ext_mode = mode;
4096
4097 if (ext_mode == BLKmode)
4098 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4099
4100 if (ext_mode == BLKmode)
4101 abort ();
4102
4103 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4104 unsignedp, target, ext_mode, ext_mode,
4105 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
4106 int_size_in_bytes (TREE_TYPE (tem)));
4107 if (mode == BLKmode)
4108 {
4109 rtx new = assign_stack_temp (ext_mode,
4110 bitsize / BITS_PER_UNIT, 0);
4111
4112 emit_move_insn (new, op0);
4113 op0 = copy_rtx (new);
4114 PUT_MODE (op0, BLKmode);
092dded9 4115 MEM_IN_STRUCT_P (op0) = 1;
bbf6f052
RK
4116 }
4117
4118 return op0;
4119 }
4120
4121 /* Get a reference to just this component. */
4122 if (modifier == EXPAND_CONST_ADDRESS
4123 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4124 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4125 (bitpos / BITS_PER_UNIT)));
4126 else
4127 op0 = change_address (op0, mode1,
4128 plus_constant (XEXP (op0, 0),
4129 (bitpos / BITS_PER_UNIT)));
4130 MEM_IN_STRUCT_P (op0) = 1;
4131 MEM_VOLATILE_P (op0) |= volatilep;
4132 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4133 return op0;
4134 if (target == 0)
4135 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4136 convert_move (target, op0, unsignedp);
4137 return target;
4138 }
4139
4140 case OFFSET_REF:
4141 {
da120c2f 4142 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
bbf6f052 4143 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
906c4e36 4144 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052
RK
4145 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4146 MEM_IN_STRUCT_P (temp) = 1;
f94cc92f 4147 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
89742723 4148#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
bbf6f052
RK
4149 a location is accessed through a pointer to const does not mean
4150 that the value there can never change. */
4151 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4152#endif
4153 return temp;
4154 }
4155
4156 /* Intended for a reference to a buffer of a file-object in Pascal.
4157 But it's not certain that a special tree code will really be
4158 necessary for these. INDIRECT_REF might work for them. */
4159 case BUFFER_REF:
4160 abort ();
4161
7308a047
RS
4162 /* IN_EXPR: Inlined pascal set IN expression.
4163
4164 Algorithm:
4165 rlo = set_low - (set_low%bits_per_word);
4166 the_word = set [ (index - rlo)/bits_per_word ];
4167 bit_index = index % bits_per_word;
4168 bitmask = 1 << bit_index;
4169 return !!(the_word & bitmask); */
4170 case IN_EXPR:
4171 preexpand_calls (exp);
4172 {
4173 tree set = TREE_OPERAND (exp, 0);
4174 tree index = TREE_OPERAND (exp, 1);
4175 tree set_type = TREE_TYPE (set);
4176
4177 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4178 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4179
4180 rtx index_val;
4181 rtx lo_r;
4182 rtx hi_r;
4183 rtx rlow;
4184 rtx diff, quo, rem, addr, bit, result;
4185 rtx setval, setaddr;
4186 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4187
4188 if (target == 0)
17938e57 4189 target = gen_reg_rtx (mode);
7308a047
RS
4190
4191 /* If domain is empty, answer is no. */
4192 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4193 return const0_rtx;
4194
4195 index_val = expand_expr (index, 0, VOIDmode, 0);
4196 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4197 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4198 setval = expand_expr (set, 0, VOIDmode, 0);
4199 setaddr = XEXP (setval, 0);
4200
4201 /* Compare index against bounds, if they are constant. */
4202 if (GET_CODE (index_val) == CONST_INT
17938e57
RK
4203 && GET_CODE (lo_r) == CONST_INT
4204 && INTVAL (index_val) < INTVAL (lo_r))
4205 return const0_rtx;
7308a047
RS
4206
4207 if (GET_CODE (index_val) == CONST_INT
17938e57
RK
4208 && GET_CODE (hi_r) == CONST_INT
4209 && INTVAL (hi_r) < INTVAL (index_val))
4210 return const0_rtx;
7308a047
RS
4211
4212 /* If we get here, we have to generate the code for both cases
4213 (in range and out of range). */
4214
4215 op0 = gen_label_rtx ();
4216 op1 = gen_label_rtx ();
4217
4218 if (! (GET_CODE (index_val) == CONST_INT
4219 && GET_CODE (lo_r) == CONST_INT))
4220 {
17938e57
RK
4221 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4222 GET_MODE (index_val), 0, 0);
7308a047
RS
4223 emit_jump_insn (gen_blt (op1));
4224 }
4225
4226 if (! (GET_CODE (index_val) == CONST_INT
4227 && GET_CODE (hi_r) == CONST_INT))
4228 {
17938e57
RK
4229 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4230 GET_MODE (index_val), 0, 0);
7308a047
RS
4231 emit_jump_insn (gen_bgt (op1));
4232 }
4233
4234 /* Calculate the element number of bit zero in the first word
4235 of the set. */
4236 if (GET_CODE (lo_r) == CONST_INT)
17938e57
RK
4237 rlow = GEN_INT (INTVAL (lo_r)
4238 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 4239 else
17938e57
RK
4240 rlow = expand_binop (index_mode, and_optab, lo_r,
4241 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4242 NULL_RTX, 0, OPTAB_LIB_WIDEN);
7308a047
RS
4243
4244 diff = expand_binop (index_mode, sub_optab,
17938e57 4245 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
7308a047
RS
4246
4247 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
17938e57 4248 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
7308a047 4249 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
17938e57 4250 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
7308a047
RS
4251 addr = memory_address (byte_mode,
4252 expand_binop (index_mode, add_optab,
17938e57
RK
4253 diff, setaddr, NULL_RTX, 0,
4254 OPTAB_LIB_WIDEN));
7308a047
RS
4255 /* Extract the bit we want to examine */
4256 bit = expand_shift (RSHIFT_EXPR, byte_mode,
17938e57
RK
4257 gen_rtx (MEM, byte_mode, addr),
4258 make_tree (TREE_TYPE (index), rem),
4259 NULL_RTX, 1);
4260 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4261 GET_MODE (target) == byte_mode ? target : 0,
7308a047 4262 1, OPTAB_LIB_WIDEN);
17938e57
RK
4263
4264 if (result != target)
4265 convert_move (target, result, 1);
7308a047
RS
4266
4267 /* Output the code to handle the out-of-range case. */
4268 emit_jump (op0);
4269 emit_label (op1);
4270 emit_move_insn (target, const0_rtx);
4271 emit_label (op0);
4272 return target;
4273 }
4274
bbf6f052
RK
4275 case WITH_CLEANUP_EXPR:
4276 if (RTL_EXPR_RTL (exp) == 0)
4277 {
4278 RTL_EXPR_RTL (exp)
4279 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
906c4e36
RK
4280 cleanups_this_call
4281 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
bbf6f052
RK
4282 /* That's it for this cleanup. */
4283 TREE_OPERAND (exp, 2) = 0;
4284 }
4285 return RTL_EXPR_RTL (exp);
4286
4287 case CALL_EXPR:
4288 /* Check for a built-in function. */
4289 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4290 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4291 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4292 return expand_builtin (exp, target, subtarget, tmode, ignore);
4293 /* If this call was expanded already by preexpand_calls,
4294 just return the result we got. */
4295 if (CALL_EXPR_RTL (exp) != 0)
4296 return CALL_EXPR_RTL (exp);
8129842c 4297 return expand_call (exp, target, ignore);
bbf6f052
RK
4298
4299 case NON_LVALUE_EXPR:
4300 case NOP_EXPR:
4301 case CONVERT_EXPR:
4302 case REFERENCE_EXPR:
bbf6f052
RK
4303 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4304 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4305 if (TREE_CODE (type) == UNION_TYPE)
4306 {
4307 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4308 if (target == 0)
4309 {
4310 if (mode == BLKmode)
4311 {
4312 if (TYPE_SIZE (type) == 0
4313 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4314 abort ();
4315 target = assign_stack_temp (BLKmode,
4316 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4317 + BITS_PER_UNIT - 1)
4318 / BITS_PER_UNIT, 0);
4319 }
4320 else
4321 target = gen_reg_rtx (mode);
4322 }
4323 if (GET_CODE (target) == MEM)
4324 /* Store data into beginning of memory target. */
4325 store_expr (TREE_OPERAND (exp, 0),
1499e0a8
RK
4326 change_address (target, TYPE_MODE (valtype), 0), 0);
4327
bbf6f052
RK
4328 else if (GET_CODE (target) == REG)
4329 /* Store this field into a union of the proper type. */
4330 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4331 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4332 VOIDmode, 0, 1,
4333 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4334 else
4335 abort ();
4336
4337 /* Return the entire union. */
4338 return target;
4339 }
1499e0a8 4340 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
4341 if (GET_MODE (op0) == mode)
4342 return op0;
4343 /* If arg is a constant integer being extended from a narrower mode,
4344 we must really truncate to get the extended bits right. Otherwise
4345 (unsigned long) (unsigned char) ("\377"[0])
4346 would come out as ffffffff. */
4347 if (GET_MODE (op0) == VOIDmode
4348 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4349 < GET_MODE_BITSIZE (mode)))
4350 {
4351 /* MODE must be narrower than HOST_BITS_PER_INT. */
4352 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4353
4354 if (width < HOST_BITS_PER_WIDE_INT)
4355 {
4356 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4357 : CONST_DOUBLE_LOW (op0));
4358 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4359 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4360 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4361 else
4362 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4363
4364 op0 = GEN_INT (val);
4365 }
4366 else
4367 {
4368 op0 = (simplify_unary_operation
4369 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4370 ? ZERO_EXTEND : SIGN_EXTEND),
4371 mode, op0,
4372 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4373 if (op0 == 0)
4374 abort ();
4375 }
4376 }
4377 if (GET_MODE (op0) == VOIDmode)
bbf6f052 4378 return op0;
26fcb35a
RS
4379 if (modifier == EXPAND_INITIALIZER)
4380 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
bbf6f052
RK
4381 if (flag_force_mem && GET_CODE (op0) == MEM)
4382 op0 = copy_to_reg (op0);
4383
4384 if (target == 0)
4385 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4386 else
4387 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4388 return target;
4389
4390 case PLUS_EXPR:
4391 /* We come here from MINUS_EXPR when the second operand is a constant. */
4392 plus_expr:
4393 this_optab = add_optab;
4394
4395 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4396 something else, make sure we add the register to the constant and
4397 then to the other thing. This case can occur during strength
4398 reduction and doing it this way will produce better code if the
4399 frame pointer or argument pointer is eliminated.
4400
4401 fold-const.c will ensure that the constant is always in the inner
4402 PLUS_EXPR, so the only case we need to do anything about is if
4403 sp, ap, or fp is our second argument, in which case we must swap
4404 the innermost first argument and our second argument. */
4405
4406 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4407 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4408 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4409 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4410 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4411 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4412 {
4413 tree t = TREE_OPERAND (exp, 1);
4414
4415 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4416 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4417 }
4418
4419 /* If the result is to be Pmode and we are adding an integer to
4420 something, we might be forming a constant. So try to use
4421 plus_constant. If it produces a sum and we can't accept it,
4422 use force_operand. This allows P = &ARR[const] to generate
4423 efficient code on machines where a SYMBOL_REF is not a valid
4424 address.
4425
4426 If this is an EXPAND_SUM call, always return the sum. */
c980ac49
RS
4427 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4428 || mode == Pmode)
bbf6f052 4429 {
c980ac49
RS
4430 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4431 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4432 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
4433 {
4434 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4435 EXPAND_SUM);
4436 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4437 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4438 op1 = force_operand (op1, target);
4439 return op1;
4440 }
bbf6f052 4441
c980ac49
RS
4442 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4443 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4444 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
4445 {
4446 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4447 EXPAND_SUM);
4448 if (! CONSTANT_P (op0))
4449 {
4450 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4451 VOIDmode, modifier);
709f5be1
RS
4452 /* Don't go to both_summands if modifier
4453 says it's not right to return a PLUS. */
4454 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4455 goto binop2;
c980ac49
RS
4456 goto both_summands;
4457 }
4458 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4459 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4460 op0 = force_operand (op0, target);
4461 return op0;
4462 }
bbf6f052
RK
4463 }
4464
4465 /* No sense saving up arithmetic to be done
4466 if it's all in the wrong mode to form part of an address.
4467 And force_operand won't know whether to sign-extend or
4468 zero-extend. */
4469 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
c980ac49
RS
4470 || mode != Pmode)
4471 goto binop;
bbf6f052
RK
4472
4473 preexpand_calls (exp);
4474 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4475 subtarget = 0;
4476
4477 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
906c4e36 4478 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
bbf6f052 4479
c980ac49 4480 both_summands:
bbf6f052
RK
4481 /* Make sure any term that's a sum with a constant comes last. */
4482 if (GET_CODE (op0) == PLUS
4483 && CONSTANT_P (XEXP (op0, 1)))
4484 {
4485 temp = op0;
4486 op0 = op1;
4487 op1 = temp;
4488 }
4489 /* If adding to a sum including a constant,
4490 associate it to put the constant outside. */
4491 if (GET_CODE (op1) == PLUS
4492 && CONSTANT_P (XEXP (op1, 1)))
4493 {
4494 rtx constant_term = const0_rtx;
4495
4496 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4497 if (temp != 0)
4498 op0 = temp;
6f90e075
JW
4499 /* Ensure that MULT comes first if there is one. */
4500 else if (GET_CODE (op0) == MULT)
4501 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
bbf6f052
RK
4502 else
4503 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4504
4505 /* Let's also eliminate constants from op0 if possible. */
4506 op0 = eliminate_constant_term (op0, &constant_term);
4507
4508 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4509 their sum should be a constant. Form it into OP1, since the
4510 result we want will then be OP0 + OP1. */
4511
4512 temp = simplify_binary_operation (PLUS, mode, constant_term,
4513 XEXP (op1, 1));
4514 if (temp != 0)
4515 op1 = temp;
4516 else
4517 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4518 }
4519
4520 /* Put a constant term last and put a multiplication first. */
4521 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4522 temp = op1, op1 = op0, op0 = temp;
4523
4524 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4525 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4526
4527 case MINUS_EXPR:
4528 /* Handle difference of two symbolic constants,
4529 for the sake of an initializer. */
4530 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4531 && really_constant_p (TREE_OPERAND (exp, 0))
4532 && really_constant_p (TREE_OPERAND (exp, 1)))
4533 {
906c4e36
RK
4534 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4535 VOIDmode, modifier);
4536 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4537 VOIDmode, modifier);
bbf6f052
RK
4538 return gen_rtx (MINUS, mode, op0, op1);
4539 }
4540 /* Convert A - const to A + (-const). */
4541 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4542 {
4543 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4544 fold (build1 (NEGATE_EXPR, type,
4545 TREE_OPERAND (exp, 1))));
4546 goto plus_expr;
4547 }
4548 this_optab = sub_optab;
4549 goto binop;
4550
4551 case MULT_EXPR:
4552 preexpand_calls (exp);
4553 /* If first operand is constant, swap them.
4554 Thus the following special case checks need only
4555 check the second operand. */
4556 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4557 {
4558 register tree t1 = TREE_OPERAND (exp, 0);
4559 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4560 TREE_OPERAND (exp, 1) = t1;
4561 }
4562
4563 /* Attempt to return something suitable for generating an
4564 indexed address, for machines that support that. */
4565
4566 if (modifier == EXPAND_SUM && mode == Pmode
4567 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 4568 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
4569 {
4570 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4571
4572 /* Apply distributive law if OP0 is x+c. */
4573 if (GET_CODE (op0) == PLUS
4574 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4575 return gen_rtx (PLUS, mode,
4576 gen_rtx (MULT, mode, XEXP (op0, 0),
906c4e36
RK
4577 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4578 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4579 * INTVAL (XEXP (op0, 1))));
bbf6f052
RK
4580
4581 if (GET_CODE (op0) != REG)
906c4e36 4582 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
4583 if (GET_CODE (op0) != REG)
4584 op0 = copy_to_mode_reg (mode, op0);
4585
4586 return gen_rtx (MULT, mode, op0,
906c4e36 4587 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
4588 }
4589
4590 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4591 subtarget = 0;
4592
4593 /* Check for multiplying things that have been extended
4594 from a narrower type. If this machine supports multiplying
4595 in that narrower type with a result in the desired type,
4596 do it that way, and avoid the explicit type-conversion. */
4597 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4598 && TREE_CODE (type) == INTEGER_TYPE
4599 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4600 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4601 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4602 && int_fits_type_p (TREE_OPERAND (exp, 1),
4603 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4604 /* Don't use a widening multiply if a shift will do. */
4605 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 4606 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
4607 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4608 ||
4609 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4610 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4611 ==
4612 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4613 /* If both operands are extended, they must either both
4614 be zero-extended or both be sign-extended. */
4615 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4616 ==
4617 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4618 {
4619 enum machine_mode innermode
4620 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4621 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4622 ? umul_widen_optab : smul_widen_optab);
4623 if (mode == GET_MODE_WIDER_MODE (innermode)
4624 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4625 {
4626 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
906c4e36 4627 NULL_RTX, VOIDmode, 0);
bbf6f052 4628 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
906c4e36
RK
4629 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4630 VOIDmode, 0);
bbf6f052
RK
4631 else
4632 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
906c4e36 4633 NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4634 goto binop2;
4635 }
4636 }
4637 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 4638 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4639 return expand_mult (mode, op0, op1, target, unsignedp);
4640
4641 case TRUNC_DIV_EXPR:
4642 case FLOOR_DIV_EXPR:
4643 case CEIL_DIV_EXPR:
4644 case ROUND_DIV_EXPR:
4645 case EXACT_DIV_EXPR:
4646 preexpand_calls (exp);
4647 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4648 subtarget = 0;
4649 /* Possible optimization: compute the dividend with EXPAND_SUM
4650 then if the divisor is constant can optimize the case
4651 where some terms of the dividend have coeffs divisible by it. */
4652 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 4653 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4654 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4655
4656 case RDIV_EXPR:
4657 this_optab = flodiv_optab;
4658 goto binop;
4659
4660 case TRUNC_MOD_EXPR:
4661 case FLOOR_MOD_EXPR:
4662 case CEIL_MOD_EXPR:
4663 case ROUND_MOD_EXPR:
4664 preexpand_calls (exp);
4665 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4666 subtarget = 0;
4667 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 4668 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4669 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4670
4671 case FIX_ROUND_EXPR:
4672 case FIX_FLOOR_EXPR:
4673 case FIX_CEIL_EXPR:
4674 abort (); /* Not used for C. */
4675
4676 case FIX_TRUNC_EXPR:
906c4e36 4677 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4678 if (target == 0)
4679 target = gen_reg_rtx (mode);
4680 expand_fix (target, op0, unsignedp);
4681 return target;
4682
4683 case FLOAT_EXPR:
906c4e36 4684 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4685 if (target == 0)
4686 target = gen_reg_rtx (mode);
4687 /* expand_float can't figure out what to do if FROM has VOIDmode.
4688 So give it the correct mode. With -O, cse will optimize this. */
4689 if (GET_MODE (op0) == VOIDmode)
4690 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4691 op0);
4692 expand_float (target, op0,
4693 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4694 return target;
4695
4696 case NEGATE_EXPR:
4697 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4698 temp = expand_unop (mode, neg_optab, op0, target, 0);
4699 if (temp == 0)
4700 abort ();
4701 return temp;
4702
4703 case ABS_EXPR:
4704 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4705
2d7050fd
RS
4706 /* Handle complex values specially. */
4707 {
4708 enum machine_mode opmode
4709 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4710
4711 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4712 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4713 return expand_complex_abs (opmode, op0, target, unsignedp);
4714 }
4715
bbf6f052
RK
4716 /* Unsigned abs is simply the operand. Testing here means we don't
4717 risk generating incorrect code below. */
4718 if (TREE_UNSIGNED (type))
4719 return op0;
4720
4721 /* First try to do it with a special abs instruction. */
4722 temp = expand_unop (mode, abs_optab, op0, target, 0);
4723 if (temp != 0)
4724 return temp;
4725
4726 /* If this machine has expensive jumps, we can do integer absolute
4727 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4728 where W is the width of MODE. */
4729
4730 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4731 {
4732 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4733 size_int (GET_MODE_BITSIZE (mode) - 1),
906c4e36 4734 NULL_RTX, 0);
bbf6f052
RK
4735
4736 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4737 OPTAB_LIB_WIDEN);
4738 if (temp != 0)
4739 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4740 OPTAB_LIB_WIDEN);
4741
4742 if (temp != 0)
4743 return temp;
4744 }
4745
4746 /* If that does not win, use conditional jump and negate. */
4747 target = original_target;
4748 temp = gen_label_rtx ();
4749 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4750 || (GET_CODE (target) == REG
4751 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4752 target = gen_reg_rtx (mode);
4753 emit_move_insn (target, op0);
4754 emit_cmp_insn (target,
4755 expand_expr (convert (type, integer_zero_node),
906c4e36
RK
4756 NULL_RTX, VOIDmode, 0),
4757 GE, NULL_RTX, mode, 0, 0);
bbf6f052
RK
4758 NO_DEFER_POP;
4759 emit_jump_insn (gen_bge (temp));
4760 op0 = expand_unop (mode, neg_optab, target, target, 0);
4761 if (op0 != target)
4762 emit_move_insn (target, op0);
4763 emit_label (temp);
4764 OK_DEFER_POP;
4765 return target;
4766
4767 case MAX_EXPR:
4768 case MIN_EXPR:
4769 target = original_target;
4770 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4771 || (GET_CODE (target) == REG
4772 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4773 target = gen_reg_rtx (mode);
906c4e36 4774 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4775 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4776
4777 /* First try to do it with a special MIN or MAX instruction.
4778 If that does not win, use a conditional jump to select the proper
4779 value. */
4780 this_optab = (TREE_UNSIGNED (type)
4781 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4782 : (code == MIN_EXPR ? smin_optab : smax_optab));
4783
4784 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4785 OPTAB_WIDEN);
4786 if (temp != 0)
4787 return temp;
4788
4789 if (target != op0)
4790 emit_move_insn (target, op0);
4791 op0 = gen_label_rtx ();
f81497d9
RS
4792 /* If this mode is an integer too wide to compare properly,
4793 compare word by word. Rely on cse to optimize constant cases. */
4794 if (GET_MODE_CLASS (mode) == MODE_INT
4795 && !can_compare_p (mode))
bbf6f052 4796 {
f81497d9
RS
4797 if (code == MAX_EXPR)
4798 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
bbf6f052 4799 else
f81497d9 4800 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
bbf6f052
RK
4801 emit_move_insn (target, op1);
4802 }
f81497d9
RS
4803 else
4804 {
4805 if (code == MAX_EXPR)
4806 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4807 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4808 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4809 else
4810 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4811 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4812 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4813 if (temp == const0_rtx)
4814 emit_move_insn (target, op1);
4815 else if (temp != const_true_rtx)
4816 {
4817 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4818 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4819 else
4820 abort ();
4821 emit_move_insn (target, op1);
4822 }
4823 }
bbf6f052
RK
4824 emit_label (op0);
4825 return target;
4826
4827/* ??? Can optimize when the operand of this is a bitwise operation,
4828 by using a different bitwise operation. */
4829 case BIT_NOT_EXPR:
4830 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4831 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4832 if (temp == 0)
4833 abort ();
4834 return temp;
4835
4836 case FFS_EXPR:
4837 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4838 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4839 if (temp == 0)
4840 abort ();
4841 return temp;
4842
4843/* ??? Can optimize bitwise operations with one arg constant.
4844 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4845 and (a bitwise1 b) bitwise2 b (etc)
4846 but that is probably not worth while. */
4847
4848/* BIT_AND_EXPR is for bitwise anding.
4849 TRUTH_AND_EXPR is for anding two boolean values
4850 when we want in all cases to compute both of them.
4851 In general it is fastest to do TRUTH_AND_EXPR by
4852 computing both operands as actual zero-or-1 values
4853 and then bitwise anding. In cases where there cannot
4854 be any side effects, better code would be made by
4855 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4856 but the question is how to recognize those cases. */
4857
4858 case TRUTH_AND_EXPR:
4859 case BIT_AND_EXPR:
4860 this_optab = and_optab;
4861 goto binop;
4862
4863/* See comment above about TRUTH_AND_EXPR; it applies here too. */
4864 case TRUTH_OR_EXPR:
4865 case BIT_IOR_EXPR:
4866 this_optab = ior_optab;
4867 goto binop;
4868
874726a8 4869 case TRUTH_XOR_EXPR:
bbf6f052
RK
4870 case BIT_XOR_EXPR:
4871 this_optab = xor_optab;
4872 goto binop;
4873
4874 case LSHIFT_EXPR:
4875 case RSHIFT_EXPR:
4876 case LROTATE_EXPR:
4877 case RROTATE_EXPR:
4878 preexpand_calls (exp);
4879 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4880 subtarget = 0;
4881 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4882 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4883 unsignedp);
4884
4885/* Could determine the answer when only additive constants differ.
4886 Also, the addition of one can be handled by changing the condition. */
4887 case LT_EXPR:
4888 case LE_EXPR:
4889 case GT_EXPR:
4890 case GE_EXPR:
4891 case EQ_EXPR:
4892 case NE_EXPR:
4893 preexpand_calls (exp);
4894 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4895 if (temp != 0)
4896 return temp;
4897 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4898 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4899 && original_target
4900 && GET_CODE (original_target) == REG
4901 && (GET_MODE (original_target)
4902 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4903 {
4904 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4905 if (temp != original_target)
4906 temp = copy_to_reg (temp);
4907 op1 = gen_label_rtx ();
906c4e36 4908 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
bbf6f052
RK
4909 GET_MODE (temp), unsignedp, 0);
4910 emit_jump_insn (gen_beq (op1));
4911 emit_move_insn (temp, const1_rtx);
4912 emit_label (op1);
4913 return temp;
4914 }
4915 /* If no set-flag instruction, must generate a conditional
4916 store into a temporary variable. Drop through
4917 and handle this like && and ||. */
4918
4919 case TRUTH_ANDIF_EXPR:
4920 case TRUTH_ORIF_EXPR:
e44842fe
RK
4921 if (! ignore
4922 && (target == 0 || ! safe_from_p (target, exp)
4923 /* Make sure we don't have a hard reg (such as function's return
4924 value) live across basic blocks, if not optimizing. */
4925 || (!optimize && GET_CODE (target) == REG
4926 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 4927 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
4928
4929 if (target)
4930 emit_clr_insn (target);
4931
bbf6f052
RK
4932 op1 = gen_label_rtx ();
4933 jumpifnot (exp, op1);
e44842fe
RK
4934
4935 if (target)
4936 emit_0_to_1_insn (target);
4937
bbf6f052 4938 emit_label (op1);
e44842fe 4939 return ignore ? const0_rtx : target;
bbf6f052
RK
4940
4941 case TRUTH_NOT_EXPR:
4942 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4943 /* The parser is careful to generate TRUTH_NOT_EXPR
4944 only with operands that are always zero or one. */
906c4e36 4945 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
4946 target, 1, OPTAB_LIB_WIDEN);
4947 if (temp == 0)
4948 abort ();
4949 return temp;
4950
4951 case COMPOUND_EXPR:
4952 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4953 emit_queue ();
4954 return expand_expr (TREE_OPERAND (exp, 1),
4955 (ignore ? const0_rtx : target),
4956 VOIDmode, 0);
4957
4958 case COND_EXPR:
4959 {
4960 /* Note that COND_EXPRs whose type is a structure or union
4961 are required to be constructed to contain assignments of
4962 a temporary variable, so that we can evaluate them here
4963 for side effect only. If type is void, we must do likewise. */
4964
4965 /* If an arm of the branch requires a cleanup,
4966 only that cleanup is performed. */
4967
4968 tree singleton = 0;
4969 tree binary_op = 0, unary_op = 0;
4970 tree old_cleanups = cleanups_this_call;
4971 cleanups_this_call = 0;
4972
4973 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4974 convert it to our mode, if necessary. */
4975 if (integer_onep (TREE_OPERAND (exp, 1))
4976 && integer_zerop (TREE_OPERAND (exp, 2))
4977 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4978 {
dd27116b
RK
4979 if (ignore)
4980 {
4981 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4982 modifier);
4983 return const0_rtx;
4984 }
4985
bbf6f052
RK
4986 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4987 if (GET_MODE (op0) == mode)
4988 return op0;
4989 if (target == 0)
4990 target = gen_reg_rtx (mode);
4991 convert_move (target, op0, unsignedp);
4992 return target;
4993 }
4994
4995 /* If we are not to produce a result, we have no target. Otherwise,
4996 if a target was specified use it; it will not be used as an
4997 intermediate target unless it is safe. If no target, use a
4998 temporary. */
4999
dd27116b 5000 if (ignore)
bbf6f052
RK
5001 temp = 0;
5002 else if (original_target
5003 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
5004 temp = original_target;
5005 else if (mode == BLKmode)
5006 {
5007 if (TYPE_SIZE (type) == 0
5008 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5009 abort ();
5010 temp = assign_stack_temp (BLKmode,
5011 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5012 + BITS_PER_UNIT - 1)
5013 / BITS_PER_UNIT, 0);
5014 }
5015 else
5016 temp = gen_reg_rtx (mode);
5017
5018 /* Check for X ? A + B : A. If we have this, we can copy
5019 A to the output and conditionally add B. Similarly for unary
5020 operations. Don't do this if X has side-effects because
5021 those side effects might affect A or B and the "?" operation is
5022 a sequence point in ANSI. (We test for side effects later.) */
5023
5024 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5025 && operand_equal_p (TREE_OPERAND (exp, 2),
5026 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5027 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5028 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5029 && operand_equal_p (TREE_OPERAND (exp, 1),
5030 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5031 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5032 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5033 && operand_equal_p (TREE_OPERAND (exp, 2),
5034 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5035 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5036 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5037 && operand_equal_p (TREE_OPERAND (exp, 1),
5038 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5039 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5040
5041 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5042 operation, do this as A + (X != 0). Similarly for other simple
5043 binary operators. */
dd27116b 5044 if (temp && singleton && binary_op
bbf6f052
RK
5045 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5046 && (TREE_CODE (binary_op) == PLUS_EXPR
5047 || TREE_CODE (binary_op) == MINUS_EXPR
5048 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5049 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5050 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5051 && integer_onep (TREE_OPERAND (binary_op, 1))
5052 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5053 {
5054 rtx result;
5055 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5056 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5057 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5058 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5059 : and_optab);
5060
5061 /* If we had X ? A : A + 1, do this as A + (X == 0).
5062
5063 We have to invert the truth value here and then put it
5064 back later if do_store_flag fails. We cannot simply copy
5065 TREE_OPERAND (exp, 0) to another variable and modify that
5066 because invert_truthvalue can modify the tree pointed to
5067 by its argument. */
5068 if (singleton == TREE_OPERAND (exp, 1))
5069 TREE_OPERAND (exp, 0)
5070 = invert_truthvalue (TREE_OPERAND (exp, 0));
5071
5072 result = do_store_flag (TREE_OPERAND (exp, 0),
906c4e36
RK
5073 (safe_from_p (temp, singleton)
5074 ? temp : NULL_RTX),
bbf6f052
RK
5075 mode, BRANCH_COST <= 1);
5076
5077 if (result)
5078 {
906c4e36 5079 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5080 return expand_binop (mode, boptab, op1, result, temp,
5081 unsignedp, OPTAB_LIB_WIDEN);
5082 }
5083 else if (singleton == TREE_OPERAND (exp, 1))
5084 TREE_OPERAND (exp, 0)
5085 = invert_truthvalue (TREE_OPERAND (exp, 0));
5086 }
5087
5088 NO_DEFER_POP;
5089 op0 = gen_label_rtx ();
5090
5091 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5092 {
5093 if (temp != 0)
5094 {
5095 /* If the target conflicts with the other operand of the
5096 binary op, we can't use it. Also, we can't use the target
5097 if it is a hard register, because evaluating the condition
5098 might clobber it. */
5099 if ((binary_op
5100 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5101 || (GET_CODE (temp) == REG
5102 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5103 temp = gen_reg_rtx (mode);
5104 store_expr (singleton, temp, 0);
5105 }
5106 else
906c4e36 5107 expand_expr (singleton,
2937cf87 5108 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5109 if (cleanups_this_call)
5110 {
5111 sorry ("aggregate value in COND_EXPR");
5112 cleanups_this_call = 0;
5113 }
5114 if (singleton == TREE_OPERAND (exp, 1))
5115 jumpif (TREE_OPERAND (exp, 0), op0);
5116 else
5117 jumpifnot (TREE_OPERAND (exp, 0), op0);
5118
5119 if (binary_op && temp == 0)
5120 /* Just touch the other operand. */
5121 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 5122 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5123 else if (binary_op)
5124 store_expr (build (TREE_CODE (binary_op), type,
5125 make_tree (type, temp),
5126 TREE_OPERAND (binary_op, 1)),
5127 temp, 0);
5128 else
5129 store_expr (build1 (TREE_CODE (unary_op), type,
5130 make_tree (type, temp)),
5131 temp, 0);
5132 op1 = op0;
5133 }
5134#if 0
5135 /* This is now done in jump.c and is better done there because it
5136 produces shorter register lifetimes. */
5137
5138 /* Check for both possibilities either constants or variables
5139 in registers (but not the same as the target!). If so, can
5140 save branches by assigning one, branching, and assigning the
5141 other. */
5142 else if (temp && GET_MODE (temp) != BLKmode
5143 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5144 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5145 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5146 && DECL_RTL (TREE_OPERAND (exp, 1))
5147 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5148 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5149 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5150 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5151 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5152 && DECL_RTL (TREE_OPERAND (exp, 2))
5153 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5154 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5155 {
5156 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5157 temp = gen_reg_rtx (mode);
5158 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5159 jumpifnot (TREE_OPERAND (exp, 0), op0);
5160 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5161 op1 = op0;
5162 }
5163#endif
5164 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5165 comparison operator. If we have one of these cases, set the
5166 output to A, branch on A (cse will merge these two references),
5167 then set the output to FOO. */
5168 else if (temp
5169 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5170 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5171 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5172 TREE_OPERAND (exp, 1), 0)
5173 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5174 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5175 {
5176 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5177 temp = gen_reg_rtx (mode);
5178 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5179 jumpif (TREE_OPERAND (exp, 0), op0);
5180 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5181 op1 = op0;
5182 }
5183 else if (temp
5184 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5185 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5186 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5187 TREE_OPERAND (exp, 2), 0)
5188 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5189 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5190 {
5191 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5192 temp = gen_reg_rtx (mode);
5193 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5194 jumpifnot (TREE_OPERAND (exp, 0), op0);
5195 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5196 op1 = op0;
5197 }
5198 else
5199 {
5200 op1 = gen_label_rtx ();
5201 jumpifnot (TREE_OPERAND (exp, 0), op0);
5202 if (temp != 0)
5203 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5204 else
906c4e36
RK
5205 expand_expr (TREE_OPERAND (exp, 1),
5206 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5207 if (cleanups_this_call)
5208 {
5209 sorry ("aggregate value in COND_EXPR");
5210 cleanups_this_call = 0;
5211 }
5212
5213 emit_queue ();
5214 emit_jump_insn (gen_jump (op1));
5215 emit_barrier ();
5216 emit_label (op0);
5217 if (temp != 0)
5218 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5219 else
906c4e36
RK
5220 expand_expr (TREE_OPERAND (exp, 2),
5221 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5222 }
5223
5224 if (cleanups_this_call)
5225 {
5226 sorry ("aggregate value in COND_EXPR");
5227 cleanups_this_call = 0;
5228 }
5229
5230 emit_queue ();
5231 emit_label (op1);
5232 OK_DEFER_POP;
5233 cleanups_this_call = old_cleanups;
5234 return temp;
5235 }
5236
5237 case TARGET_EXPR:
5238 {
5239 /* Something needs to be initialized, but we didn't know
5240 where that thing was when building the tree. For example,
5241 it could be the return value of a function, or a parameter
5242 to a function which lays down in the stack, or a temporary
5243 variable which must be passed by reference.
5244
5245 We guarantee that the expression will either be constructed
5246 or copied into our original target. */
5247
5248 tree slot = TREE_OPERAND (exp, 0);
5c062816 5249 tree exp1;
bbf6f052
RK
5250
5251 if (TREE_CODE (slot) != VAR_DECL)
5252 abort ();
5253
5254 if (target == 0)
5255 {
5256 if (DECL_RTL (slot) != 0)
ac993f4f
MS
5257 {
5258 target = DECL_RTL (slot);
5c062816 5259 /* If we have already expanded the slot, so don't do
ac993f4f 5260 it again. (mrs) */
5c062816
MS
5261 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5262 return target;
ac993f4f 5263 }
bbf6f052
RK
5264 else
5265 {
5266 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5267 /* All temp slots at this level must not conflict. */
5268 preserve_temp_slots (target);
5269 DECL_RTL (slot) = target;
5270 }
5271
5272#if 0
ac993f4f
MS
5273 /* I bet this needs to be done, and I bet that it needs to
5274 be above, inside the else clause. The reason is
5275 simple, how else is it going to get cleaned up? (mrs)
5276
5277 The reason is probably did not work before, and was
5278 commented out is because this was re-expanding already
5279 expanded target_exprs (target == 0 and DECL_RTL (slot)
5280 != 0) also cleaning them up many times as well. :-( */
5281
bbf6f052
RK
5282 /* Since SLOT is not known to the called function
5283 to belong to its stack frame, we must build an explicit
5284 cleanup. This case occurs when we must build up a reference
5285 to pass the reference as an argument. In this case,
5286 it is very likely that such a reference need not be
5287 built here. */
5288
5289 if (TREE_OPERAND (exp, 2) == 0)
5290 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5291 if (TREE_OPERAND (exp, 2))
906c4e36
RK
5292 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5293 cleanups_this_call);
bbf6f052
RK
5294#endif
5295 }
5296 else
5297 {
5298 /* This case does occur, when expanding a parameter which
5299 needs to be constructed on the stack. The target
5300 is the actual stack address that we want to initialize.
5301 The function we call will perform the cleanup in this case. */
5302
8c042b47
RS
5303 /* If we have already assigned it space, use that space,
5304 not target that we were passed in, as our target
5305 parameter is only a hint. */
5306 if (DECL_RTL (slot) != 0)
5307 {
5308 target = DECL_RTL (slot);
5309 /* If we have already expanded the slot, so don't do
5310 it again. (mrs) */
5311 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5312 return target;
5313 }
5314
bbf6f052
RK
5315 DECL_RTL (slot) = target;
5316 }
5317
5c062816
MS
5318 exp1 = TREE_OPERAND (exp, 1);
5319 /* Mark it as expanded. */
5320 TREE_OPERAND (exp, 1) = NULL_TREE;
5321
5322 return expand_expr (exp1, target, tmode, modifier);
bbf6f052
RK
5323 }
5324
5325 case INIT_EXPR:
5326 {
5327 tree lhs = TREE_OPERAND (exp, 0);
5328 tree rhs = TREE_OPERAND (exp, 1);
5329 tree noncopied_parts = 0;
5330 tree lhs_type = TREE_TYPE (lhs);
5331
5332 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5333 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5334 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5335 TYPE_NONCOPIED_PARTS (lhs_type));
5336 while (noncopied_parts != 0)
5337 {
5338 expand_assignment (TREE_VALUE (noncopied_parts),
5339 TREE_PURPOSE (noncopied_parts), 0, 0);
5340 noncopied_parts = TREE_CHAIN (noncopied_parts);
5341 }
5342 return temp;
5343 }
5344
5345 case MODIFY_EXPR:
5346 {
5347 /* If lhs is complex, expand calls in rhs before computing it.
5348 That's so we don't compute a pointer and save it over a call.
5349 If lhs is simple, compute it first so we can give it as a
5350 target if the rhs is just a call. This avoids an extra temp and copy
5351 and that prevents a partial-subsumption which makes bad code.
5352 Actually we could treat component_ref's of vars like vars. */
5353
5354 tree lhs = TREE_OPERAND (exp, 0);
5355 tree rhs = TREE_OPERAND (exp, 1);
5356 tree noncopied_parts = 0;
5357 tree lhs_type = TREE_TYPE (lhs);
5358
5359 temp = 0;
5360
5361 if (TREE_CODE (lhs) != VAR_DECL
5362 && TREE_CODE (lhs) != RESULT_DECL
5363 && TREE_CODE (lhs) != PARM_DECL)
5364 preexpand_calls (exp);
5365
5366 /* Check for |= or &= of a bitfield of size one into another bitfield
5367 of size 1. In this case, (unless we need the result of the
5368 assignment) we can do this more efficiently with a
5369 test followed by an assignment, if necessary.
5370
5371 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5372 things change so we do, this code should be enhanced to
5373 support it. */
5374 if (ignore
5375 && TREE_CODE (lhs) == COMPONENT_REF
5376 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5377 || TREE_CODE (rhs) == BIT_AND_EXPR)
5378 && TREE_OPERAND (rhs, 0) == lhs
5379 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5380 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5381 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5382 {
5383 rtx label = gen_label_rtx ();
5384
5385 do_jump (TREE_OPERAND (rhs, 1),
5386 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5387 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5388 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5389 (TREE_CODE (rhs) == BIT_IOR_EXPR
5390 ? integer_one_node
5391 : integer_zero_node)),
5392 0, 0);
e7c33f54 5393 do_pending_stack_adjust ();
bbf6f052
RK
5394 emit_label (label);
5395 return const0_rtx;
5396 }
5397
5398 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5399 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5400 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5401 TYPE_NONCOPIED_PARTS (lhs_type));
5402
5403 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5404 while (noncopied_parts != 0)
5405 {
5406 expand_assignment (TREE_PURPOSE (noncopied_parts),
5407 TREE_VALUE (noncopied_parts), 0, 0);
5408 noncopied_parts = TREE_CHAIN (noncopied_parts);
5409 }
5410 return temp;
5411 }
5412
5413 case PREINCREMENT_EXPR:
5414 case PREDECREMENT_EXPR:
5415 return expand_increment (exp, 0);
5416
5417 case POSTINCREMENT_EXPR:
5418 case POSTDECREMENT_EXPR:
5419 /* Faster to treat as pre-increment if result is not used. */
5420 return expand_increment (exp, ! ignore);
5421
5422 case ADDR_EXPR:
5423 /* Are we taking the address of a nested function? */
5424 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5425 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5426 {
5427 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5428 op0 = force_operand (op0, target);
5429 }
5430 else
5431 {
906c4e36 5432 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
bbf6f052
RK
5433 (modifier == EXPAND_INITIALIZER
5434 ? modifier : EXPAND_CONST_ADDRESS));
896102d0
RK
5435
5436 /* We would like the object in memory. If it is a constant,
5437 we can have it be statically allocated into memory. For
5438 a non-constant (REG or SUBREG), we need to allocate some
5439 memory and store the value into it. */
5440
5441 if (CONSTANT_P (op0))
5442 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5443 op0);
5444
b6f01001
RS
5445 /* These cases happen in Fortran. Is that legitimate?
5446 Should Fortran work in another way?
5447 Do they happen in C? */
5448 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
5449 || GET_CODE (op0) == CONCAT)
896102d0
RK
5450 {
5451 /* If this object is in a register, it must be not
5452 be BLKmode. */
5453 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5454 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5455 rtx memloc
5456 = assign_stack_temp (inner_mode,
5457 int_size_in_bytes (inner_type), 1);
5458
5459 emit_move_insn (memloc, op0);
5460 op0 = memloc;
5461 }
5462
bbf6f052
RK
5463 if (GET_CODE (op0) != MEM)
5464 abort ();
5465
5466 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5467 return XEXP (op0, 0);
5468 op0 = force_operand (XEXP (op0, 0), target);
5469 }
5470 if (flag_force_addr && GET_CODE (op0) != REG)
5471 return force_reg (Pmode, op0);
5472 return op0;
5473
5474 case ENTRY_VALUE_EXPR:
5475 abort ();
5476
7308a047
RS
5477 /* COMPLEX type for Extended Pascal & Fortran */
5478 case COMPLEX_EXPR:
5479 {
5480 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5481
5482 rtx prev;
5483
5484 /* Get the rtx code of the operands. */
5485 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5486 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5487
5488 if (! target)
5489 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5490
5491 prev = get_last_insn ();
5492
5493 /* Tell flow that the whole of the destination is being set. */
5494 if (GET_CODE (target) == REG)
5495 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5496
5497 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
5498 emit_move_insn (gen_realpart (mode, target), op0);
5499 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047
RS
5500
5501 /* Complex construction should appear as a single unit. */
5502 group_insns (prev);
5503
5504 return target;
5505 }
5506
5507 case REALPART_EXPR:
2d7050fd
RS
5508 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5509 return gen_realpart (mode, op0);
7308a047
RS
5510
5511 case IMAGPART_EXPR:
2d7050fd
RS
5512 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5513 return gen_imagpart (mode, op0);
7308a047
RS
5514
5515 case CONJ_EXPR:
5516 {
5517 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5518 rtx imag_t;
5519 rtx prev;
5520
5521 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5522
5523 if (! target)
5524 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5525
5526 prev = get_last_insn ();
5527
5528 /* Tell flow that the whole of the destination is being set. */
5529 if (GET_CODE (target) == REG)
5530 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5531
5532 /* Store the realpart and the negated imagpart to target. */
2d7050fd 5533 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
7308a047 5534
2d7050fd 5535 imag_t = gen_imagpart (mode, target);
7308a047 5536 temp = expand_unop (mode, neg_optab,
2d7050fd 5537 gen_imagpart (mode, op0), imag_t, 0);
7308a047
RS
5538 if (temp != imag_t)
5539 emit_move_insn (imag_t, temp);
5540
5541 /* Conjugate should appear as a single unit */
5542 group_insns (prev);
5543
5544 return target;
5545 }
5546
bbf6f052 5547 case ERROR_MARK:
66538193
RS
5548 op0 = CONST0_RTX (tmode);
5549 if (op0 != 0)
5550 return op0;
bbf6f052
RK
5551 return const0_rtx;
5552
5553 default:
5554 return (*lang_expand_expr) (exp, target, tmode, modifier);
5555 }
5556
5557 /* Here to do an ordinary binary operator, generating an instruction
5558 from the optab already placed in `this_optab'. */
5559 binop:
5560 preexpand_calls (exp);
5561 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5562 subtarget = 0;
5563 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 5564 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5565 binop2:
5566 temp = expand_binop (mode, this_optab, op0, op1, target,
5567 unsignedp, OPTAB_LIB_WIDEN);
5568 if (temp == 0)
5569 abort ();
5570 return temp;
5571}
5572\f
e87b4f3f
RS
5573/* Return the alignment in bits of EXP, a pointer valued expression.
5574 But don't return more than MAX_ALIGN no matter what.
bbf6f052
RK
5575 The alignment returned is, by default, the alignment of the thing that
5576 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5577
5578 Otherwise, look at the expression to see if we can do better, i.e., if the
5579 expression is actually pointing at an object whose alignment is tighter. */
5580
5581static int
5582get_pointer_alignment (exp, max_align)
5583 tree exp;
5584 unsigned max_align;
5585{
5586 unsigned align, inner;
5587
5588 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5589 return 0;
5590
5591 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5592 align = MIN (align, max_align);
5593
5594 while (1)
5595 {
5596 switch (TREE_CODE (exp))
5597 {
5598 case NOP_EXPR:
5599 case CONVERT_EXPR:
5600 case NON_LVALUE_EXPR:
5601 exp = TREE_OPERAND (exp, 0);
5602 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5603 return align;
5604 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5605 inner = MIN (inner, max_align);
5606 align = MAX (align, inner);
5607 break;
5608
5609 case PLUS_EXPR:
5610 /* If sum of pointer + int, restrict our maximum alignment to that
5611 imposed by the integer. If not, we can't do any better than
5612 ALIGN. */
5613 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5614 return align;
5615
e87b4f3f
RS
5616 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5617 & (max_align - 1))
5618 != 0)
bbf6f052
RK
5619 max_align >>= 1;
5620
5621 exp = TREE_OPERAND (exp, 0);
5622 break;
5623
5624 case ADDR_EXPR:
5625 /* See what we are pointing at and look at its alignment. */
5626 exp = TREE_OPERAND (exp, 0);
e7c33f54
RK
5627 if (TREE_CODE (exp) == FUNCTION_DECL)
5628 align = MAX (align, FUNCTION_BOUNDARY);
5629 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
bbf6f052
RK
5630 align = MAX (align, DECL_ALIGN (exp));
5631#ifdef CONSTANT_ALIGNMENT
5632 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5633 align = CONSTANT_ALIGNMENT (exp, align);
5634#endif
5635 return MIN (align, max_align);
5636
5637 default:
5638 return align;
5639 }
5640 }
5641}
5642\f
5643/* Return the tree node and offset if a given argument corresponds to
5644 a string constant. */
5645
5646static tree
5647string_constant (arg, ptr_offset)
5648 tree arg;
5649 tree *ptr_offset;
5650{
5651 STRIP_NOPS (arg);
5652
5653 if (TREE_CODE (arg) == ADDR_EXPR
5654 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5655 {
5656 *ptr_offset = integer_zero_node;
5657 return TREE_OPERAND (arg, 0);
5658 }
5659 else if (TREE_CODE (arg) == PLUS_EXPR)
5660 {
5661 tree arg0 = TREE_OPERAND (arg, 0);
5662 tree arg1 = TREE_OPERAND (arg, 1);
5663
5664 STRIP_NOPS (arg0);
5665 STRIP_NOPS (arg1);
5666
5667 if (TREE_CODE (arg0) == ADDR_EXPR
5668 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5669 {
5670 *ptr_offset = arg1;
5671 return TREE_OPERAND (arg0, 0);
5672 }
5673 else if (TREE_CODE (arg1) == ADDR_EXPR
5674 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5675 {
5676 *ptr_offset = arg0;
5677 return TREE_OPERAND (arg1, 0);
5678 }
5679 }
5680
5681 return 0;
5682}
5683
5684/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5685 way, because it could contain a zero byte in the middle.
5686 TREE_STRING_LENGTH is the size of the character array, not the string.
5687
5688 Unfortunately, string_constant can't access the values of const char
5689 arrays with initializers, so neither can we do so here. */
5690
5691static tree
5692c_strlen (src)
5693 tree src;
5694{
5695 tree offset_node;
5696 int offset, max;
5697 char *ptr;
5698
5699 src = string_constant (src, &offset_node);
5700 if (src == 0)
5701 return 0;
5702 max = TREE_STRING_LENGTH (src);
5703 ptr = TREE_STRING_POINTER (src);
5704 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5705 {
5706 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5707 compute the offset to the following null if we don't know where to
5708 start searching for it. */
5709 int i;
5710 for (i = 0; i < max; i++)
5711 if (ptr[i] == 0)
5712 return 0;
5713 /* We don't know the starting offset, but we do know that the string
5714 has no internal zero bytes. We can assume that the offset falls
5715 within the bounds of the string; otherwise, the programmer deserves
5716 what he gets. Subtract the offset from the length of the string,
5717 and return that. */
5718 /* This would perhaps not be valid if we were dealing with named
5719 arrays in addition to literal string constants. */
5720 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5721 }
5722
5723 /* We have a known offset into the string. Start searching there for
5724 a null character. */
5725 if (offset_node == 0)
5726 offset = 0;
5727 else
5728 {
5729 /* Did we get a long long offset? If so, punt. */
5730 if (TREE_INT_CST_HIGH (offset_node) != 0)
5731 return 0;
5732 offset = TREE_INT_CST_LOW (offset_node);
5733 }
5734 /* If the offset is known to be out of bounds, warn, and call strlen at
5735 runtime. */
5736 if (offset < 0 || offset > max)
5737 {
5738 warning ("offset outside bounds of constant string");
5739 return 0;
5740 }
5741 /* Use strlen to search for the first zero byte. Since any strings
5742 constructed with build_string will have nulls appended, we win even
5743 if we get handed something like (char[4])"abcd".
5744
5745 Since OFFSET is our starting index into the string, no further
5746 calculation is needed. */
5747 return size_int (strlen (ptr + offset));
5748}
5749\f
5750/* Expand an expression EXP that calls a built-in function,
5751 with result going to TARGET if that's convenient
5752 (and in mode MODE if that's convenient).
5753 SUBTARGET may be used as the target for computing one of EXP's operands.
5754 IGNORE is nonzero if the value is to be ignored. */
5755
5756static rtx
5757expand_builtin (exp, target, subtarget, mode, ignore)
5758 tree exp;
5759 rtx target;
5760 rtx subtarget;
5761 enum machine_mode mode;
5762 int ignore;
5763{
5764 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5765 tree arglist = TREE_OPERAND (exp, 1);
5766 rtx op0;
60bac6ea 5767 rtx lab1, insns;
bbf6f052 5768 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1bbddf11 5769 optab builtin_optab;
bbf6f052
RK
5770
5771 switch (DECL_FUNCTION_CODE (fndecl))
5772 {
5773 case BUILT_IN_ABS:
5774 case BUILT_IN_LABS:
5775 case BUILT_IN_FABS:
5776 /* build_function_call changes these into ABS_EXPR. */
5777 abort ();
5778
1bbddf11
JVA
5779 case BUILT_IN_SIN:
5780 case BUILT_IN_COS:
e87b4f3f
RS
5781 case BUILT_IN_FSQRT:
5782 /* If not optimizing, call the library function. */
8c8a8e34 5783 if (! optimize)
e87b4f3f
RS
5784 break;
5785
5786 if (arglist == 0
19deaec9 5787 /* Arg could be wrong type if user redeclared this fcn wrong. */
e87b4f3f 5788 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
19deaec9 5789 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
e87b4f3f 5790
db0e6d01
RS
5791 /* Stabilize and compute the argument. */
5792 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5793 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5794 {
5795 exp = copy_node (exp);
5796 arglist = copy_node (arglist);
5797 TREE_OPERAND (exp, 1) = arglist;
5798 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5799 }
e87b4f3f 5800 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
e7c33f54
RK
5801
5802 /* Make a suitable register to place result in. */
5803 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5804
c1f7c223 5805 emit_queue ();
8c8a8e34 5806 start_sequence ();
e7c33f54 5807
1bbddf11
JVA
5808 switch (DECL_FUNCTION_CODE (fndecl))
5809 {
5810 case BUILT_IN_SIN:
5811 builtin_optab = sin_optab; break;
5812 case BUILT_IN_COS:
5813 builtin_optab = cos_optab; break;
5814 case BUILT_IN_FSQRT:
5815 builtin_optab = sqrt_optab; break;
5816 default:
5817 abort ();
5818 }
5819
5820 /* Compute into TARGET.
e87b4f3f
RS
5821 Set TARGET to wherever the result comes back. */
5822 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
1bbddf11 5823 builtin_optab, op0, target, 0);
e7c33f54
RK
5824
5825 /* If we were unable to expand via the builtin, stop the
5826 sequence (without outputting the insns) and break, causing
5827 a call the the library function. */
e87b4f3f 5828 if (target == 0)
e7c33f54 5829 {
8c8a8e34 5830 end_sequence ();
e7c33f54
RK
5831 break;
5832 }
e87b4f3f 5833
60bac6ea
RS
5834 /* Check the results by default. But if flag_fast_math is turned on,
5835 then assume sqrt will always be called with valid arguments. */
5836
5837 if (! flag_fast_math)
5838 {
1bbddf11 5839 /* Don't define the builtin FP instructions
60bac6ea
RS
5840 if your machine is not IEEE. */
5841 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5842 abort ();
5843
5844 lab1 = gen_label_rtx ();
5845
5846 /* Test the result; if it is NaN, set errno=EDOM because
5847 the argument was not in the domain. */
5848 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5849 emit_jump_insn (gen_beq (lab1));
5850
5851#if TARGET_EDOM
5852 {
5853#ifdef GEN_ERRNO_RTX
5854 rtx errno_rtx = GEN_ERRNO_RTX;
5855#else
5856 rtx errno_rtx
5857 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5858#endif
5859
5860 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5861 }
5862#else
5863 /* We can't set errno=EDOM directly; let the library call do it.
5864 Pop the arguments right away in case the call gets deleted. */
5865 NO_DEFER_POP;
5866 expand_call (exp, target, 0);
5867 OK_DEFER_POP;
5868#endif
5869
5870 emit_label (lab1);
5871 }
e87b4f3f 5872
e7c33f54 5873 /* Output the entire sequence. */
8c8a8e34
JW
5874 insns = get_insns ();
5875 end_sequence ();
5876 emit_insns (insns);
e7c33f54
RK
5877
5878 return target;
5879
0006469d
TW
5880 /* __builtin_apply_args returns block of memory allocated on
5881 the stack into which is stored the arg pointer, structure
5882 value address, static chain, and all the registers that might
5883 possibly be used in performing a function call. The code is
5884 moved to the start of the function so the incoming values are
5885 saved. */
5886 case BUILT_IN_APPLY_ARGS:
5887 /* Don't do __builtin_apply_args more than once in a function.
5888 Save the result of the first call and reuse it. */
5889 if (apply_args_value != 0)
5890 return apply_args_value;
5891 {
5892 /* When this function is called, it means that registers must be
5893 saved on entry to this function. So we migrate the
5894 call to the first insn of this function. */
5895 rtx temp;
5896 rtx seq;
5897
5898 start_sequence ();
5899 temp = expand_builtin_apply_args ();
5900 seq = get_insns ();
5901 end_sequence ();
5902
5903 apply_args_value = temp;
5904
5905 /* Put the sequence after the NOTE that starts the function.
5906 If this is inside a SEQUENCE, make the outer-level insn
5907 chain current, so the code is placed at the start of the
5908 function. */
5909 push_topmost_sequence ();
5910 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5911 pop_topmost_sequence ();
5912 return temp;
5913 }
5914
5915 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5916 FUNCTION with a copy of the parameters described by
5917 ARGUMENTS, and ARGSIZE. It returns a block of memory
5918 allocated on the stack into which is stored all the registers
5919 that might possibly be used for returning the result of a
5920 function. ARGUMENTS is the value returned by
5921 __builtin_apply_args. ARGSIZE is the number of bytes of
5922 arguments that must be copied. ??? How should this value be
5923 computed? We'll also need a safe worst case value for varargs
5924 functions. */
5925 case BUILT_IN_APPLY:
5926 if (arglist == 0
5927 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5928 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5929 || TREE_CHAIN (arglist) == 0
5930 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5931 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5932 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5933 return const0_rtx;
5934 else
5935 {
5936 int i;
5937 tree t;
5938 rtx ops[3];
5939
5940 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
5941 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
5942
5943 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5944 }
5945
5946 /* __builtin_return (RESULT) causes the function to return the
5947 value described by RESULT. RESULT is address of the block of
5948 memory returned by __builtin_apply. */
5949 case BUILT_IN_RETURN:
5950 if (arglist
5951 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5952 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
5953 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
5954 NULL_RTX, VOIDmode, 0));
5955 return const0_rtx;
5956
bbf6f052
RK
5957 case BUILT_IN_SAVEREGS:
5958 /* Don't do __builtin_saveregs more than once in a function.
5959 Save the result of the first call and reuse it. */
5960 if (saveregs_value != 0)
5961 return saveregs_value;
5962 {
5963 /* When this function is called, it means that registers must be
5964 saved on entry to this function. So we migrate the
5965 call to the first insn of this function. */
5966 rtx temp;
5967 rtx seq;
5968 rtx valreg, saved_valreg;
5969
5970 /* Now really call the function. `expand_call' does not call
5971 expand_builtin, so there is no danger of infinite recursion here. */
5972 start_sequence ();
5973
5974#ifdef EXPAND_BUILTIN_SAVEREGS
5975 /* Do whatever the machine needs done in this case. */
5976 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5977#else
5978 /* The register where the function returns its value
5979 is likely to have something else in it, such as an argument.
5980 So preserve that register around the call. */
5981 if (value_mode != VOIDmode)
5982 {
5983 valreg = hard_libcall_value (value_mode);
5984 saved_valreg = gen_reg_rtx (value_mode);
5985 emit_move_insn (saved_valreg, valreg);
5986 }
5987
5988 /* Generate the call, putting the value in a pseudo. */
5989 temp = expand_call (exp, target, ignore);
5990
5991 if (value_mode != VOIDmode)
5992 emit_move_insn (valreg, saved_valreg);
5993#endif
5994
5995 seq = get_insns ();
5996 end_sequence ();
5997
5998 saveregs_value = temp;
5999
0006469d
TW
6000 /* Put the sequence after the NOTE that starts the function.
6001 If this is inside a SEQUENCE, make the outer-level insn
6002 chain current, so the code is placed at the start of the
6003 function. */
6004 push_topmost_sequence ();
bbf6f052 6005 emit_insns_before (seq, NEXT_INSN (get_insns ()));
0006469d 6006 pop_topmost_sequence ();
bbf6f052
RK
6007 return temp;
6008 }
6009
6010 /* __builtin_args_info (N) returns word N of the arg space info
6011 for the current function. The number and meanings of words
6012 is controlled by the definition of CUMULATIVE_ARGS. */
6013 case BUILT_IN_ARGS_INFO:
6014 {
6015 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
6016 int i;
6017 int *word_ptr = (int *) &current_function_args_info;
6018 tree type, elts, result;
6019
6020 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
6021 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6022 __FILE__, __LINE__);
6023
6024 if (arglist != 0)
6025 {
6026 tree arg = TREE_VALUE (arglist);
6027 if (TREE_CODE (arg) != INTEGER_CST)
42b85a55 6028 error ("argument of `__builtin_args_info' must be constant");
bbf6f052
RK
6029 else
6030 {
6031 int wordnum = TREE_INT_CST_LOW (arg);
6032
42b85a55
RS
6033 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
6034 error ("argument of `__builtin_args_info' out of range");
bbf6f052 6035 else
906c4e36 6036 return GEN_INT (word_ptr[wordnum]);
bbf6f052
RK
6037 }
6038 }
6039 else
42b85a55 6040 error ("missing argument in `__builtin_args_info'");
bbf6f052
RK
6041
6042 return const0_rtx;
6043
6044#if 0
6045 for (i = 0; i < nwords; i++)
6046 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
6047
6048 type = build_array_type (integer_type_node,
6049 build_index_type (build_int_2 (nwords, 0)));
6050 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
6051 TREE_CONSTANT (result) = 1;
6052 TREE_STATIC (result) = 1;
6053 result = build (INDIRECT_REF, build_pointer_type (type), result);
6054 TREE_CONSTANT (result) = 1;
906c4e36 6055 return expand_expr (result, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6056#endif
6057 }
6058
6059 /* Return the address of the first anonymous stack arg. */
6060 case BUILT_IN_NEXT_ARG:
6061 {
6062 tree fntype = TREE_TYPE (current_function_decl);
6063 if (!(TYPE_ARG_TYPES (fntype) != 0
6064 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
6065 != void_type_node)))
6066 {
6067 error ("`va_start' used in function with fixed args");
6068 return const0_rtx;
6069 }
6070 }
6071
6072 return expand_binop (Pmode, add_optab,
6073 current_function_internal_arg_pointer,
6074 current_function_arg_offset_rtx,
906c4e36 6075 NULL_RTX, 0, OPTAB_LIB_WIDEN);
bbf6f052
RK
6076
6077 case BUILT_IN_CLASSIFY_TYPE:
6078 if (arglist != 0)
6079 {
6080 tree type = TREE_TYPE (TREE_VALUE (arglist));
6081 enum tree_code code = TREE_CODE (type);
6082 if (code == VOID_TYPE)
906c4e36 6083 return GEN_INT (void_type_class);
bbf6f052 6084 if (code == INTEGER_TYPE)
906c4e36 6085 return GEN_INT (integer_type_class);
bbf6f052 6086 if (code == CHAR_TYPE)
906c4e36 6087 return GEN_INT (char_type_class);
bbf6f052 6088 if (code == ENUMERAL_TYPE)
906c4e36 6089 return GEN_INT (enumeral_type_class);
bbf6f052 6090 if (code == BOOLEAN_TYPE)
906c4e36 6091 return GEN_INT (boolean_type_class);
bbf6f052 6092 if (code == POINTER_TYPE)
906c4e36 6093 return GEN_INT (pointer_type_class);
bbf6f052 6094 if (code == REFERENCE_TYPE)
906c4e36 6095 return GEN_INT (reference_type_class);
bbf6f052 6096 if (code == OFFSET_TYPE)
906c4e36 6097 return GEN_INT (offset_type_class);
bbf6f052 6098 if (code == REAL_TYPE)
906c4e36 6099 return GEN_INT (real_type_class);
bbf6f052 6100 if (code == COMPLEX_TYPE)
906c4e36 6101 return GEN_INT (complex_type_class);
bbf6f052 6102 if (code == FUNCTION_TYPE)
906c4e36 6103 return GEN_INT (function_type_class);
bbf6f052 6104 if (code == METHOD_TYPE)
906c4e36 6105 return GEN_INT (method_type_class);
bbf6f052 6106 if (code == RECORD_TYPE)
906c4e36 6107 return GEN_INT (record_type_class);
e7f3c83f 6108 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
906c4e36 6109 return GEN_INT (union_type_class);
bbf6f052 6110 if (code == ARRAY_TYPE)
906c4e36 6111 return GEN_INT (array_type_class);
bbf6f052 6112 if (code == STRING_TYPE)
906c4e36 6113 return GEN_INT (string_type_class);
bbf6f052 6114 if (code == SET_TYPE)
906c4e36 6115 return GEN_INT (set_type_class);
bbf6f052 6116 if (code == FILE_TYPE)
906c4e36 6117 return GEN_INT (file_type_class);
bbf6f052 6118 if (code == LANG_TYPE)
906c4e36 6119 return GEN_INT (lang_type_class);
bbf6f052 6120 }
906c4e36 6121 return GEN_INT (no_type_class);
bbf6f052
RK
6122
6123 case BUILT_IN_CONSTANT_P:
6124 if (arglist == 0)
6125 return const0_rtx;
6126 else
cda0ec81 6127 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
bbf6f052
RK
6128 ? const1_rtx : const0_rtx);
6129
6130 case BUILT_IN_FRAME_ADDRESS:
6131 /* The argument must be a nonnegative integer constant.
6132 It counts the number of frames to scan up the stack.
6133 The value is the address of that frame. */
6134 case BUILT_IN_RETURN_ADDRESS:
6135 /* The argument must be a nonnegative integer constant.
6136 It counts the number of frames to scan up the stack.
6137 The value is the return address saved in that frame. */
6138 if (arglist == 0)
6139 /* Warning about missing arg was already issued. */
6140 return const0_rtx;
6141 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
6142 {
42b85a55 6143 error ("invalid arg to `__builtin_return_address'");
bbf6f052
RK
6144 return const0_rtx;
6145 }
6146 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
6147 {
42b85a55 6148 error ("invalid arg to `__builtin_return_address'");
bbf6f052
RK
6149 return const0_rtx;
6150 }
6151 else
6152 {
6153 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
6154 rtx tem = frame_pointer_rtx;
6155 int i;
6156
46b68a37
JW
6157 /* Some machines need special handling before we can access arbitrary
6158 frames. For example, on the sparc, we must first flush all
6159 register windows to the stack. */
6160#ifdef SETUP_FRAME_ADDRESSES
6161 SETUP_FRAME_ADDRESSES ();
6162#endif
6163
6164 /* On the sparc, the return address is not in the frame, it is
6165 in a register. There is no way to access it off of the current
6166 frame pointer, but it can be accessed off the previous frame
6167 pointer by reading the value from the register window save
6168 area. */
6169#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
6170 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
6171 count--;
6172#endif
6173
bbf6f052
RK
6174 /* Scan back COUNT frames to the specified frame. */
6175 for (i = 0; i < count; i++)
6176 {
6177 /* Assume the dynamic chain pointer is in the word that
6178 the frame address points to, unless otherwise specified. */
6179#ifdef DYNAMIC_CHAIN_ADDRESS
6180 tem = DYNAMIC_CHAIN_ADDRESS (tem);
6181#endif
6182 tem = memory_address (Pmode, tem);
6183 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
6184 }
6185
6186 /* For __builtin_frame_address, return what we've got. */
6187 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6188 return tem;
6189
6190 /* For __builtin_return_address,
6191 Get the return address from that frame. */
6192#ifdef RETURN_ADDR_RTX
6193 return RETURN_ADDR_RTX (count, tem);
6194#else
6195 tem = memory_address (Pmode,
6196 plus_constant (tem, GET_MODE_SIZE (Pmode)));
6197 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
6198#endif
6199 }
6200
6201 case BUILT_IN_ALLOCA:
6202 if (arglist == 0
6203 /* Arg could be non-integer if user redeclared this fcn wrong. */
6204 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6205 return const0_rtx;
6206 current_function_calls_alloca = 1;
6207 /* Compute the argument. */
906c4e36 6208 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6209
6210 /* Allocate the desired space. */
8c8a8e34 6211 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
bbf6f052
RK
6212
6213 /* Record the new stack level for nonlocal gotos. */
6dc42e49 6214 if (nonlocal_goto_handler_slot != 0)
906c4e36 6215 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
bbf6f052
RK
6216 return target;
6217
6218 case BUILT_IN_FFS:
6219 /* If not optimizing, call the library function. */
6220 if (!optimize)
6221 break;
6222
6223 if (arglist == 0
6224 /* Arg could be non-integer if user redeclared this fcn wrong. */
6225 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6226 return const0_rtx;
6227
6228 /* Compute the argument. */
6229 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6230 /* Compute ffs, into TARGET if possible.
6231 Set TARGET to wherever the result comes back. */
6232 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6233 ffs_optab, op0, target, 1);
6234 if (target == 0)
6235 abort ();
6236 return target;
6237
6238 case BUILT_IN_STRLEN:
6239 /* If not optimizing, call the library function. */
6240 if (!optimize)
6241 break;
6242
6243 if (arglist == 0
6244 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6245 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6246 return const0_rtx;
6247 else
6248 {
e7c33f54
RK
6249 tree src = TREE_VALUE (arglist);
6250 tree len = c_strlen (src);
bbf6f052 6251
e7c33f54
RK
6252 int align
6253 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6254
6255 rtx result, src_rtx, char_rtx;
6256 enum machine_mode insn_mode = value_mode, char_mode;
6257 enum insn_code icode;
6258
6259 /* If the length is known, just return it. */
6260 if (len != 0)
6261 return expand_expr (len, target, mode, 0);
6262
6263 /* If SRC is not a pointer type, don't do this operation inline. */
6264 if (align == 0)
6265 break;
6266
6267 /* Call a function if we can't compute strlen in the right mode. */
6268
6269 while (insn_mode != VOIDmode)
6270 {
6271 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6272 if (icode != CODE_FOR_nothing)
6273 break;
6274
6275 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6276 }
6277 if (insn_mode == VOIDmode)
bbf6f052 6278 break;
e7c33f54
RK
6279
6280 /* Make a place to write the result of the instruction. */
6281 result = target;
6282 if (! (result != 0
6283 && GET_CODE (result) == REG
6284 && GET_MODE (result) == insn_mode
6285 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6286 result = gen_reg_rtx (insn_mode);
6287
4d613828 6288 /* Make sure the operands are acceptable to the predicates. */
e7c33f54 6289
4d613828 6290 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
e7c33f54
RK
6291 result = gen_reg_rtx (insn_mode);
6292
6293 src_rtx = memory_address (BLKmode,
906c4e36 6294 expand_expr (src, NULL_RTX, Pmode,
e7c33f54 6295 EXPAND_NORMAL));
4d613828 6296 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
e7c33f54
RK
6297 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6298
6299 char_rtx = const0_rtx;
4d613828
RS
6300 char_mode = insn_operand_mode[(int)icode][2];
6301 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
e7c33f54
RK
6302 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6303
6304 emit_insn (GEN_FCN (icode) (result,
6305 gen_rtx (MEM, BLKmode, src_rtx),
906c4e36 6306 char_rtx, GEN_INT (align)));
e7c33f54
RK
6307
6308 /* Return the value in the proper mode for this function. */
6309 if (GET_MODE (result) == value_mode)
6310 return result;
6311 else if (target != 0)
6312 {
6313 convert_move (target, result, 0);
6314 return target;
6315 }
6316 else
6317 return convert_to_mode (value_mode, result, 0);
bbf6f052
RK
6318 }
6319
6320 case BUILT_IN_STRCPY:
6321 /* If not optimizing, call the library function. */
6322 if (!optimize)
6323 break;
6324
6325 if (arglist == 0
6326 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6327 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6328 || TREE_CHAIN (arglist) == 0
6329 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6330 return const0_rtx;
6331 else
6332 {
6333 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6334
6335 if (len == 0)
6336 break;
6337
6338 len = size_binop (PLUS_EXPR, len, integer_one_node);
6339
906c4e36 6340 chainon (arglist, build_tree_list (NULL_TREE, len));
bbf6f052
RK
6341 }
6342
6343 /* Drops in. */
6344 case BUILT_IN_MEMCPY:
6345 /* If not optimizing, call the library function. */
6346 if (!optimize)
6347 break;
6348
6349 if (arglist == 0
6350 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6351 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6352 || TREE_CHAIN (arglist) == 0
6353 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6354 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6355 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6356 return const0_rtx;
6357 else
6358 {
6359 tree dest = TREE_VALUE (arglist);
6360 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6361 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6362
6363 int src_align
6364 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6365 int dest_align
6366 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9937da1a 6367 rtx dest_rtx, dest_mem, src_mem;
bbf6f052
RK
6368
6369 /* If either SRC or DEST is not a pointer type, don't do
6370 this operation in-line. */
6371 if (src_align == 0 || dest_align == 0)
6372 {
6373 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6374 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6375 break;
6376 }
6377
906c4e36 6378 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
9937da1a
RS
6379 dest_mem = gen_rtx (MEM, BLKmode,
6380 memory_address (BLKmode, dest_rtx));
6381 src_mem = gen_rtx (MEM, BLKmode,
6382 memory_address (BLKmode,
6383 expand_expr (src, NULL_RTX,
6384 Pmode,
6385 EXPAND_NORMAL)));
bbf6f052
RK
6386
6387 /* Copy word part most expediently. */
9937da1a 6388 emit_block_move (dest_mem, src_mem,
906c4e36 6389 expand_expr (len, NULL_RTX, VOIDmode, 0),
bbf6f052
RK
6390 MIN (src_align, dest_align));
6391 return dest_rtx;
6392 }
6393
6394/* These comparison functions need an instruction that returns an actual
6395 index. An ordinary compare that just sets the condition codes
6396 is not enough. */
6397#ifdef HAVE_cmpstrsi
6398 case BUILT_IN_STRCMP:
6399 /* If not optimizing, call the library function. */
6400 if (!optimize)
6401 break;
6402
6403 if (arglist == 0
6404 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6405 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6406 || TREE_CHAIN (arglist) == 0
6407 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6408 return const0_rtx;
6409 else if (!HAVE_cmpstrsi)
6410 break;
6411 {
6412 tree arg1 = TREE_VALUE (arglist);
6413 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6414 tree offset;
6415 tree len, len2;
6416
6417 len = c_strlen (arg1);
6418 if (len)
6419 len = size_binop (PLUS_EXPR, integer_one_node, len);
6420 len2 = c_strlen (arg2);
6421 if (len2)
6422 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6423
6424 /* If we don't have a constant length for the first, use the length
6425 of the second, if we know it. We don't require a constant for
6426 this case; some cost analysis could be done if both are available
6427 but neither is constant. For now, assume they're equally cheap.
6428
6429 If both strings have constant lengths, use the smaller. This
6430 could arise if optimization results in strcpy being called with
6431 two fixed strings, or if the code was machine-generated. We should
6432 add some code to the `memcmp' handler below to deal with such
6433 situations, someday. */
6434 if (!len || TREE_CODE (len) != INTEGER_CST)
6435 {
6436 if (len2)
6437 len = len2;
6438 else if (len == 0)
6439 break;
6440 }
6441 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6442 {
6443 if (tree_int_cst_lt (len2, len))
6444 len = len2;
6445 }
6446
906c4e36 6447 chainon (arglist, build_tree_list (NULL_TREE, len));
bbf6f052
RK
6448 }
6449
6450 /* Drops in. */
6451 case BUILT_IN_MEMCMP:
6452 /* If not optimizing, call the library function. */
6453 if (!optimize)
6454 break;
6455
6456 if (arglist == 0
6457 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6458 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6459 || TREE_CHAIN (arglist) == 0
6460 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6461 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6462 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6463 return const0_rtx;
6464 else if (!HAVE_cmpstrsi)
6465 break;
6466 {
6467 tree arg1 = TREE_VALUE (arglist);
6468 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6469 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6470 rtx result;
6471
6472 int arg1_align
6473 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6474 int arg2_align
6475 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6476 enum machine_mode insn_mode
6477 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6478
6479 /* If we don't have POINTER_TYPE, call the function. */
6480 if (arg1_align == 0 || arg2_align == 0)
6481 {
6482 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6483 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6484 break;
6485 }
6486
6487 /* Make a place to write the result of the instruction. */
6488 result = target;
6489 if (! (result != 0
6490 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6491 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6492 result = gen_reg_rtx (insn_mode);
6493
6494 emit_insn (gen_cmpstrsi (result,
6495 gen_rtx (MEM, BLKmode,
906c4e36
RK
6496 expand_expr (arg1, NULL_RTX, Pmode,
6497 EXPAND_NORMAL)),
bbf6f052 6498 gen_rtx (MEM, BLKmode,
906c4e36
RK
6499 expand_expr (arg2, NULL_RTX, Pmode,
6500 EXPAND_NORMAL)),
6501 expand_expr (len, NULL_RTX, VOIDmode, 0),
6502 GEN_INT (MIN (arg1_align, arg2_align))));
bbf6f052
RK
6503
6504 /* Return the value in the proper mode for this function. */
6505 mode = TYPE_MODE (TREE_TYPE (exp));
6506 if (GET_MODE (result) == mode)
6507 return result;
6508 else if (target != 0)
6509 {
6510 convert_move (target, result, 0);
6511 return target;
6512 }
6513 else
6514 return convert_to_mode (mode, result, 0);
6515 }
6516#else
6517 case BUILT_IN_STRCMP:
6518 case BUILT_IN_MEMCMP:
6519 break;
6520#endif
6521
6522 default: /* just do library call, if unknown builtin */
42b85a55 6523 error ("built-in function `%s' not currently supported",
bbf6f052
RK
6524 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6525 }
6526
6527 /* The switch statement above can drop through to cause the function
6528 to be called normally. */
6529
6530 return expand_call (exp, target, ignore);
6531}
6532\f
0006469d
TW
6533/* Built-in functions to perform an untyped call and return. */
6534
6535/* For each register that may be used for calling a function, this
6536 gives a mode used to copy the register's value. VOIDmode indicates
6537 the register is not used for calling a function. If the machine
6538 has register windows, this gives only the outbound registers.
6539 INCOMING_REGNO gives the corresponding inbound register. */
6540static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
6541
6542/* For each register that may be used for returning values, this gives
6543 a mode used to copy the register's value. VOIDmode indicates the
6544 register is not used for returning values. If the machine has
6545 register windows, this gives only the outbound registers.
6546 INCOMING_REGNO gives the corresponding inbound register. */
6547static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
6548
fb2ca25a
KKT
6549/* For each register that may be used for calling a function, this
6550 gives the offset of that register into the block returned by
 6551 __builtin_apply_args.  0 indicates that the register is not
6552 used for calling a function. */
6553static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
6554
6555/* Return the offset of register REGNO into the block returned by
6556 __builtin_apply_args. This is not declared static, since it is
6557 needed in objc-act.c. */
904762c8 6558
fb2ca25a 6559int
904762c8
RK
6560apply_args_register_offset (regno)
6561 int regno;
fb2ca25a
KKT
6562{
6563 apply_args_size ();
6564
6565 /* Arguments are always put in outgoing registers (in the argument
6566 block) if such make sense. */
6567#ifdef OUTGOING_REGNO
6568 regno = OUTGOING_REGNO(regno);
6569#endif
6570 return apply_args_reg_offset[regno];
6571}
6572
0006469d
TW
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode.  */

static int
apply_args_size ()
{
  /* Cached result: the layout of the save area never changes for a
     given target, so it is computed only once.  */
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
	 passed as an "invisible" first argument.  */
      if (struct_value_rtx)
	size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_ARG_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    /* Prefer the widest integer mode that fits in one hard
	       register.  */
	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != VOIDmode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode)
		  && HARD_REGNO_NREGS (regno, mode) == 1)
		best_mode = mode;

	    /* Otherwise fall back to the widest float mode that has a
	       move pattern.  */
	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    /* Round the running size up to this value's alignment, then
	       record the register's offset and mode for later use by
	       expand_builtin_apply_args and apply_args_register_offset.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    apply_args_reg_offset[regno] = size;
	    size += GET_MODE_SIZE (mode);
	    apply_args_mode[regno] = mode;
	  }
	else
	  {
	    apply_args_mode[regno] = VOIDmode;
	    apply_args_reg_offset[regno] = 0;
	  }
    }
  return size;
}
6636
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  */

static int
apply_result_size ()
{
  /* Cached result; the layout never changes for a given target.  */
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if (FUNCTION_VALUE_REGNO_P (regno))
	  {
	    /* Search for the proper mode for copying this register's
	       value.  I'm not sure this is right, but it works so far.  */
	    enum machine_mode best_mode = VOIDmode;

	    /* NOTE(review): this integer-mode search stops short of
	       TImode, unlike the corresponding loop in apply_args_size
	       which runs to VOIDmode -- presumably intentional; confirm
	       before changing.  */
	    for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		 mode != TImode;
		 mode = GET_MODE_WIDER_MODE (mode))
	      if (HARD_REGNO_MODE_OK (regno, mode))
		best_mode = mode;

	    /* Otherwise fall back to the widest float mode that has a
	       move pattern.  */
	    if (best_mode == VOIDmode)
	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		if (HARD_REGNO_MODE_OK (regno, mode)
		    && (mov_optab->handlers[(int) mode].insn_code
			!= CODE_FOR_nothing))
		  best_mode = mode;

	    mode = best_mode;
	    if (mode == VOIDmode)
	      abort ();

	    /* Align, then allot space for this value.  */
	    align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	    if (size % align != 0)
	      size = CEIL (size, align) * align;
	    size += GET_MODE_SIZE (mode);
	    apply_result_mode[regno] = mode;
	  }
	else
	  apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
	 the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
6695
6696#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
6697/* Create a vector describing the result block RESULT. If SAVEP is true,
6698 the result block is used to save the values; otherwise it is used to
6699 restore the values. */
904762c8 6700
0006469d
TW
6701static rtx
6702result_vector (savep, result)
6703 int savep;
6704 rtx result;
6705{
6706 int regno, size, align, nelts;
6707 enum machine_mode mode;
6708 rtx reg, mem;
6709 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
6710
6711 size = nelts = 0;
6712 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6713 if ((mode = apply_result_mode[regno]) != VOIDmode)
6714 {
6715 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6716 if (size % align != 0)
6717 size = CEIL (size, align) * align;
6718 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
6719 mem = change_address (result, mode,
6720 plus_constant (XEXP (result, 0), size));
6721 savevec[nelts++] = (savep
6722 ? gen_rtx (SET, VOIDmode, mem, reg)
6723 : gen_rtx (SET, VOIDmode, reg, mem));
6724 size += GET_MODE_SIZE (mode);
6725 }
6726 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
6727}
6728#endif /* HAVE_untyped_call or HAVE_untyped_return */
6729
0006469d
TW
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  */

static rtx
expand_builtin_apply_args ()
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.
     The offsets must match the layout computed by apply_args_size.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	emit_move_insn (change_address (registers, mode,
					plus_constant (XEXP (registers, 0),
						       size)),
			gen_rtx (REG, mode, INCOMING_REGNO (regno)));
	size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block.  */
  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
		  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.
     NOTE(review): the size walk above tests struct_value_rtx but this
     tests struct_value_incoming_rtx -- verify the two agree on every
     target of interest.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (change_address (registers, Pmode,
				      plus_constant (XEXP (registers, 0),
						     size)),
		      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
6782
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.
   FUNCTION is the address to call; ARGUMENTS is the address of a block
   written by __builtin_apply_args; ARGSIZE is the size of the pushed
   argument area.  Returns the address of a block holding the call's
   result registers.  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx use_insns = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
		  gen_rtx (MEM, Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  /* When the stack grows upward, back the arg pointer up over the
     pushed arguments.  */
  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
				incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();
  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros effect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = copy_addr_to_reg (push_block (argsize, 0, 0));
  emit_block_move (gen_rtx (MEM, BLKmode, dest),
		   gen_rtx (MEM, BLKmode, incoming_args),
		   argsize,
		   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx (MEM, BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	/* The offsets must match the layout made by apply_args_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx (REG, mode, regno);
	emit_move_insn (reg,
			change_address (arguments, mode,
					plus_constant (XEXP (arguments, 0),
						       size)));

	/* Accumulate a USE of the register in a separate sequence; they
	   are all emitted just before the call below.  */
	push_to_sequence (use_insns);
	emit_insn (gen_rtx (USE, VOIDmode, reg));
	use_insns = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
		      change_address (arguments, Pmode,
				      plus_constant (XEXP (arguments, 0),
						     size)));
      emit_move_insn (struct_value_rtx, value);
      /* A hard register needs an explicit USE to stay live up to
	 the call.  */
      if (GET_CODE (struct_value_rtx) == REG)
	{
	  push_to_sequence (use_insns);
	  emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
	  use_insns = get_insns ();
	  end_sequence ();
	}
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &use_insns);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    if (valreg)
	      abort (); /* HAVE_untyped_call required.  */
	    valreg = gen_rtx (REG, mode, regno);
	  }

      emit_call_insn (gen_call_value (valreg,
				      gen_rtx (MEM, FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (change_address (result, GET_MODE (valreg),
				      XEXP (result, 0)),
		      valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted and write the USE insns before it.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the USE insns before the CALL.  */
  emit_insns_before (use_insns, call_insn);

  /* Restore the stack.  */
  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
6942
/* Perform an untyped return.  RESULT is the address of a block written
   by __builtin_apply; its values are copied back into the function's
   return registers before returning.  */

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx use_insns = 0;

  /* Make sure apply_result_mode is initialized.  */
  apply_result_size ();
  result = gen_rtx (MEM, BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* The offsets must match the layout made by apply_result_size.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
	emit_move_insn (reg,
			change_address (result, mode,
					plus_constant (XEXP (result, 0),
						       size)));

	/* USE insns keep the return registers live up to the return.  */
	push_to_sequence (use_insns);
	emit_insn (gen_rtx (USE, VOIDmode, reg));
	use_insns = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (use_insns);

  /* Return whatever values was restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
6994\f
bbf6f052
RK
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (exp, post)
     register tree exp;
     int post;
{
  register rtx op0, op1;
  register rtx temp, value;
  register tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  /* Nonzero if OP0 below is a copy of the lvalue, not the lvalue itself.  */
  int op0_is_copy = 0;
  /* Nonzero if a preincrement can be queued as one insn.  */
  int single_insn = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
		 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (- INTVAL (op1));
      this_optab = add_optab;
    }

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_operand_predicate[icode][0]) (op0, mode)
	  && (*insn_operand_predicate[icode][1]) (op0, mode)
	  && (*insn_operand_predicate[icode][2]) (op1, mode))
	single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.  */
  if (op0_is_copy || (!post && !single_insn))
    {
      /* This is the easiest way to increment the value wherever it is.
	 Problems with multiple evaluation of INCREMENTED are prevented
	 because either (1) it is a component_ref or preincrement,
	 in which case it was stabilized above, or (2) it is an array_ref
	 with constant index in an array in a register, which is
	 safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
			    ? MINUS_EXPR : PLUS_EXPR),
			   TREE_TYPE (exp),
			   incremented,
			   TREE_OPERAND (exp, 1));
      temp = expand_assignment (incremented, newexp, ! post, 0);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
	 If there is an insn to add or subtract in this mode, queue it.  */

#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_operand_predicate[icode][0]) (op0, mode)
	  && (*insn_operand_predicate[icode][1]) (op0, mode))
	{
	  if (! (*insn_operand_predicate[icode][2]) (op1, mode))
	    op1 = force_reg (mode, op1);

	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
	}
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
7143\f
/* Expand all function calls contained within EXP, innermost ones first.
   But don't look within expressions that have sequence points.
   For each CALL_EXPR, record the rtx for its value
   in the CALL_EXPR_RTL field.  */

static void
preexpand_calls (exp)
     tree exp;
{
  register int nops, i;
  int type = TREE_CODE_CLASS (TREE_CODE (exp));

  if (! do_preexpand_calls)
    return;

  /* Only expressions and references can contain calls.  */

  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
    return;

  switch (TREE_CODE (exp))
    {
    case CALL_EXPR:
      /* Do nothing if already expanded.  */
      if (CALL_EXPR_RTL (exp) != 0)
	return;

      /* Do nothing to built-in functions.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
	  || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
	  || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
      return;

    case COMPOUND_EXPR:
    case COND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* If we find one of these, then we can be sure
	 the adjust will be done for it (since it makes jumps).
	 Do it now, so that if this is inside an argument
	 of a function, we don't get the stack adjustment
	 after some other args have already been pushed.  */
      do_pending_stack_adjust ();
      return;

    case BLOCK:
    case RTL_EXPR:
    case WITH_CLEANUP_EXPR:
      return;

    case SAVE_EXPR:
      /* An already-expanded SAVE_EXPR needs no rescanning; otherwise
	 deliberately fall through to scan its operands below.  */
      if (SAVE_EXPR_RTL (exp) != 0)
	return;
    }

  /* Any other code: recursively scan the operands.  */
  nops = tree_code_length[(int) TREE_CODE (exp)];
  for (i = 0; i < nops; i++)
    if (TREE_OPERAND (exp, i) != 0)
      {
	type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
	if (type == 'e' || type == '<' || type == '1' || type == '2'
	    || type == 'r')
	  preexpand_calls (TREE_OPERAND (exp, i));
      }
}
7210\f
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  /* pending_stack_adjust is file-scope state shared with
     clear_pending_stack_adjust and do_pending_stack_adjust below.  */
  pending_stack_adjust = 0;
}
7219
/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  /* Keep the adjustment whenever dropping it might matter: when the
     frame pointer is omitted, when the exit code cares about the stack
     pointer, or when this function may be inlined elsewhere.  */
  if (flag_omit_frame_pointer
      || ! EXIT_IGNORE_STACK
      || (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      || flag_inline_functions)
    return;

  pending_stack_adjust = 0;
#endif
}
7233
7234/* Pop any previously-pushed arguments that have not been popped yet. */
7235
7236void
7237do_pending_stack_adjust ()
7238{
7239 if (inhibit_defer_pop == 0)
7240 {
7241 if (pending_stack_adjust != 0)
906c4e36 7242 adjust_stack (GEN_INT (pending_stack_adjust));
bbf6f052
RK
7243 pending_stack_adjust = 0;
7244 }
7245}
7246
7247/* Expand all cleanups up to OLD_CLEANUPS.
7248 Needed here, and also for language-dependent calls. */
7249
7250void
7251expand_cleanups_to (old_cleanups)
7252 tree old_cleanups;
7253{
7254 while (cleanups_this_call != old_cleanups)
7255 {
906c4e36 7256 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7257 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
7258 }
7259}
7260\f
7261/* Expand conditional expressions. */
7262
/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  /* LABEL is the false-label; the null true-label means fall through
     when EXP is nonzero.  */
  do_jump (exp, label, NULL_RTX);
}
7274
/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  /* LABEL is the true-label; the null false-label means fall through
     when EXP is zero.  */
  do_jump (exp, NULL_RTX, label);
}
7284
7285/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
7286 the result is zero, or IF_TRUE_LABEL if the result is one.
7287 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
7288 meaning fall through in that case.
7289
e7c33f54
RK
7290 do_jump always does any pending stack adjust except when it does not
7291 actually perform a jump. An example where there is no jump
7292 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
7293
bbf6f052
RK
7294 This function is responsible for optimizing cases such as
7295 &&, || and comparison operators in EXP. */
7296
7297void
7298do_jump (exp, if_false_label, if_true_label)
7299 tree exp;
7300 rtx if_false_label, if_true_label;
7301{
7302 register enum tree_code code = TREE_CODE (exp);
7303 /* Some cases need to create a label to jump to
7304 in order to properly fall through.
7305 These cases set DROP_THROUGH_LABEL nonzero. */
7306 rtx drop_through_label = 0;
7307 rtx temp;
7308 rtx comparison = 0;
7309 int i;
7310 tree type;
7311
7312 emit_queue ();
7313
7314 switch (code)
7315 {
7316 case ERROR_MARK:
7317 break;
7318
7319 case INTEGER_CST:
7320 temp = integer_zerop (exp) ? if_false_label : if_true_label;
7321 if (temp)
7322 emit_jump (temp);
7323 break;
7324
7325#if 0
7326 /* This is not true with #pragma weak */
7327 case ADDR_EXPR:
7328 /* The address of something can never be zero. */
7329 if (if_true_label)
7330 emit_jump (if_true_label);
7331 break;
7332#endif
7333
7334 case NOP_EXPR:
7335 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
7336 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
7337 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
7338 goto normal;
7339 case CONVERT_EXPR:
7340 /* If we are narrowing the operand, we have to do the compare in the
7341 narrower mode. */
7342 if ((TYPE_PRECISION (TREE_TYPE (exp))
7343 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7344 goto normal;
7345 case NON_LVALUE_EXPR:
7346 case REFERENCE_EXPR:
7347 case ABS_EXPR:
7348 case NEGATE_EXPR:
7349 case LROTATE_EXPR:
7350 case RROTATE_EXPR:
7351 /* These cannot change zero->non-zero or vice versa. */
7352 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7353 break;
7354
7355#if 0
7356 /* This is never less insns than evaluating the PLUS_EXPR followed by
7357 a test and can be longer if the test is eliminated. */
7358 case PLUS_EXPR:
7359 /* Reduce to minus. */
7360 exp = build (MINUS_EXPR, TREE_TYPE (exp),
7361 TREE_OPERAND (exp, 0),
7362 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
7363 TREE_OPERAND (exp, 1))));
7364 /* Process as MINUS. */
7365#endif
7366
7367 case MINUS_EXPR:
7368 /* Non-zero iff operands of minus differ. */
7369 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
7370 TREE_OPERAND (exp, 0),
7371 TREE_OPERAND (exp, 1)),
7372 NE, NE);
7373 break;
7374
7375 case BIT_AND_EXPR:
7376 /* If we are AND'ing with a small constant, do this comparison in the
7377 smallest type that fits. If the machine doesn't have comparisons
7378 that small, it will be converted back to the wider comparison.
7379 This helps if we are testing the sign bit of a narrower object.
7380 combine can't do this for us because it can't know whether a
7381 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
7382
08af8e09
RK
7383 if (! SLOW_BYTE_ACCESS
7384 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 7385 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
bbf6f052
RK
7386 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
7387 && (type = type_for_size (i + 1, 1)) != 0
08af8e09
RK
7388 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7389 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7390 != CODE_FOR_nothing))
bbf6f052
RK
7391 {
7392 do_jump (convert (type, exp), if_false_label, if_true_label);
7393 break;
7394 }
7395 goto normal;
7396
7397 case TRUTH_NOT_EXPR:
7398 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7399 break;
7400
7401 case TRUTH_ANDIF_EXPR:
7402 if (if_false_label == 0)
7403 if_false_label = drop_through_label = gen_label_rtx ();
906c4e36 7404 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
bbf6f052
RK
7405 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7406 break;
7407
7408 case TRUTH_ORIF_EXPR:
7409 if (if_true_label == 0)
7410 if_true_label = drop_through_label = gen_label_rtx ();
906c4e36 7411 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
bbf6f052
RK
7412 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7413 break;
7414
7415 case COMPOUND_EXPR:
7416 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7417 free_temp_slots ();
7418 emit_queue ();
e7c33f54 7419 do_pending_stack_adjust ();
bbf6f052
RK
7420 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7421 break;
7422
7423 case COMPONENT_REF:
7424 case BIT_FIELD_REF:
7425 case ARRAY_REF:
7426 {
7427 int bitsize, bitpos, unsignedp;
7428 enum machine_mode mode;
7429 tree type;
7bb0943f 7430 tree offset;
bbf6f052
RK
7431 int volatilep = 0;
7432
7433 /* Get description of this reference. We don't actually care
7434 about the underlying object here. */
7bb0943f
RS
7435 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7436 &mode, &unsignedp, &volatilep);
bbf6f052
RK
7437
7438 type = type_for_size (bitsize, unsignedp);
08af8e09
RK
7439 if (! SLOW_BYTE_ACCESS
7440 && type != 0 && bitsize >= 0
7441 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7442 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7443 != CODE_FOR_nothing))
bbf6f052
RK
7444 {
7445 do_jump (convert (type, exp), if_false_label, if_true_label);
7446 break;
7447 }
7448 goto normal;
7449 }
7450
7451 case COND_EXPR:
7452 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7453 if (integer_onep (TREE_OPERAND (exp, 1))
7454 && integer_zerop (TREE_OPERAND (exp, 2)))
7455 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7456
7457 else if (integer_zerop (TREE_OPERAND (exp, 1))
7458 && integer_onep (TREE_OPERAND (exp, 2)))
7459 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7460
7461 else
7462 {
7463 register rtx label1 = gen_label_rtx ();
7464 drop_through_label = gen_label_rtx ();
906c4e36 7465 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
bbf6f052
RK
7466 /* Now the THEN-expression. */
7467 do_jump (TREE_OPERAND (exp, 1),
7468 if_false_label ? if_false_label : drop_through_label,
7469 if_true_label ? if_true_label : drop_through_label);
e7c33f54
RK
7470 /* In case the do_jump just above never jumps. */
7471 do_pending_stack_adjust ();
bbf6f052
RK
7472 emit_label (label1);
7473 /* Now the ELSE-expression. */
7474 do_jump (TREE_OPERAND (exp, 2),
7475 if_false_label ? if_false_label : drop_through_label,
7476 if_true_label ? if_true_label : drop_through_label);
7477 }
7478 break;
7479
7480 case EQ_EXPR:
7481 if (integer_zerop (TREE_OPERAND (exp, 1)))
7482 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7483 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7484 == MODE_INT)
7485 &&
7486 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7487 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7488 else
7489 comparison = compare (exp, EQ, EQ);
7490 break;
7491
7492 case NE_EXPR:
7493 if (integer_zerop (TREE_OPERAND (exp, 1)))
7494 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7495 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7496 == MODE_INT)
7497 &&
7498 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7499 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7500 else
7501 comparison = compare (exp, NE, NE);
7502 break;
7503
7504 case LT_EXPR:
7505 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7506 == MODE_INT)
7507 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7508 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7509 else
7510 comparison = compare (exp, LT, LTU);
7511 break;
7512
7513 case LE_EXPR:
7514 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7515 == MODE_INT)
7516 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7517 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7518 else
7519 comparison = compare (exp, LE, LEU);
7520 break;
7521
7522 case GT_EXPR:
7523 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7524 == MODE_INT)
7525 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7526 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7527 else
7528 comparison = compare (exp, GT, GTU);
7529 break;
7530
7531 case GE_EXPR:
7532 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7533 == MODE_INT)
7534 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7535 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7536 else
7537 comparison = compare (exp, GE, GEU);
7538 break;
7539
7540 default:
7541 normal:
906c4e36 7542 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7543#if 0
7544 /* This is not needed any more and causes poor code since it causes
7545 comparisons and tests from non-SI objects to have different code
7546 sequences. */
7547 /* Copy to register to avoid generating bad insns by cse
7548 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7549 if (!cse_not_expected && GET_CODE (temp) == MEM)
7550 temp = copy_to_reg (temp);
7551#endif
7552 do_pending_stack_adjust ();
7553 if (GET_CODE (temp) == CONST_INT)
7554 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7555 else if (GET_CODE (temp) == LABEL_REF)
7556 comparison = const_true_rtx;
7557 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7558 && !can_compare_p (GET_MODE (temp)))
7559 /* Note swapping the labels gives us not-equal. */
7560 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7561 else if (GET_MODE (temp) != VOIDmode)
7562 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
cd1b4b44
RK
7563 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7564 GET_MODE (temp), NULL_RTX, 0);
bbf6f052
RK
7565 else
7566 abort ();
7567 }
7568
7569 /* Do any postincrements in the expression that was tested. */
7570 emit_queue ();
7571
7572 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7573 straight into a conditional jump instruction as the jump condition.
7574 Otherwise, all the work has been done already. */
7575
7576 if (comparison == const_true_rtx)
7577 {
7578 if (if_true_label)
7579 emit_jump (if_true_label);
7580 }
7581 else if (comparison == const0_rtx)
7582 {
7583 if (if_false_label)
7584 emit_jump (if_false_label);
7585 }
7586 else if (comparison)
7587 do_jump_for_compare (comparison, if_false_label, if_true_label);
7588
7589 free_temp_slots ();
7590
7591 if (drop_through_label)
e7c33f54
RK
7592 {
7593 /* If do_jump produces code that might be jumped around,
7594 do any stack adjusts from that code, before the place
7595 where control merges in. */
7596 do_pending_stack_adjust ();
7597 emit_label (drop_through_label);
7598 }
bbf6f052
RK
7599}
7600\f
7601/* Given a comparison expression EXP for values too wide to be compared
7602 with one insn, test the comparison and jump to the appropriate label.
7603 The code of EXP is ignored; we always test GT if SWAP is 0,
7604 and LT if SWAP is 1. */
7605
7606static void
7607do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7608 tree exp;
7609 int swap;
7610 rtx if_false_label, if_true_label;
7611{
906c4e36
RK
7612 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
7613 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7614 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7615 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7616 rtx drop_through_label = 0;
7617 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
7618 int i;
7619
7620 if (! if_true_label || ! if_false_label)
7621 drop_through_label = gen_label_rtx ();
7622 if (! if_true_label)
7623 if_true_label = drop_through_label;
7624 if (! if_false_label)
7625 if_false_label = drop_through_label;
7626
7627 /* Compare a word at a time, high order first. */
f81497d9
RS
7628 for (i = 0; i < nwords; i++)
7629 {
7630 rtx comp;
7631 rtx op0_word, op1_word;
7632
7633 if (WORDS_BIG_ENDIAN)
7634 {
7635 op0_word = operand_subword_force (op0, i, mode);
7636 op1_word = operand_subword_force (op1, i, mode);
7637 }
7638 else
7639 {
7640 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7641 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7642 }
7643
7644 /* All but high-order word must be compared as unsigned. */
7645 comp = compare_from_rtx (op0_word, op1_word,
7646 (unsignedp || i > 0) ? GTU : GT,
7647 unsignedp, word_mode, NULL_RTX, 0);
7648 if (comp == const_true_rtx)
7649 emit_jump (if_true_label);
7650 else if (comp != const0_rtx)
7651 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7652
7653 /* Consider lower words only if these are equal. */
7654 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7655 NULL_RTX, 0);
7656 if (comp == const_true_rtx)
7657 emit_jump (if_false_label);
7658 else if (comp != const0_rtx)
7659 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7660 }
7661
7662 if (if_false_label)
7663 emit_jump (if_false_label);
7664 if (drop_through_label)
7665 emit_label (drop_through_label);
7666}
7667
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.

   Either label may be zero, meaning "fall through" for that outcome;
   an internal label is then placed just past the emitted code.  */

static void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  /* Number of machine words making up each operand.  */
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  /* Substitute a just-past-the-code label for any label not supplied,
     so that outcome simply falls through.  */
  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      /* Pick the I'th most significant word of each operand; the subword
	 index of that word depends on endianness.  */
      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      /* COMP is a constant if the word comparison folded at compile time;
	 otherwise branch to the true label when this word is greater.  */
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  /* All words compared equal, so OP0 is not greater than OP1.  */
  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
7730
7731/* Given an EQ_EXPR expression EXP for values too wide to be compared
7732 with one insn, test the comparison and jump to the appropriate label. */
7733
7734static void
7735do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7736 tree exp;
7737 rtx if_false_label, if_true_label;
7738{
906c4e36
RK
7739 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7740 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7741 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7742 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7743 int i;
7744 rtx drop_through_label = 0;
7745
7746 if (! if_false_label)
7747 drop_through_label = if_false_label = gen_label_rtx ();
7748
7749 for (i = 0; i < nwords; i++)
7750 {
7751 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7752 operand_subword_force (op1, i, mode),
cd1b4b44
RK
7753 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
7754 word_mode, NULL_RTX, 0);
bbf6f052
RK
7755 if (comp == const_true_rtx)
7756 emit_jump (if_false_label);
7757 else if (comp != const0_rtx)
906c4e36 7758 do_jump_for_compare (comp, if_false_label, NULL_RTX);
bbf6f052
RK
7759 }
7760
7761 if (if_true_label)
7762 emit_jump (if_true_label);
7763 if (drop_through_label)
7764 emit_label (drop_through_label);
7765}
7766\f
7767/* Jump according to whether OP0 is 0.
7768 We assume that OP0 has an integer mode that is too wide
7769 for the available compare insns. */
7770
7771static void
7772do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7773 rtx op0;
7774 rtx if_false_label, if_true_label;
7775{
7776 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7777 int i;
7778 rtx drop_through_label = 0;
7779
7780 if (! if_false_label)
7781 drop_through_label = if_false_label = gen_label_rtx ();
7782
7783 for (i = 0; i < nwords; i++)
7784 {
7785 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7786 GET_MODE (op0)),
cd1b4b44 7787 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
bbf6f052
RK
7788 if (comp == const_true_rtx)
7789 emit_jump (if_false_label);
7790 else if (comp != const0_rtx)
906c4e36 7791 do_jump_for_compare (comp, if_false_label, NULL_RTX);
bbf6f052
RK
7792 }
7793
7794 if (if_true_label)
7795 emit_jump (if_true_label);
7796 if (drop_through_label)
7797 emit_label (drop_through_label);
7798}
7799
/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.

   With IF_TRUE_LABEL nonzero, a conditional branch to it is emitted
   (taken when COMPARISON holds), followed, if IF_FALSE_LABEL is also
   nonzero, by an unconditional jump to IF_FALSE_LABEL.  With only
   IF_FALSE_LABEL, the net effect is a branch to it taken when
   COMPARISON does not hold.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      /* bcc_gen_fctn maps an rtx comparison code to the generator for the
	 corresponding conditional-branch pattern.  */
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
	abort ();

      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = PREV_INSN (get_last_insn ());
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
	 what is generated.  If more than one insn is a branch, or if the
	 branch is not the last insn written, abort.  If we can't invert
	 the branch, make a true label, redirect this jump to that,
	 emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
	abort ();

      /* Here we get the insn before what was just emitted.
	 On some machines, emitting the branch can discard
	 the previous compare insn and emit a replacement.  */
      if (prev == 0)
	/* If there's only one preceding insn...  */
	insn = get_insns ();
      else
	insn = NEXT_INSN (prev);

      /* Scan the just-emitted insns for the (single) branch.  */
      for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    if (branch)
	      abort ();
	    branch = insn;
	  }

      if (branch != get_last_insn ())
	abort ();

      /* Invert the branch so it is taken when COMPARISON is false.  If
	 the machine cannot invert it, branch around an unconditional
	 jump to the false label instead.  */
      if (! invert_jump (branch, if_false_label))
	{
	  if_true_label = gen_label_rtx ();
	  redirect_jump (branch, if_true_label);
	  emit_jump (if_false_label);
	  emit_label (if_true_label);
	}
    }
}
7863\f
7864/* Generate code for a comparison expression EXP
7865 (including code to compute the values to be compared)
7866 and set (CC0) according to the result.
7867 SIGNED_CODE should be the rtx operation for this comparison for
7868 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7869
7870 We force a stack adjustment unless there are currently
7871 things pushed on the stack that aren't yet used. */
7872
7873static rtx
7874compare (exp, signed_code, unsigned_code)
7875 register tree exp;
7876 enum rtx_code signed_code, unsigned_code;
7877{
906c4e36
RK
7878 register rtx op0
7879 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7880 register rtx op1
7881 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7882 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7883 register enum machine_mode mode = TYPE_MODE (type);
7884 int unsignedp = TREE_UNSIGNED (type);
7885 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
7886
7887 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7888 ((mode == BLKmode)
906c4e36 7889 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
bbf6f052
RK
7890 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7891}
7892
/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.

   Returns the rtx (CODE cc0 0) describing the emitted comparison, or a
   constant rtx if the comparison folded at compile time (in which case
   no compare insn is emitted).  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      /* Swapping the operands requires swapping the comparison's sense
	 (e.g. LT becomes GT) to preserve its meaning.  */
      code = swap_condition (code);
    }

  /* -fforce-mem: load memory operands into registers first.  */
  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  /* If both operands are constant integers, fold the comparison now and
     return the constant result without emitting any insn.  */
  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines. */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  /* Emit the actual compare insn and describe its result as a
     comparison of cc0 with zero.  */
  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
7963\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;		/* Nonzero means complement the final result.  */
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      /* x < 1 is x <= 0.  */
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      /* Signed x <= -1 is x < 0.  */
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      /* Signed x > -1 is x >= 0.  */
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      /* x >= 1 is x > 0.  */
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      tree inner = TREE_OPERAND (arg0, 0);
      /* Index of the single bit selected by the AND mask.  */
      int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
						    NULL_RTX, VOIDmode, 0)));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
	      < TYPE_PRECISION (type)))
	{
	  bitnum +=TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef BYTE_LOADS_SIGN_EXTEND
		       : 0
#else
		       : 1
#endif
		       );

      /* SUBTARGET is usable only if it is a register of the right mode
	 that is not clobbered while evaluating INNER.  */
      if (subtarget == 0 || GET_CODE (subtarget) != REG
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      /* Shift the tested bit down to the low-order position.  */
      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
			    size_int (bitnum), target, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      /* An EQ test (or an inverted NE) wants the complement of the bit.  */
      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, target);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  /* From here on we are committed: expand the operands.  */
  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      /* A TRUTH_NOT_EXPR wrapper means XOR the 0/1 result with 1.  */
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (target == 0 || GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  /* Preload the "condition holds" value, then branch around the store
     of the "condition fails" value when the comparison succeeds.  */
  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  /* compare_from_rtx may fold to a constant; return the 0/1 answer.  */
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
8215\f
8216/* Generate a tablejump instruction (used for switch statements). */
8217
8218#ifdef HAVE_tablejump
8219
/* Generate a tablejump (used for switch statements).

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bltu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  /* Form the address table_label + index * entry-size.  */
  index = gen_rtx (PLUS, Pmode,
		   gen_rtx (MULT, Pmode, index,
			    GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
		   gen_rtx (LABEL_REF, Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  /* Load the selected table entry (marked unchanging, since the table
     is constant) into a fresh register and dispatch through it.  */
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}
8292
8293#endif /* HAVE_tablejump */
This page took 1.079805 seconds and 5 git commands to generate.