/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
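
/* Illustrative note (not in the original source): CEIL rounds an
   integer division up rather than down, e.g. CEIL (9, 4) == 3 while
   9 / 4 == 2, and CEIL (8, 4) == 2.  It is used below to count how
   many words a multiword value occupies.  */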

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void group_insns PROTO((rtx));
static void store_constructor PROTO((tree, rtx));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
	 to facilitate use of autoincrement.
	 Make a copy of the contents of the memory location
	 rather than a copy of the address, but not
	 if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  XEXP (x, 0) = QUEUED_VAR (y);
	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (x));
	      emit_insn_before (gen_move_insn (temp, x),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return x;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
	{
	  XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
	  XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi
	  if (HAVE_extendpsisi)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if ((can_extend_p (to_mode, intermediate, unsignedp)
		 != CODE_FOR_nothing)
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
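
/* Illustrative note (not in the original source): a signed SImode to
   DImode conversion on a 32-bit target with no extend insn falls into
   the multiword case above: the low word is copied, and each remaining
   word is filled with copies of the sign bit obtained by an arithmetic
   right shift of 31 (GET_MODE_BITSIZE (lowpart_mode) - 1).  */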

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If X is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
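
/* Illustrative sketch (not in the original source): a typical caller
   widens a QImode value to full-word width before arithmetic, e.g.

     rtx wide = convert_to_mode (SImode, byte_val, 1);

   which emits a zero-extension into a fresh pseudo via convert_move;
   for a constant or a narrowing conversion it instead returns a
   lowpart reference in place.  `byte_val' is a hypothetical QImode
   rtx.  */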
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
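
/* Illustrative note (not in the original source): on a 32-bit target
   (MOVE_MAX == 4), copying 7 well-aligned bytes proceeds as one SImode
   move, then one HImode move, then one QImode move: each pass of the
   loop above picks the widest integer mode narrower than the previous
   MAX_SIZE and moves as many whole pieces of that size as remain.  */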

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif
    }
}
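
/* Illustrative note (not in the original source): the strategy above is
   three-tiered.  A small constant-size copy (fewer insns than MOVE_RATIO)
   is expanded inline by move_by_pieces; otherwise a movstrMM pattern is
   tried from the narrowest mode upward; failing that, the copy becomes a
   library call to memcpy (or bcopy).  */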
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			   GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
			    GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}

/* Mark the instructions since PREV as a libcall block.
   Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn.  */

static void
group_insns (prev)
     rtx prev;
{
  rtx insn_first;
  rtx insn_last;

  /* Find the instructions to mark.  */
  if (prev)
    insn_first = NEXT_INSN (prev);
  else
    insn_first = get_insns ();

  insn_last = get_last_insn ();

  REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
				   REG_NOTES (insn_last));

  REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
				    REG_NOTES (insn_first));
}
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
    submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
			     (class == MODE_COMPLEX_INT
			      ? MODE_INT : MODE_FLOAT),
			     0);

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && submode != BLKmode
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx prev = get_last_insn ();

      /* Tell flow that the whole of the destination is being set.  */
      if (GET_CODE (x) == REG)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 ((stack ? change_address (x, submode, (rtx) 0)
		   : gen_highpart (submode, x)),
		  gen_highpart (submode, y)));
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 ((stack ? change_address (x, submode, (rtx) 0)
		   : gen_lowpart (submode, x)),
		  gen_lowpart (submode, y)));

      group_insns (prev);

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx prev_insn = get_last_insn ();

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}
      /* Mark these insns as a libcall block.  */
      group_insns (prev_insn);

      return last_insn;
    }
  else
    abort ();
}
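
/* Illustrative note (not in the original source): for, say, a DCmode
   (complex double) move on a machine with a DFmode move pattern but no
   DCmode one, the branch above emits two DFmode moves, one for the real
   part and one for the imaginary part, grouped as a libcall block so
   later passes treat them as a unit.  */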
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}
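
/* Illustrative note (not in the original source): on a target whose
   stack grows downward this yields (pre_dec (reg sp)), so wrapping it
   in a MEM, as emit_push_insn does below with
   gen_rtx (MEM, BLKmode, gen_push_operand ()), turns an ordinary move
   into a push.  */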

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);
1821
1822 if (mode == BLKmode)
1823 {
1824 /* Copy a block into the stack, entirely or partially. */
1825
1826 register rtx temp;
1827 int used = partial * UNITS_PER_WORD;
1828 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1829 int skip;
1830
1831 if (size == 0)
1832 abort ();
1833
1834 used -= offset;
1835
1836 /* USED is now the # of bytes we need not copy to the stack
1837 because registers will take care of them. */
1838
1839 if (partial != 0)
1840 xinner = change_address (xinner, BLKmode,
1841 plus_constant (XEXP (xinner, 0), used));
1842
1843 /* If the partial register-part of the arg counts in its stack size,
1844 skip the part of stack space corresponding to the registers.
1845 Otherwise, start copying to the beginning of the stack space,
1846 by setting SKIP to 0. */
1847#ifndef REG_PARM_STACK_SPACE
1848 skip = 0;
1849#else
1850 skip = used;
1851#endif
1852
1853#ifdef PUSH_ROUNDING
1854 /* Do it with several push insns if that doesn't take lots of insns
1855 and if there is no difficulty with push insns that skip bytes
1856 on the stack for alignment purposes. */
1857 if (args_addr == 0
1858 && GET_CODE (size) == CONST_INT
1859 && skip == 0
1860 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1861 < MOVE_RATIO)
1862 /* Here we avoid the case of a structure whose weak alignment
1863 forces many pushes of a small amount of data,
1864 and such small pushes do rounding that causes trouble. */
1865 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1866 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 1867 || PUSH_ROUNDING (align) == align)
1868 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1869 {
1870 /* Push padding now if padding above and stack grows down,
1871 or if padding below and stack grows up.
1872 But if space already allocated, this has already been done. */
1873 if (extra && args_addr == 0
1874 && where_pad != none && where_pad != stack_direction)
906c4e36 1875 anti_adjust_stack (GEN_INT (extra));
1876
1877 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1878 INTVAL (size) - used, align);
1879 }
1880 else
1881#endif /* PUSH_ROUNDING */
1882 {
1883 /* Otherwise make space on the stack and copy the data
1884 to the address of that space. */
1885
1886 /* Deduct words put into registers from the size we must copy. */
1887 if (partial != 0)
1888 {
1889 if (GET_CODE (size) == CONST_INT)
906c4e36 1890 size = GEN_INT (INTVAL (size) - used);
1891 else
1892 size = expand_binop (GET_MODE (size), sub_optab, size,
1893 GEN_INT (used), NULL_RTX, 0,
1894 OPTAB_LIB_WIDEN);
1895 }
1896
1897 /* Get the address of the stack space.
1898 In this case, we do not deal with EXTRA separately.
1899 A single stack adjust will do. */
1900 if (! args_addr)
1901 {
1902 temp = push_block (size, extra, where_pad == downward);
1903 extra = 0;
1904 }
1905 else if (GET_CODE (args_so_far) == CONST_INT)
1906 temp = memory_address (BLKmode,
1907 plus_constant (args_addr,
1908 skip + INTVAL (args_so_far)));
1909 else
1910 temp = memory_address (BLKmode,
1911 plus_constant (gen_rtx (PLUS, Pmode,
1912 args_addr, args_so_far),
1913 skip));
1914
1915 /* TEMP is the address of the block. Copy the data there. */
1916 if (GET_CODE (size) == CONST_INT
1917 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1918 < MOVE_RATIO))
1919 {
1920 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1921 INTVAL (size), align);
1922 goto ret;
1923 }
1924 /* Try the most limited insn first, because there's no point
1925 including more than one in the machine description unless
1926 the more limited one has some advantage. */
1927#ifdef HAVE_movstrqi
1928 if (HAVE_movstrqi
1929 && GET_CODE (size) == CONST_INT
1930 && ((unsigned) INTVAL (size)
1931 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1932 {
1933 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1934 xinner, size, GEN_INT (align));
1935 if (pat != 0)
1936 {
1937 emit_insn (pat);
1938 goto ret;
1939 }
1940 }
1941#endif
1942#ifdef HAVE_movstrhi
1943 if (HAVE_movstrhi
1944 && GET_CODE (size) == CONST_INT
1945 && ((unsigned) INTVAL (size)
1946 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1947 {
1948 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1949 xinner, size, GEN_INT (align));
1950 if (pat != 0)
1951 {
1952 emit_insn (pat);
1953 goto ret;
1954 }
1955 }
1956#endif
1957#ifdef HAVE_movstrsi
1958 if (HAVE_movstrsi)
1959 {
1960 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1961 xinner, size, GEN_INT (align));
1962 if (pat != 0)
1963 {
1964 emit_insn (pat);
1965 goto ret;
1966 }
1967 }
1968#endif
1969#ifdef HAVE_movstrdi
1970 if (HAVE_movstrdi)
1971 {
1972 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1973 xinner, size, GEN_INT (align));
1974 if (pat != 0)
1975 {
1976 emit_insn (pat);
1977 goto ret;
1978 }
1979 }
1980#endif
1981
1982#ifndef ACCUMULATE_OUTGOING_ARGS
1983 /* If the source is referenced relative to the stack pointer,
1984 copy it to another register to stabilize it. We do not need
1985 to do this if we know that we won't be changing sp. */
1986
1987 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1988 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1989 temp = copy_to_reg (temp);
1990#endif
1991
1992 /* Make inhibit_defer_pop nonzero around the library call
1993 to force it to pop the bcopy-arguments right away. */
1994 NO_DEFER_POP;
1995#ifdef TARGET_MEM_FUNCTIONS
d562e42e 1996 emit_library_call (memcpy_libfunc, 0,
bbf6f052 1997 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1998 convert_to_mode (TYPE_MODE (sizetype),
1999 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2000 TYPE_MODE (sizetype));
bbf6f052 2001#else
d562e42e 2002 emit_library_call (bcopy_libfunc, 0,
bbf6f052 2003 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2004 convert_to_mode (TYPE_MODE (sizetype),
2005 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2006 TYPE_MODE (sizetype));
2007#endif
2008 OK_DEFER_POP;
2009 }
2010 }
2011 else if (partial > 0)
2012 {
2013 /* Scalar partly in registers. */
2014
2015 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2016 int i;
2017 int not_stack;
2018 /* # words of start of argument
2019 that we must make space for but need not store. */
2020 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2021 int args_offset = INTVAL (args_so_far);
2022 int skip;
2023
2024 /* Push padding now if padding above and stack grows down,
2025 or if padding below and stack grows up.
2026 But if space already allocated, this has already been done. */
2027 if (extra && args_addr == 0
2028 && where_pad != none && where_pad != stack_direction)
906c4e36 2029 anti_adjust_stack (GEN_INT (extra));
2030
2031 /* If we make space by pushing it, we might as well push
2032 the real data. Otherwise, we can leave OFFSET nonzero
2033 and leave the space uninitialized. */
2034 if (args_addr == 0)
2035 offset = 0;
2036
2037 /* Now NOT_STACK gets the number of words that we don't need to
2038 allocate on the stack. */
2039 not_stack = partial - offset;
2040
2041 /* If the partial register-part of the arg counts in its stack size,
2042 skip the part of stack space corresponding to the registers.
2043 Otherwise, start copying to the beginning of the stack space,
2044 by setting SKIP to 0. */
2045#ifndef REG_PARM_STACK_SPACE
2046 skip = 0;
2047#else
2048 skip = not_stack;
2049#endif
2050
2051 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2052 x = validize_mem (force_const_mem (mode, x));
2053
2054 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2055 SUBREGs of such registers are not allowed. */
2056 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2057 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2058 x = copy_to_reg (x);
2059
2060 /* Loop over all the words allocated on the stack for this arg. */
2061 /* We can do it by words, because any scalar bigger than a word
 2062 has a size that is a multiple of a word. */
2063#ifndef PUSH_ARGS_REVERSED
2064 for (i = not_stack; i < size; i++)
2065#else
2066 for (i = size - 1; i >= not_stack; i--)
2067#endif
2068 if (i >= not_stack + offset)
2069 emit_push_insn (operand_subword_force (x, i, mode),
2070 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2071 0, args_addr,
2072 GEN_INT (args_offset + ((i - not_stack + skip)
2073 * UNITS_PER_WORD)));
2074 }
2075 else
2076 {
2077 rtx addr;
2078
2079 /* Push padding now if padding above and stack grows down,
2080 or if padding below and stack grows up.
2081 But if space already allocated, this has already been done. */
2082 if (extra && args_addr == 0
2083 && where_pad != none && where_pad != stack_direction)
906c4e36 2084 anti_adjust_stack (GEN_INT (extra));
2085
2086#ifdef PUSH_ROUNDING
2087 if (args_addr == 0)
2088 addr = gen_push_operand ();
2089 else
2090#endif
2091 if (GET_CODE (args_so_far) == CONST_INT)
2092 addr
2093 = memory_address (mode,
2094 plus_constant (args_addr, INTVAL (args_so_far)));
2095 else
2096 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2097 args_so_far));
2098
2099 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2100 }
2101
2102 ret:
2103 /* If part should go in registers, copy that part
2104 into the appropriate registers. Do this now, at the end,
2105 since mem-to-mem copies above may do function calls. */
cd048831 2106 if (partial > 0 && reg != 0)
2107 move_block_to_reg (REGNO (reg), x, partial, mode);
2108
2109 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 2110 anti_adjust_stack (GEN_INT (extra));
2111}
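/* Minimal usage sketch (hypothetical operands, mirroring the recursive
   call above): pushing a single word X with no partial-register part,
   no extra padding, and no preallocated argument block:

     emit_push_insn (x, word_mode, NULL_TREE, NULL_RTX,
                     UNITS_PER_WORD, 0, NULL_RTX, 0, NULL_RTX,
                     const0_rtx);

   SIZE may be NULL_RTX here because X is not BLKmode. */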
2112\f
2113/* Expand an assignment that stores the value of FROM into TO.
2114 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2115 (This may contain a QUEUED rtx.)
2116 Otherwise, the returned value is not meaningful.
2117
2118 SUGGEST_REG is no longer actually used.
2119 It used to mean, copy the value through a register
2120 and return that register, if that is possible.
2121 But now we do this if WANT_VALUE.
2122
2123 If the value stored is a constant, we return the constant. */
2124
2125rtx
2126expand_assignment (to, from, want_value, suggest_reg)
2127 tree to, from;
2128 int want_value;
2129 int suggest_reg;
2130{
2131 register rtx to_rtx = 0;
2132 rtx result;
2133
2134 /* Don't crash if the lhs of the assignment was erroneous. */
2135
2136 if (TREE_CODE (to) == ERROR_MARK)
906c4e36 2137 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2138
2139 /* Assignment of a structure component needs special treatment
2140 if the structure component's rtx is not simply a MEM.
2141 Assignment of an array element at a constant index
2142 has the same problem. */
2143
2144 if (TREE_CODE (to) == COMPONENT_REF
2145 || TREE_CODE (to) == BIT_FIELD_REF
2146 || (TREE_CODE (to) == ARRAY_REF
2147 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2148 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2149 {
2150 enum machine_mode mode1;
2151 int bitsize;
2152 int bitpos;
7bb0943f 2153 tree offset;
2154 int unsignedp;
2155 int volatilep = 0;
7bb0943f 2156 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2157 &mode1, &unsignedp, &volatilep);
2158
2159 /* If we are going to use store_bit_field and extract_bit_field,
2160 make sure to_rtx will be safe for multiple use. */
2161
2162 if (mode1 == VOIDmode && want_value)
2163 tem = stabilize_reference (tem);
2164
906c4e36 2165 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2166 if (offset != 0)
2167 {
906c4e36 2168 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2169
2170 if (GET_CODE (to_rtx) != MEM)
2171 abort ();
2172 to_rtx = change_address (to_rtx, VOIDmode,
2173 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2174 force_reg (Pmode, offset_rtx)));
2175 }
2176 if (volatilep)
2177 {
2178 if (GET_CODE (to_rtx) == MEM)
2179 MEM_VOLATILE_P (to_rtx) = 1;
2180#if 0 /* This was turned off because, when a field is volatile
2181 in an object which is not volatile, the object may be in a register,
 2182 and then we would abort here. */
2183 else
2184 abort ();
2185#endif
2186 }
2187
2188 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2189 (want_value
2190 /* Spurious cast makes HPUX compiler happy. */
2191 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2192 : VOIDmode),
2193 unsignedp,
2194 /* Required alignment of containing datum. */
2195 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2196 int_size_in_bytes (TREE_TYPE (tem)));
2197 preserve_temp_slots (result);
2198 free_temp_slots ();
2199
2200 /* If we aren't returning a result, just pass on what expand_expr
2201 returned; it was probably const0_rtx. Otherwise, convert RESULT
2202 to the proper mode. */
2203 return (want_value ? convert_to_mode (TYPE_MODE (TREE_TYPE (to)), result,
2204 TREE_UNSIGNED (TREE_TYPE (to)))
2205 : result);
2206 }
2207
2208 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2209 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2210
2211 if (to_rtx == 0)
906c4e36 2212 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
bbf6f052 2213
2214 /* Don't move directly into a return register. */
2215 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2216 {
66538193 2217 rtx temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2218 emit_move_insn (to_rtx, temp);
2219 preserve_temp_slots (to_rtx);
2220 free_temp_slots ();
2221 return to_rtx;
2222 }
2223
2224 /* In case we are returning the contents of an object which overlaps
2225 the place the value is being stored, use a safe function when copying
2226 a value through a pointer into a structure value return block. */
2227 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2228 && current_function_returns_struct
2229 && !current_function_returns_pcc_struct)
2230 {
906c4e36 2231 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2232 rtx size = expr_size (from);
2233
2234#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2235 emit_library_call (memcpy_libfunc, 0,
2236 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2237 XEXP (from_rtx, 0), Pmode,
2238 convert_to_mode (TYPE_MODE (sizetype),
2239 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2240 TYPE_MODE (sizetype));
bbf6f052 2241#else
d562e42e 2242 emit_library_call (bcopy_libfunc, 0,
2243 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2244 XEXP (to_rtx, 0), Pmode,
2245 convert_to_mode (TYPE_MODE (sizetype),
2246 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2247 TYPE_MODE (sizetype));
2248#endif
2249
2250 preserve_temp_slots (to_rtx);
2251 free_temp_slots ();
2252 return to_rtx;
2253 }
2254
2255 /* Compute FROM and store the value in the rtx we got. */
2256
2257 result = store_expr (from, to_rtx, want_value);
2258 preserve_temp_slots (result);
2259 free_temp_slots ();
2260 return result;
2261}
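/* Usage sketch (hypothetical): for a C statement `a = b;' a front end
   can expand the assignment and ignore the value:

     expand_assignment (lhs, rhs, 0, 0);

   whereas for `x = (a = b);' it would pass WANT_VALUE == 1 and use
   the returned rtx as the value of the inner assignment. */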
2262
2263/* Generate code for computing expression EXP,
2264 and storing the value into TARGET.
2265 Returns TARGET or an equivalent value.
2266 TARGET may contain a QUEUED rtx.
2267
2268 If SUGGEST_REG is nonzero, copy the value through a register
2269 and return that register, if that is possible.
2270
2271 If the value stored is a constant, we return the constant. */
2272
2273rtx
2274store_expr (exp, target, suggest_reg)
2275 register tree exp;
2276 register rtx target;
2277 int suggest_reg;
2278{
2279 register rtx temp;
2280 int dont_return_target = 0;
2281
2282 if (TREE_CODE (exp) == COMPOUND_EXPR)
2283 {
2284 /* Perform first part of compound expression, then assign from second
2285 part. */
2286 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2287 emit_queue ();
2288 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2289 }
2290 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2291 {
 2292 /* For a conditional expression, get a safe form of the target. Then
2293 test the condition, doing the appropriate assignment on either
2294 side. This avoids the creation of unnecessary temporaries.
2295 For non-BLKmode, it is more efficient not to do this. */
2296
2297 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2298
2299 emit_queue ();
2300 target = protect_from_queue (target, 1);
2301
2302 NO_DEFER_POP;
2303 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2304 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2305 emit_queue ();
2306 emit_jump_insn (gen_jump (lab2));
2307 emit_barrier ();
2308 emit_label (lab1);
2309 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2310 emit_queue ();
2311 emit_label (lab2);
2312 OK_DEFER_POP;
2313 return target;
2314 }
2315 else if (suggest_reg && GET_CODE (target) == MEM
2316 && GET_MODE (target) != BLKmode)
2317 /* If target is in memory and caller wants value in a register instead,
2318 arrange that. Pass TARGET as target for expand_expr so that,
2319 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2320 We know expand_expr will not use the target in that case. */
2321 {
906c4e36 2322 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2323 GET_MODE (target), 0);
2324 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2325 temp = copy_to_reg (temp);
2326 dont_return_target = 1;
2327 }
2328 else if (queued_subexp_p (target))
2329 /* If target contains a postincrement, it is not safe
2330 to use as the returned value. It would access the wrong
2331 place by the time the queued increment gets output.
2332 So copy the value through a temporary and use that temp
2333 as the result. */
2334 {
2335 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2336 {
2337 /* Expand EXP into a new pseudo. */
2338 temp = gen_reg_rtx (GET_MODE (target));
2339 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2340 }
2341 else
906c4e36 2342 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2343 dont_return_target = 1;
2344 }
2345 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
 2346 /* If this is a scalar in a register that is stored in a wider mode
2347 than the declared mode, compute the result into its declared mode
2348 and then convert to the wider mode. Our value is the computed
2349 expression. */
2350 {
2351 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2352 convert_move (SUBREG_REG (target), temp,
2353 SUBREG_PROMOTED_UNSIGNED_P (target));
2354 return temp;
2355 }
2356 else
2357 {
2358 temp = expand_expr (exp, target, GET_MODE (target), 0);
2359 /* DO return TARGET if it's a specified hardware register.
2360 expand_return relies on this. */
2361 if (!(target && GET_CODE (target) == REG
2362 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2363 && CONSTANT_P (temp))
2364 dont_return_target = 1;
2365 }
2366
2367 /* If value was not generated in the target, store it there.
 2368 Convert the value to TARGET's type first if necessary. */
2369
2370 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2371 {
2372 target = protect_from_queue (target, 1);
2373 if (GET_MODE (temp) != GET_MODE (target)
2374 && GET_MODE (temp) != VOIDmode)
2375 {
2376 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2377 if (dont_return_target)
2378 {
2379 /* In this case, we will return TEMP,
2380 so make sure it has the proper mode.
2381 But don't forget to store the value into TARGET. */
2382 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2383 emit_move_insn (target, temp);
2384 }
2385 else
2386 convert_move (target, temp, unsignedp);
2387 }
2388
2389 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2390 {
2391 /* Handle copying a string constant into an array.
2392 The string constant may be shorter than the array.
2393 So copy just the string's actual length, and clear the rest. */
2394 rtx size;
2395
2396 /* Get the size of the data type of the string,
2397 which is actually the size of the target. */
2398 size = expr_size (exp);
2399 if (GET_CODE (size) == CONST_INT
2400 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2401 emit_block_move (target, temp, size,
2402 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2403 else
bbf6f052 2404 {
2405 /* Compute the size of the data to copy from the string. */
2406 tree copy_size
2407 = size_binop (MIN_EXPR,
2408 size_binop (CEIL_DIV_EXPR,
2409 TYPE_SIZE (TREE_TYPE (exp)),
2410 size_int (BITS_PER_UNIT)),
2411 convert (sizetype,
2412 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2413 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2414 VOIDmode, 0);
2415 rtx label = 0;
2416
2417 /* Copy that much. */
2418 emit_block_move (target, temp, copy_size_rtx,
2419 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2420
2421 /* Figure out how much is left in TARGET
2422 that we have to clear. */
2423 if (GET_CODE (copy_size_rtx) == CONST_INT)
2424 {
2425 temp = plus_constant (XEXP (target, 0),
2426 TREE_STRING_LENGTH (exp));
2427 size = plus_constant (size,
2428 - TREE_STRING_LENGTH (exp));
2429 }
2430 else
2431 {
2432 enum machine_mode size_mode = Pmode;
2433
2434 temp = force_reg (Pmode, XEXP (target, 0));
2435 temp = expand_binop (size_mode, add_optab, temp,
2436 copy_size_rtx, NULL_RTX, 0,
2437 OPTAB_LIB_WIDEN);
2438
2439 size = expand_binop (size_mode, sub_optab, size,
2440 copy_size_rtx, NULL_RTX, 0,
2441 OPTAB_LIB_WIDEN);
e87b4f3f 2442
906c4e36 2443 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2444 GET_MODE (size), 0, 0);
2445 label = gen_label_rtx ();
2446 emit_jump_insn (gen_blt (label));
2447 }
2448
2449 if (size != const0_rtx)
2450 {
bbf6f052 2451#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2452 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
e87b4f3f 2453 temp, Pmode, const0_rtx, Pmode, size, Pmode);
bbf6f052 2454#else
d562e42e 2455 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
e87b4f3f 2456 temp, Pmode, size, Pmode);
bbf6f052 2457#endif
2458 }
2459 if (label)
2460 emit_label (label);
2461 }
2462 }
2463 else if (GET_MODE (temp) == BLKmode)
2464 emit_block_move (target, temp, expr_size (exp),
2465 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2466 else
2467 emit_move_insn (target, temp);
2468 }
2469 if (dont_return_target)
2470 return temp;
2471 return target;
2472}
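/* Usage sketch (hypothetical): to evaluate EXP into an existing rtx
   and keep an rtx usable as the value of the whole expression:

     rtx value = store_expr (exp, target, 1);

   VALUE is TARGET itself unless one of the cases above set
   DONT_RETURN_TARGET, in which case an equivalent temporary is
   returned instead. */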
2473\f
2474/* Store the value of constructor EXP into the rtx TARGET.
2475 TARGET is either a REG or a MEM. */
2476
2477static void
2478store_constructor (exp, target)
2479 tree exp;
2480 rtx target;
2481{
2482 tree type = TREE_TYPE (exp);
2483
2484 /* We know our target cannot conflict, since safe_from_p has been called. */
2485#if 0
2486 /* Don't try copying piece by piece into a hard register
2487 since that is vulnerable to being clobbered by EXP.
2488 Instead, construct in a pseudo register and then copy it all. */
2489 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2490 {
2491 rtx temp = gen_reg_rtx (GET_MODE (target));
2492 store_constructor (exp, temp);
2493 emit_move_insn (target, temp);
2494 return;
2495 }
2496#endif
2497
4af3895e 2498 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2499 {
2500 register tree elt;
2501
2502 /* Inform later passes that the whole union value is dead. */
2503 if (TREE_CODE (type) == UNION_TYPE)
bbf6f052 2504 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2505
2506 /* If we are building a static constructor into a register,
2507 set the initial value as zero so we can fold the value into
2508 a constant. */
2509 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2510 emit_move_insn (target, const0_rtx);
2511
2512 /* If the constructor has fewer fields than the structure,
2513 clear the whole structure first. */
2514 else if (list_length (CONSTRUCTOR_ELTS (exp))
2515 != list_length (TYPE_FIELDS (type)))
2516 clear_storage (target, int_size_in_bytes (type));
2517 else
2518 /* Inform later passes that the old value is dead. */
2519 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2520
2521 /* Store each element of the constructor into
2522 the corresponding field of TARGET. */
2523
2524 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2525 {
2526 register tree field = TREE_PURPOSE (elt);
2527 register enum machine_mode mode;
2528 int bitsize;
2529 int bitpos;
2530 int unsignedp;
2531
2532 /* Just ignore missing fields.
2533 We cleared the whole structure, above,
2534 if any fields are missing. */
2535 if (field == 0)
2536 continue;
2537
2538 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2539 unsignedp = TREE_UNSIGNED (field);
2540 mode = DECL_MODE (field);
2541 if (DECL_BIT_FIELD (field))
2542 mode = VOIDmode;
2543
2544 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2545 /* ??? This case remains to be written. */
2546 abort ();
2547
2548 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2549
2550 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2551 /* The alignment of TARGET is
2552 at least what its type requires. */
2553 VOIDmode, 0,
2554 TYPE_ALIGN (type) / BITS_PER_UNIT,
2555 int_size_in_bytes (type));
2556 }
2557 }
4af3895e 2558 else if (TREE_CODE (type) == ARRAY_TYPE)
2559 {
2560 register tree elt;
2561 register int i;
4af3895e 2562 tree domain = TYPE_DOMAIN (type);
2563 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2564 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 2565 tree elttype = TREE_TYPE (type);
2566
2567 /* If the constructor has fewer fields than the structure,
 2568 clear the whole structure first. Similarly if this is a
 2569 static constructor of a non-BLKmode object. */
bbf6f052 2570
2571 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2572 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
597bb7f1 2573 clear_storage (target, int_size_in_bytes (type));
2574 else
2575 /* Inform later passes that the old value is dead. */
2576 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2577
2578 /* Store each element of the constructor into
2579 the corresponding element of TARGET, determined
2580 by counting the elements. */
2581 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2582 elt;
2583 elt = TREE_CHAIN (elt), i++)
2584 {
2585 register enum machine_mode mode;
2586 int bitsize;
2587 int bitpos;
2588 int unsignedp;
2589
2590 mode = TYPE_MODE (elttype);
2591 bitsize = GET_MODE_BITSIZE (mode);
2592 unsignedp = TREE_UNSIGNED (elttype);
2593
2594 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2595
2596 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2597 /* The alignment of TARGET is
2598 at least what its type requires. */
2599 VOIDmode, 0,
2600 TYPE_ALIGN (type) / BITS_PER_UNIT,
2601 int_size_in_bytes (type));
2602 }
2603 }
2604
2605 else
2606 abort ();
2607}
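/* Illustrative example (hypothetical type, usual layout assumed): for
   the C initializer

     struct s { int a; char b; } x = { 1, 2 };

   the RECORD_TYPE arm above finds as many constructor elements as
   fields, so it emits a CLOBBER rather than clearing TARGET, then
   calls store_field once for `a' and once for `b', with BITPOS taken
   from each field's DECL_FIELD_BITPOS. */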
2608
2609/* Store the value of EXP (an expression tree)
2610 into a subfield of TARGET which has mode MODE and occupies
2611 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2612 If MODE is VOIDmode, it means that we are storing into a bit-field.
2613
2614 If VALUE_MODE is VOIDmode, return nothing in particular.
2615 UNSIGNEDP is not used in this case.
2616
2617 Otherwise, return an rtx for the value stored. This rtx
2618 has mode VALUE_MODE if that is convenient to do.
2619 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2620
2621 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2622 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2623
2624static rtx
2625store_field (target, bitsize, bitpos, mode, exp, value_mode,
2626 unsignedp, align, total_size)
2627 rtx target;
2628 int bitsize, bitpos;
2629 enum machine_mode mode;
2630 tree exp;
2631 enum machine_mode value_mode;
2632 int unsignedp;
2633 int align;
2634 int total_size;
2635{
906c4e36 2636 HOST_WIDE_INT width_mask = 0;
bbf6f052 2637
2638 if (bitsize < HOST_BITS_PER_WIDE_INT)
2639 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2640
2641 /* If we are storing into an unaligned field of an aligned union that is
2642 in a register, we may have the mode of TARGET being an integer mode but
2643 MODE == BLKmode. In that case, get an aligned object whose size and
2644 alignment are the same as TARGET and store TARGET into it (we can avoid
2645 the store if the field being stored is the entire width of TARGET). Then
2646 call ourselves recursively to store the field into a BLKmode version of
2647 that object. Finally, load from the object into TARGET. This is not
2648 very efficient in general, but should only be slightly more expensive
2649 than the otherwise-required unaligned accesses. Perhaps this can be
2650 cleaned up later. */
2651
2652 if (mode == BLKmode
2653 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2654 {
2655 rtx object = assign_stack_temp (GET_MODE (target),
2656 GET_MODE_SIZE (GET_MODE (target)), 0);
2657 rtx blk_object = copy_rtx (object);
2658
2659 PUT_MODE (blk_object, BLKmode);
2660
2661 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2662 emit_move_insn (object, target);
2663
2664 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2665 align, total_size);
2666
2667 emit_move_insn (target, object);
2668
2669 return target;
2670 }
2671
2672 /* If the structure is in a register or if the component
2673 is a bit field, we cannot use addressing to access it.
2674 Use bit-field techniques or SUBREG to store in it. */
2675
2676 if (mode == VOIDmode
2677 || (mode != BLKmode && ! direct_store[(int) mode])
2678 || GET_CODE (target) == REG
2679 || GET_CODE (target) == SUBREG)
2680 {
906c4e36 2681 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2682 /* Store the value in the bitfield. */
2683 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2684 if (value_mode != VOIDmode)
2685 {
2686 /* The caller wants an rtx for the value. */
2687 /* If possible, avoid refetching from the bitfield itself. */
2688 if (width_mask != 0
2689 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 2690 {
9074de27 2691 tree count;
5c4d7cfb 2692 enum machine_mode tmode;
86a2c12a 2693
2694 if (unsignedp)
2695 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2696 tmode = GET_MODE (temp);
2697 if (tmode == VOIDmode)
2698 tmode = value_mode;
2699 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2700 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2701 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2702 }
bbf6f052 2703 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2704 NULL_RTX, value_mode, 0, align,
2705 total_size);
2706 }
2707 return const0_rtx;
2708 }
2709 else
2710 {
2711 rtx addr = XEXP (target, 0);
2712 rtx to_rtx;
2713
2714 /* If a value is wanted, it must be the lhs;
2715 so make the address stable for multiple use. */
2716
2717 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2718 && ! CONSTANT_ADDRESS_P (addr)
2719 /* A frame-pointer reference is already stable. */
2720 && ! (GET_CODE (addr) == PLUS
2721 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2722 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2723 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2724 addr = copy_to_reg (addr);
2725
2726 /* Now build a reference to just the desired component. */
2727
2728 to_rtx = change_address (target, mode,
2729 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2730 MEM_IN_STRUCT_P (to_rtx) = 1;
2731
2732 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2733 }
2734}
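/* Worked example (illustrative): refetching a signed 8-bit field just
   stored into a 32-bit TMODE value uses the shift pair above with
   COUNT == 32 - 8 == 24:

     temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
     temp = expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);

   which sign-extends the low byte; the unsigned case instead masks
   with WIDTH_MASK == 0xff via expand_and. */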
2735\f
2736/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2737 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 2738 ARRAY_REFs and find the ultimate containing object, which we return.
2739
2740 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2741 bit position, and *PUNSIGNEDP to the signedness of the field.
2742 If the position of the field is variable, we store a tree
2743 giving the variable offset (in units) in *POFFSET.
2744 This offset is in addition to the bit position.
2745 If the position is not variable, we store 0 in *POFFSET.
2746
2747 If any of the extraction expressions is volatile,
2748 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2749
2750 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2751 is a mode that can be used to access the field. In that case, *PBITSIZE
2752 is redundant.
2753
2754 If the field describes a variable-sized object, *PMODE is set to
2755 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2756 this case, but the address of the object can be found. */
2757
2758tree
2759get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
2760 punsignedp, pvolatilep)
2761 tree exp;
2762 int *pbitsize;
2763 int *pbitpos;
7bb0943f 2764 tree *poffset;
2765 enum machine_mode *pmode;
2766 int *punsignedp;
2767 int *pvolatilep;
2768{
2769 tree size_tree = 0;
2770 enum machine_mode mode = VOIDmode;
742920c7 2771 tree offset = integer_zero_node;
2772
2773 if (TREE_CODE (exp) == COMPONENT_REF)
2774 {
2775 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2776 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2777 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2778 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2779 }
2780 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2781 {
2782 size_tree = TREE_OPERAND (exp, 1);
2783 *punsignedp = TREE_UNSIGNED (exp);
2784 }
2785 else
2786 {
2787 mode = TYPE_MODE (TREE_TYPE (exp));
2788 *pbitsize = GET_MODE_BITSIZE (mode);
2789 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2790 }
2791
2792 if (size_tree)
2793 {
2794 if (TREE_CODE (size_tree) != INTEGER_CST)
2795 mode = BLKmode, *pbitsize = -1;
2796 else
2797 *pbitsize = TREE_INT_CST_LOW (size_tree);
2798 }
2799
2800 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2801 and find the ultimate containing object. */
2802
2803 *pbitpos = 0;
2804
2805 while (1)
2806 {
7bb0943f 2807 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 2808 {
2809 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2810 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2811 : TREE_OPERAND (exp, 2));
bbf6f052 2812
2813 /* If this field hasn't been filled in yet, don't go
2814 past it. This should only happen when folding expressions
2815 made during type construction. */
2816 if (pos == 0)
2817 break;
2818
2819 if (TREE_CODE (pos) == PLUS_EXPR)
2820 {
2821 tree constant, var;
2822 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2823 {
2824 constant = TREE_OPERAND (pos, 0);
2825 var = TREE_OPERAND (pos, 1);
2826 }
2827 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2828 {
2829 constant = TREE_OPERAND (pos, 1);
2830 var = TREE_OPERAND (pos, 0);
2831 }
2832 else
2833 abort ();
742920c7 2834
7bb0943f 2835 *pbitpos += TREE_INT_CST_LOW (constant);
2836 offset = size_binop (PLUS_EXPR, offset,
2837 size_binop (FLOOR_DIV_EXPR, var,
2838 size_int (BITS_PER_UNIT)));
2839 }
2840 else if (TREE_CODE (pos) == INTEGER_CST)
2841 *pbitpos += TREE_INT_CST_LOW (pos);
2842 else
2843 {
2844 /* Assume here that the offset is a multiple of a unit.
2845 If not, there should be an explicitly added constant. */
2846 offset = size_binop (PLUS_EXPR, offset,
2847 size_binop (FLOOR_DIV_EXPR, pos,
2848 size_int (BITS_PER_UNIT)));
7bb0943f 2849 }
bbf6f052 2850 }
bbf6f052 2851
742920c7 2852 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 2853 {
2854 /* This code is based on the code in case ARRAY_REF in expand_expr
2855 below. We assume here that the size of an array element is
2856 always an integral multiple of BITS_PER_UNIT. */
2857
2858 tree index = TREE_OPERAND (exp, 1);
2859 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
2860 tree low_bound
2861 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
2862 tree index_type = TREE_TYPE (index);
2863
2864 if (! integer_zerop (low_bound))
2865 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
2866
2867 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
2868 {
2869 index = convert (type_for_size (POINTER_SIZE, 0), index);
2870 index_type = TREE_TYPE (index);
2871 }
2872
2873 index = fold (build (MULT_EXPR, index_type, index,
2874 TYPE_SIZE (TREE_TYPE (exp))));
2875
2876 if (TREE_CODE (index) == INTEGER_CST
2877 && TREE_INT_CST_HIGH (index) == 0)
2878 *pbitpos += TREE_INT_CST_LOW (index);
2879 else
2880 offset = size_binop (PLUS_EXPR, offset,
2881 size_binop (FLOOR_DIV_EXPR, index,
2882 size_int (BITS_PER_UNIT)));
2883 }
2884 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2885 && ! ((TREE_CODE (exp) == NOP_EXPR
2886 || TREE_CODE (exp) == CONVERT_EXPR)
2887 && (TYPE_MODE (TREE_TYPE (exp))
2888 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2889 break;
2890
2891 /* If any reference in the chain is volatile, the effect is volatile. */
2892 if (TREE_THIS_VOLATILE (exp))
2893 *pvolatilep = 1;
2894 exp = TREE_OPERAND (exp, 0);
2895 }
2896
2897 /* If this was a bit-field, see if there is a mode that allows direct
2898 access in case EXP is in memory. */
e7f3c83f 2899 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
2900 {
2901 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2902 if (mode == BLKmode)
2903 mode = VOIDmode;
2904 }
2905
2906 if (integer_zerop (offset))
2907 offset = 0;
2908
bbf6f052 2909 *pmode = mode;
2910 *poffset = offset;
2911#if 0
2912 /* We aren't finished fixing the callers to really handle nonzero offset. */
2913 if (offset != 0)
2914 abort ();
2915#endif
2916
2917 return exp;
2918}
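/* Usage sketch (hypothetical declarations): decomposing a reference
   such as `s.f', where F is a bit-field of width 5 at bit offset 3:

     int bitsize, bitpos, unsignedp, volatilep = 0;
     tree offset;
     enum machine_mode mode1;
     tree base = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                      &mode1, &unsignedp, &volatilep);

   would set BITSIZE == 5, BITPOS == 3, and OFFSET == 0, and since the
   bit position is not a multiple of the size, MODE1 stays VOIDmode. */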
2919\f
2920/* Given an rtx VALUE that may contain additions and multiplications,
2921 return an equivalent value that just refers to a register or memory.
2922 This is done by generating instructions to perform the arithmetic
2923 and returning a pseudo-register containing the value.
2924
2925 The returned value may be a REG, SUBREG, MEM or constant. */
2926
2927rtx
2928force_operand (value, target)
2929 rtx value, target;
2930{
2931 register optab binoptab = 0;
2932 /* Use a temporary to force order of execution of calls to
2933 `force_operand'. */
2934 rtx tmp;
2935 register rtx op2;
2936 /* Use subtarget as the target for operand 0 of a binary operation. */
2937 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2938
2939 if (GET_CODE (value) == PLUS)
2940 binoptab = add_optab;
2941 else if (GET_CODE (value) == MINUS)
2942 binoptab = sub_optab;
2943 else if (GET_CODE (value) == MULT)
2944 {
2945 op2 = XEXP (value, 1);
2946 if (!CONSTANT_P (op2)
2947 && !(GET_CODE (op2) == REG && op2 != subtarget))
2948 subtarget = 0;
2949 tmp = force_operand (XEXP (value, 0), subtarget);
2950 return expand_mult (GET_MODE (value), tmp,
906c4e36 2951 force_operand (op2, NULL_RTX),
2952 target, 0);
2953 }
2954
2955 if (binoptab)
2956 {
2957 op2 = XEXP (value, 1);
2958 if (!CONSTANT_P (op2)
2959 && !(GET_CODE (op2) == REG && op2 != subtarget))
2960 subtarget = 0;
2961 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2962 {
2963 binoptab = add_optab;
2964 op2 = negate_rtx (GET_MODE (value), op2);
2965 }
2966
2967 /* Check for an addition with OP2 a constant integer and our first
2968 operand a PLUS of a virtual register and something else. In that
2969 case, we want to emit the sum of the virtual register and the
2970 constant first and then add the other value. This allows virtual
2971 register instantiation to simply modify the constant rather than
2972 creating another one around this addition. */
2973 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2974 && GET_CODE (XEXP (value, 0)) == PLUS
2975 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
2976 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
2977 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
2978 {
2979 rtx temp = expand_binop (GET_MODE (value), binoptab,
2980 XEXP (XEXP (value, 0), 0), op2,
2981 subtarget, 0, OPTAB_LIB_WIDEN);
2982 return expand_binop (GET_MODE (value), binoptab, temp,
2983 force_operand (XEXP (XEXP (value, 0), 1), 0),
2984 target, 0, OPTAB_LIB_WIDEN);
2985 }
2986
2987 tmp = force_operand (XEXP (value, 0), subtarget);
2988 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 2989 force_operand (op2, NULL_RTX),
bbf6f052 2990 target, 0, OPTAB_LIB_WIDEN);
8008b228 2991 /* We give UNSIGNEDP = 0 to expand_binop
2992 because the only operations we are expanding here are signed ones. */
2993 }
2994 return value;
2995}
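/* Illustrative sketch: given VALUE == (plus (reg 100) (reg 101)), the
   code above emits an add insn and returns a pseudo holding the sum:

     rtx addr = force_operand (value, NULL_RTX);

   ADDR can then be used where only a REG, SUBREG, MEM, or constant
   is acceptable, e.g. as a memory address. */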
2996\f
2997/* Subroutine of expand_expr:
2998 save the non-copied parts (LIST) of an expr (LHS), and return a list
2999 which can restore these values to their previous values,
3000 should something modify their storage. */
3001
3002static tree
3003save_noncopied_parts (lhs, list)
3004 tree lhs;
3005 tree list;
3006{
3007 tree tail;
3008 tree parts = 0;
3009
3010 for (tail = list; tail; tail = TREE_CHAIN (tail))
3011 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3012 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3013 else
3014 {
3015 tree part = TREE_VALUE (tail);
3016 tree part_type = TREE_TYPE (part);
906c4e36 3017 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3018 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3019 int_size_in_bytes (part_type), 0);
3020 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 3021 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 3022 parts = tree_cons (to_be_saved,
3023 build (RTL_EXPR, part_type, NULL_TREE,
3024 (tree) target),
3025 parts);
3026 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3027 }
3028 return parts;
3029}
3030
3031/* Subroutine of expand_expr:
3032 record the non-copied parts (LIST) of an expr (LHS), and return a list
3033 which specifies the initial values of these parts. */
3034
3035static tree
3036init_noncopied_parts (lhs, list)
3037 tree lhs;
3038 tree list;
3039{
3040 tree tail;
3041 tree parts = 0;
3042
3043 for (tail = list; tail; tail = TREE_CHAIN (tail))
3044 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3045 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3046 else
3047 {
3048 tree part = TREE_VALUE (tail);
3049 tree part_type = TREE_TYPE (part);
906c4e36 3050 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3051 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3052 }
3053 return parts;
3054}
3055
3056/* Subroutine of expand_expr: return nonzero iff there is no way that
3057 EXP can reference X, which is being modified. */
3058
3059static int
3060safe_from_p (x, exp)
3061 rtx x;
3062 tree exp;
3063{
3064 rtx exp_rtl = 0;
3065 int i, nops;
3066
3067 if (x == 0)
3068 return 1;
3069
3070 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3071 find the underlying pseudo. */
3072 if (GET_CODE (x) == SUBREG)
3073 {
3074 x = SUBREG_REG (x);
3075 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3076 return 0;
3077 }
3078
3079 /* If X is a location in the outgoing argument area, it is always safe. */
3080 if (GET_CODE (x) == MEM
3081 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3082 || (GET_CODE (XEXP (x, 0)) == PLUS
3083 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3084 return 1;
3085
3086 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3087 {
3088 case 'd':
3089 exp_rtl = DECL_RTL (exp);
3090 break;
3091
3092 case 'c':
3093 return 1;
3094
3095 case 'x':
3096 if (TREE_CODE (exp) == TREE_LIST)
3097 return ((TREE_VALUE (exp) == 0
3098 || safe_from_p (x, TREE_VALUE (exp)))
3099 && (TREE_CHAIN (exp) == 0
3100 || safe_from_p (x, TREE_CHAIN (exp))));
3101 else
3102 return 0;
3103
3104 case '1':
3105 return safe_from_p (x, TREE_OPERAND (exp, 0));
3106
3107 case '2':
3108 case '<':
3109 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3110 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3111
3112 case 'e':
3113 case 'r':
3114 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3115 the expression. If it is set, we conflict iff we are that rtx or
3116 both are in memory. Otherwise, we check all operands of the
3117 expression recursively. */
3118
3119 switch (TREE_CODE (exp))
3120 {
3121 case ADDR_EXPR:
3122 return staticp (TREE_OPERAND (exp, 0));
3123
3124 case INDIRECT_REF:
3125 if (GET_CODE (x) == MEM)
3126 return 0;
3127 break;
3128
3129 case CALL_EXPR:
3130 exp_rtl = CALL_EXPR_RTL (exp);
3131 if (exp_rtl == 0)
3132 {
3133 /* Assume that the call will clobber all hard registers and
3134 all of memory. */
3135 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3136 || GET_CODE (x) == MEM)
3137 return 0;
3138 }
3139
3140 break;
3141
3142 case RTL_EXPR:
3143 exp_rtl = RTL_EXPR_RTL (exp);
3144 if (exp_rtl == 0)
3145 /* We don't know what this can modify. */
3146 return 0;
3147
3148 break;
3149
3150 case WITH_CLEANUP_EXPR:
3151 exp_rtl = RTL_EXPR_RTL (exp);
3152 break;
3153
3154 case SAVE_EXPR:
3155 exp_rtl = SAVE_EXPR_RTL (exp);
3156 break;
3157
3158 case BIND_EXPR:
3159 /* The only operand we look at is operand 1. The rest aren't
3160 part of the expression. */
3161 return safe_from_p (x, TREE_OPERAND (exp, 1));
3162
3163 case METHOD_CALL_EXPR:
3164 /* This takes a rtx argument, but shouldn't appear here. */
3165 abort ();
3166 }
3167
3168 /* If we have an rtx, we do not need to scan our operands. */
3169 if (exp_rtl)
3170 break;
3171
3172 nops = tree_code_length[(int) TREE_CODE (exp)];
3173 for (i = 0; i < nops; i++)
3174 if (TREE_OPERAND (exp, i) != 0
3175 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3176 return 0;
3177 }
3178
3179 /* If we have an rtl, find any enclosed object. Then see if we conflict
3180 with it. */
3181 if (exp_rtl)
3182 {
3183 if (GET_CODE (exp_rtl) == SUBREG)
3184 {
3185 exp_rtl = SUBREG_REG (exp_rtl);
3186 if (GET_CODE (exp_rtl) == REG
3187 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3188 return 0;
3189 }
3190
3191 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3192 are memory and EXP is not readonly. */
3193 return ! (rtx_equal_p (x, exp_rtl)
3194 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3195 && ! TREE_READONLY (exp)));
3196 }
3197
3198 /* If we reach here, it is safe. */
3199 return 1;
3200}
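/* Illustrative caller pattern (hypothetical): when EXP is a CALL_EXPR
   whose rtl has not yet been made, the CALL_EXPR arm above returns 0
   for any MEM or hard register X, so a caller typically does

     if (! safe_from_p (target, exp))
       target = 0;

   dropping the suggested target rather than risking a clobber. */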
3201
3202/* Subroutine of expand_expr: return nonzero iff EXP is an
3203 expression whose type is statically determinable. */
3204
3205static int
3206fixed_type_p (exp)
3207 tree exp;
3208{
3209 if (TREE_CODE (exp) == PARM_DECL
3210 || TREE_CODE (exp) == VAR_DECL
3211 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3212 || TREE_CODE (exp) == COMPONENT_REF
3213 || TREE_CODE (exp) == ARRAY_REF)
3214 return 1;
3215 return 0;
3216}
3217\f
3218/* expand_expr: generate code for computing expression EXP.
3219 An rtx for the computed value is returned. The value is never null.
3220 In the case of a void EXP, const0_rtx is returned.
3221
3222 The value may be stored in TARGET if TARGET is nonzero.
3223 TARGET is just a suggestion; callers must assume that
3224 the rtx returned may not be the same as TARGET.
3225
3226 If TARGET is CONST0_RTX, it means that the value will be ignored.
3227
3228 If TMODE is not VOIDmode, it suggests generating the
3229 result in mode TMODE. But this is done only when convenient.
 3230 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3231 TMODE is just a suggestion; callers must assume that
3232 the rtx returned may not have mode TMODE.
3233
3234 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3235 with a constant address even if that address is not normally legitimate.
3236 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3237
3238 If MODIFIER is EXPAND_SUM then when EXP is an addition
3239 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3240 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3241 products as above, or REG or MEM, or constant.
3242 Ordinarily in such cases we would output mul or add instructions
3243 and then return a pseudo reg containing the sum.
3244
3245 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3246 it also marks a label as absolutely required (it can't be dead).
26fcb35a 3247 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6dc42e49 3248 This is used for outputting expressions used in initializers. */
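/* Illustrative sketch (hypothetical): with MODIFIER == EXPAND_SUM,
   expanding `p + 4' for a pointer P living in pseudo 100 may simply
   return

     (plus (reg 100) (const_int 4))

   without emitting an add insn; under the default modifier the sum
   would instead be computed into a pseudo register. */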
3249
3250rtx
3251expand_expr (exp, target, tmode, modifier)
3252 register tree exp;
3253 rtx target;
3254 enum machine_mode tmode;
3255 enum expand_modifier modifier;
3256{
3257 register rtx op0, op1, temp;
3258 tree type = TREE_TYPE (exp);
3259 int unsignedp = TREE_UNSIGNED (type);
3260 register enum machine_mode mode = TYPE_MODE (type);
3261 register enum tree_code code = TREE_CODE (exp);
3262 optab this_optab;
3263 /* Use subtarget as the target for operand 0 of a binary operation. */
3264 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3265 rtx original_target = target;
3266 int ignore = (target == const0_rtx
3267 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3268 || code == CONVERT_EXPR || code == REFERENCE_EXPR)
3269 && TREE_CODE (type) == VOID_TYPE));
3270 tree context;
3271
3272 /* Don't use hard regs as subtargets, because the combiner
3273 can only handle pseudo regs. */
3274 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3275 subtarget = 0;
3276 /* Avoid subtargets inside loops,
3277 since they hide some invariant expressions. */
3278 if (preserve_subexpressions_p ())
3279 subtarget = 0;
3280
3281 /* If we are going to ignore this result, we need only do something
3282 if there is a side-effect somewhere in the expression. If there
3283 is, short-circuit the most common cases here. */
bbf6f052 3284
3285 if (ignore)
3286 {
3287 if (! TREE_SIDE_EFFECTS (exp))
3288 return const0_rtx;
3289
3290 /* Ensure we reference a volatile object even if value is ignored. */
3291 if (TREE_THIS_VOLATILE (exp)
3292 && TREE_CODE (exp) != FUNCTION_DECL
3293 && mode != VOIDmode && mode != BLKmode)
3294 {
3295 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3296 if (GET_CODE (temp) == MEM)
3297 temp = copy_to_reg (temp);
3298 return const0_rtx;
3299 }
3300
3301 if (TREE_CODE_CLASS (code) == '1')
3302 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3303 VOIDmode, modifier);
3304 else if (TREE_CODE_CLASS (code) == '2'
3305 || TREE_CODE_CLASS (code) == '<')
3306 {
3307 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3308 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3309 return const0_rtx;
3310 }
3311 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3312 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3313 /* If the second operand has no side effects, just evaluate
3314 the first. */
3315 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3316 VOIDmode, modifier);
 3317 /* If we will do cse, generate all results into pseudo registers
3318 since 1) that allows cse to find more things
3319 and 2) otherwise cse could produce an insn the machine
3320 cannot support. */
3321
3322 target = 0, original_target = 0;
3323 }
3324
3325 if (! cse_not_expected && mode != BLKmode && target
3326 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3327 target = subtarget;
3328
3329 switch (code)
3330 {
3331 case LABEL_DECL:
3332 {
3333 tree function = decl_function_context (exp);
3334 /* Handle using a label in a containing function. */
3335 if (function != current_function_decl && function != 0)
3336 {
3337 struct function *p = find_function_data (function);
3338 /* Allocate in the memory associated with the function
3339 that the label is in. */
3340 push_obstacks (p->function_obstack,
3341 p->function_maybepermanent_obstack);
3342
3343 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3344 label_rtx (exp), p->forced_labels);
3345 pop_obstacks ();
3346 }
3347 else if (modifier == EXPAND_INITIALIZER)
3348 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3349 label_rtx (exp), forced_labels);
26fcb35a 3350 temp = gen_rtx (MEM, FUNCTION_MODE,
b552441b 3351 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3352 if (function != current_function_decl && function != 0)
3353 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3354 return temp;
b552441b 3355 }
3356
3357 case PARM_DECL:
3358 if (DECL_RTL (exp) == 0)
3359 {
3360 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 3361 return CONST0_RTX (mode);
3362 }
3363
3364 case FUNCTION_DECL:
3365 case VAR_DECL:
3366 case RESULT_DECL:
3367 if (DECL_RTL (exp) == 0)
3368 abort ();
3369 /* Ensure variable marked as used
3370 even if it doesn't go through a parser. */
3371 TREE_USED (exp) = 1;
3372 /* Handle variables inherited from containing functions. */
3373 context = decl_function_context (exp);
3374
3375 /* We treat inline_function_decl as an alias for the current function
3376 because that is the inline function whose vars, types, etc.
3377 are being merged into the current function.
3378 See expand_inline_function. */
3379 if (context != 0 && context != current_function_decl
3380 && context != inline_function_decl
3381 /* If var is static, we don't need a static chain to access it. */
3382 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3383 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3384 {
3385 rtx addr;
3386
3387 /* Mark as non-local and addressable. */
81feeecb 3388 DECL_NONLOCAL (exp) = 1;
3389 mark_addressable (exp);
3390 if (GET_CODE (DECL_RTL (exp)) != MEM)
3391 abort ();
3392 addr = XEXP (DECL_RTL (exp), 0);
3393 if (GET_CODE (addr) == MEM)
3394 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3395 else
3396 addr = fix_lexical_addr (addr, exp);
3397 return change_address (DECL_RTL (exp), mode, addr);
3398 }
4af3895e 3399
3400 /* This is the case of an array whose size is to be determined
3401 from its initializer, while the initializer is still being parsed.
3402 See expand_decl. */
3403 if (GET_CODE (DECL_RTL (exp)) == MEM
3404 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3405 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3406 XEXP (DECL_RTL (exp), 0));
3407 if (GET_CODE (DECL_RTL (exp)) == MEM
3408 && modifier != EXPAND_CONST_ADDRESS
3409 && modifier != EXPAND_SUM
3410 && modifier != EXPAND_INITIALIZER)
3411 {
3412 /* DECL_RTL probably contains a constant address.
3413 On RISC machines where a constant address isn't valid,
3414 make some insns to get that address into a register. */
3415 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3416 || (flag_force_addr
3417 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3418 return change_address (DECL_RTL (exp), VOIDmode,
3419 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3420 }
3421
3422 /* If the mode of DECL_RTL does not match that of the decl, it
3423 must be a promoted value. We return a SUBREG of the wanted mode,
3424 but mark it so that we know that it was already extended. */
3425
3426 if (GET_CODE (DECL_RTL (exp)) == REG
3427 && GET_MODE (DECL_RTL (exp)) != mode)
3428 {
3429 enum machine_mode decl_mode = DECL_MODE (exp);
3430
3431 /* Get the signedness used for this variable. Ensure we get the
3432 same mode we got when the variable was declared. */
3433
3434 PROMOTE_MODE (decl_mode, unsignedp, type);
3435
3436 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3437 abort ();
3438
3439 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3440 SUBREG_PROMOTED_VAR_P (temp) = 1;
3441 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3442 return temp;
3443 }
3444
3445 return DECL_RTL (exp);
3446
3447 case INTEGER_CST:
3448 return immed_double_const (TREE_INT_CST_LOW (exp),
3449 TREE_INT_CST_HIGH (exp),
3450 mode);
3451
3452 case CONST_DECL:
3453 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3454
3455 case REAL_CST:
3456 /* If optimized, generate immediate CONST_DOUBLE
3457 which will be turned into memory by reload if necessary.
3458
3459 We used to force a register so that loop.c could see it. But
3460 this does not allow gen_* patterns to perform optimizations with
3461 the constants. It also produces two insns in cases like "x = 1.0;".
3462 On most machines, floating-point constants are not permitted in
3463 many insns, so we'd end up copying it to a register in any case.
3464
3465 Now, we do the copying in expand_binop, if appropriate. */
3466 return immed_real_const (exp);
3467
3468 case COMPLEX_CST:
3469 case STRING_CST:
3470 if (! TREE_CST_RTL (exp))
3471 output_constant_def (exp);
3472
3473 /* TREE_CST_RTL probably contains a constant address.
3474 On RISC machines where a constant address isn't valid,
3475 make some insns to get that address into a register. */
3476 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3477 && modifier != EXPAND_CONST_ADDRESS
3478 && modifier != EXPAND_INITIALIZER
3479 && modifier != EXPAND_SUM
3480 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3481 return change_address (TREE_CST_RTL (exp), VOIDmode,
3482 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3483 return TREE_CST_RTL (exp);
3484
3485 case SAVE_EXPR:
3486 context = decl_function_context (exp);
3487 /* We treat inline_function_decl as an alias for the current function
3488 because that is the inline function whose vars, types, etc.
3489 are being merged into the current function.
3490 See expand_inline_function. */
3491 if (context == current_function_decl || context == inline_function_decl)
3492 context = 0;
3493
3494 /* If this is non-local, handle it. */
3495 if (context)
3496 {
3497 temp = SAVE_EXPR_RTL (exp);
3498 if (temp && GET_CODE (temp) == REG)
3499 {
3500 put_var_into_stack (exp);
3501 temp = SAVE_EXPR_RTL (exp);
3502 }
3503 if (temp == 0 || GET_CODE (temp) != MEM)
3504 abort ();
3505 return change_address (temp, mode,
3506 fix_lexical_addr (XEXP (temp, 0), exp));
3507 }
3508 if (SAVE_EXPR_RTL (exp) == 0)
3509 {
3510 if (mode == BLKmode)
3511 temp
3512 = assign_stack_temp (mode,
3513 int_size_in_bytes (TREE_TYPE (exp)), 0);
3514 else
3515 {
3516 enum machine_mode var_mode = mode;
3517
3518 if (TREE_CODE (type) == INTEGER_TYPE
3519 || TREE_CODE (type) == ENUMERAL_TYPE
3520 || TREE_CODE (type) == BOOLEAN_TYPE
3521 || TREE_CODE (type) == CHAR_TYPE
3522 || TREE_CODE (type) == REAL_TYPE
3523 || TREE_CODE (type) == POINTER_TYPE
3524 || TREE_CODE (type) == OFFSET_TYPE)
3525 {
3526 PROMOTE_MODE (var_mode, unsignedp, type);
3527 }
3528
3529 temp = gen_reg_rtx (var_mode);
3530 }
3531
bbf6f052 3532 SAVE_EXPR_RTL (exp) = temp;
3533 if (!optimize && GET_CODE (temp) == REG)
3534 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3535 save_expr_regs);
3536
3537 /* If the mode of TEMP does not match that of the expression, it
3538 must be a promoted value. We pass store_expr a SUBREG of the
3539 wanted mode but mark it so that we know that it was already
3540 extended. Note that `unsignedp' was modified above in
3541 this case. */
3542
3543 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3544 {
3545 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3546 SUBREG_PROMOTED_VAR_P (temp) = 1;
3547 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3548 }
3549
3550 store_expr (TREE_OPERAND (exp, 0), temp, 0);
bbf6f052 3551 }
3552
3553 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3554 must be a promoted value. We return a SUBREG of the wanted mode,
3555 but mark it so that we know that it was already extended. Note
3556 that `unsignedp' was modified above in this case. */
3557
3558 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3559 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3560 {
3561 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3562 SUBREG_PROMOTED_VAR_P (temp) = 1;
3563 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3564 return temp;
3565 }
3566
3567 return SAVE_EXPR_RTL (exp);
3568
3569 case EXIT_EXPR:
3570 /* Exit the current loop if the body-expression is true. */
3571 {
3572 rtx label = gen_label_rtx ();
3573 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3574 expand_exit_loop (NULL_PTR);
3575 emit_label (label);
3576 }
3577 return const0_rtx;
3578
3579 case LOOP_EXPR:
3580 expand_start_loop (1);
3581 expand_expr_stmt (TREE_OPERAND (exp, 0));
3582 expand_end_loop ();
3583
3584 return const0_rtx;
3585
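/* Illustration (not part of expand_expr): a source loop reaches the two
   cases above as a LOOP_EXPR whose body contains an EXIT_EXPR; roughly: */
#if 0
static int
count_to (int i, int n)
{
  for (;;)			/* LOOP_EXPR: expand_start_loop/end_loop */
    {
      if (! (i < n))		/* EXIT_EXPR: exit the loop when true */
	break;
      i++;
    }
  return i;
}
#endif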
3586 case BIND_EXPR:
3587 {
3588 tree vars = TREE_OPERAND (exp, 0);
3589 int vars_need_expansion = 0;
3590
3591 /* Need to open a binding contour here because
3592 if there are any cleanups they must be contained here. */
3593 expand_start_bindings (0);
3594
3595 /* Mark the corresponding BLOCK for output in its proper place. */
3596 if (TREE_OPERAND (exp, 2) != 0
3597 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3598 insert_block (TREE_OPERAND (exp, 2));
3599
3600 /* If VARS have not yet been expanded, expand them now. */
3601 while (vars)
3602 {
3603 if (DECL_RTL (vars) == 0)
3604 {
3605 vars_need_expansion = 1;
3606 expand_decl (vars);
3607 }
3608 expand_decl_init (vars);
3609 vars = TREE_CHAIN (vars);
3610 }
3611
3612 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3613
3614 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3615
3616 return temp;
3617 }
3618
3619 case RTL_EXPR:
3620 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3621 abort ();
3622 emit_insns (RTL_EXPR_SEQUENCE (exp));
3623 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3624 return RTL_EXPR_RTL (exp);
3625
3626 case CONSTRUCTOR:
3627 /* If we don't need the result, just ensure we evaluate any
3628 subexpressions. */
3629 if (ignore)
3630 {
3631 tree elt;
3632 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3633 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3634 return const0_rtx;
3635 }
3636 /* All elts simple constants => refer to a constant in memory. But
3637 if this is a non-BLKmode mode, let it store a field at a time
3638 since that should make a CONST_INT or CONST_DOUBLE when we
3639 fold. If we are making an initializer and all operands are
3640 constant, put it in memory as well. */
3641 else if ((TREE_STATIC (exp)
3642 && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3643 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
3644 {
3645 rtx constructor = output_constant_def (exp);
3646 if (modifier != EXPAND_CONST_ADDRESS
3647 && modifier != EXPAND_INITIALIZER
3648 && modifier != EXPAND_SUM
3649 && !memory_address_p (GET_MODE (constructor),
3650 XEXP (constructor, 0)))
3651 constructor = change_address (constructor, VOIDmode,
3652 XEXP (constructor, 0));
3653 return constructor;
3654 }
3655
3656 else
3657 {
3658 if (target == 0 || ! safe_from_p (target, exp))
3659 {
3660 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3661 target = gen_reg_rtx (mode);
3662 else
3663 {
3664 enum tree_code c = TREE_CODE (type);
3665 target
3666 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3667 if (c == RECORD_TYPE || c == UNION_TYPE
3668 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
3b94d087 3669 MEM_IN_STRUCT_P (target) = 1;
3670 }
3671 }
3672 store_constructor (exp, target);
3673 return target;
3674 }
3675
3676 case INDIRECT_REF:
3677 {
3678 tree exp1 = TREE_OPERAND (exp, 0);
3679 tree exp2;
3680
3681 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3682 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3683 This code has the same general effect as simply doing
3684 expand_expr on the save expr, except that the expression PTR
3685 is computed for use as a memory address. This means different
3686 code, suitable for indexing, may be generated. */
3687 if (TREE_CODE (exp1) == SAVE_EXPR
3688 && SAVE_EXPR_RTL (exp1) == 0
3689 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3690 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3691 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3692 {
3693 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3694 VOIDmode, EXPAND_SUM);
3695 op0 = memory_address (mode, temp);
3696 op0 = copy_all_regs (op0);
3697 SAVE_EXPR_RTL (exp1) = op0;
3698 }
3699 else
3700 {
906c4e36 3701 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3702 op0 = memory_address (mode, op0);
3703 }
3704
3705 temp = gen_rtx (MEM, mode, op0);
3706 /* If address was computed by addition,
3707 mark this as an element of an aggregate. */
3708 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3709 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3710 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3711 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3712 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3713 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
e7f3c83f 3714 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
3715 || (TREE_CODE (exp1) == ADDR_EXPR
3716 && (exp2 = TREE_OPERAND (exp1, 0))
3717 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3718 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3719 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
3720 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
8c8a8e34 3721 MEM_IN_STRUCT_P (temp) = 1;
f94cc92f 3722 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
89742723 3723#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3724 a location is accessed through a pointer to const does not mean
3725 that the value there can never change. */
8c8a8e34 3726 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
bbf6f052 3727#endif
3728 return temp;
3729 }
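/* Illustration (not part of expand_expr): the SAVE_EXPR-as-address case
   above arises from code like the following, where the front end wraps
   `p' in a SAVE_EXPR so the address is computed only once: */
#if 0
void
bump (int *p)
{
  *p += 2;		/* address of *p expanded with EXPAND_SUM above */
}
#endif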
3730
3731 case ARRAY_REF:
3732 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
3733 abort ();
bbf6f052 3734
bbf6f052 3735 {
3736 tree array = TREE_OPERAND (exp, 0);
3737 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
3738 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3739 tree index = TREE_OPERAND (exp, 1);
3740 tree index_type = TREE_TYPE (index);
bbf6f052 3741 int i;
bbf6f052 3742
3743 /* Optimize the special-case of a zero lower bound. */
3744 if (! integer_zerop (low_bound))
3745 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3746
3747 if (TREE_CODE (index) != INTEGER_CST
3748 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3749 {
3750 /* Nonconstant array index or nonconstant element size.
3751 Generate the tree for *(&array+index) and expand that,
3752 except do it in a language-independent way
3753 and don't complain about non-lvalue arrays.
3754 `mark_addressable' should already have been called
3755 for any array for which this case will be reached. */
3756
3757 /* Don't forget the const or volatile flag from the array
3758 element. */
3759 tree variant_type = build_type_variant (type,
3760 TREE_READONLY (exp),
3761 TREE_THIS_VOLATILE (exp));
3762 tree array_adr = build1 (ADDR_EXPR,
3763 build_pointer_type (variant_type), array);
3764 tree elt;
3765
3766 /* Convert the integer argument to a type the same size as a
3767 pointer so the multiply won't overflow spuriously. */
3768 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3769 index = convert (type_for_size (POINTER_SIZE, 0), index);
3770
3771 /* Don't think the address has side effects
3772 just because the array does.
3773 (In some cases the address might have side effects,
3774 and we fail to record that fact here. However, it should not
3775 matter, since expand_expr should not care.) */
3776 TREE_SIDE_EFFECTS (array_adr) = 0;
3777
3778 elt = build1 (INDIRECT_REF, type,
3779 fold (build (PLUS_EXPR,
3780 TYPE_POINTER_TO (variant_type),
3781 array_adr,
3782 fold (build (MULT_EXPR,
3783 TYPE_POINTER_TO (variant_type),
3784 index,
3785 size_in_bytes (type))))));
3786
3787 /* Volatility, etc., of new expression is same as old
3788 expression. */
3789 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3790 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3791 TREE_READONLY (elt) = TREE_READONLY (exp);
3792
3793 return expand_expr (elt, target, tmode, modifier);
3794 }
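/* Illustration (not part of expand_expr): the rewrite above turns a
   non-constant subscript into explicit pointer arithmetic, roughly: */
#if 0
static int
pick (int a[], int i)
{
  return *(int *) ((char *) &a[0] + i * sizeof (int));	/* a[i] */
}
#endif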
3795
3796 /* Fold an expression like: "foo"[2].
3797 This is not done in fold so it won't happen inside &. */
3798
3799 if (TREE_CODE (array) == STRING_CST
3800 && TREE_CODE (index) == INTEGER_CST
3801 && !TREE_INT_CST_HIGH (index)
3802 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
bbf6f052 3803 {
742920c7 3804 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
bbf6f052 3805 {
742920c7 3806 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
3807 TREE_TYPE (exp) = integer_type_node;
3808 return expand_expr (exp, target, tmode, modifier);
3809 }
742920c7 3810 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
bbf6f052 3811 {
742920c7 3812 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
bbf6f052 3813 TREE_TYPE (exp) = integer_type_node;
3814 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
3815 exp),
3816 target, tmode, modifier);
3817 }
3818 }
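/* Illustration (not part of expand_expr): the fold above lets a constant
   subscript of a string literal expand to an immediate, e.g.: */
#if 0
static int
third_char (void)
{
  return "foo"[2];		/* expands to the constant 'o' (111) */
}
#endif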
bbf6f052 3819
3820 /* If this is a constant index into a constant array,
3821 just get the value from the array. Handle both the cases when
3822 we have an explicit constructor and when our operand is a variable
3823 that was declared const. */
4af3895e 3824
3825 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
3826 {
3827 if (TREE_CODE (index) == INTEGER_CST
3828 && TREE_INT_CST_HIGH (index) == 0)
3829 {
3830 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3831
3832 i = TREE_INT_CST_LOW (index);
3833 while (elem && i--)
3834 elem = TREE_CHAIN (elem);
3835 if (elem)
3836 return expand_expr (fold (TREE_VALUE (elem)), target,
3837 tmode, modifier);
3838 }
3839 }
4af3895e 3840
3841 else if (optimize >= 1
3842 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
3843 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
3844 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
3845 {
3846 if (TREE_CODE (index) == INTEGER_CST
3847 && TREE_INT_CST_HIGH (index) == 0)
3848 {
3849 tree init = DECL_INITIAL (array);
3850
3851 i = TREE_INT_CST_LOW (index);
3852 if (TREE_CODE (init) == CONSTRUCTOR)
3853 {
3854 tree elem = CONSTRUCTOR_ELTS (init);
3855
3856 while (elem && i--)
3857 elem = TREE_CHAIN (elem);
3858 if (elem)
3859 return expand_expr (fold (TREE_VALUE (elem)), target,
3860 tmode, modifier);
3861 }
3862 else if (TREE_CODE (init) == STRING_CST
3863 && i < TREE_STRING_LENGTH (init))
3864 {
3865 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3866 return convert_to_mode (mode, temp, 0);
3867 }
3868 }
3869 }
3870 }
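/* Illustration (not part of expand_expr): with -O, the read below is
   satisfied from DECL_INITIAL by the code above and expands to the
   constant 20: */
#if 0
static const int tab[3] = { 10, 20, 30 };
static int
second (void)
{
  return tab[1];
}
#endif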
8c8a8e34 3871
3872 /* Treat array-ref with constant index as a component-ref. */
3873
3874 case COMPONENT_REF:
3875 case BIT_FIELD_REF:
3876 /* If the operand is a CONSTRUCTOR, we can just extract the
3877 appropriate field if it is present. */
3878 if (code != ARRAY_REF
3879 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3880 {
3881 tree elt;
3882
3883 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3884 elt = TREE_CHAIN (elt))
3885 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3886 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3887 }
3888
3889 {
3890 enum machine_mode mode1;
3891 int bitsize;
3892 int bitpos;
7bb0943f 3893 tree offset;
bbf6f052 3894 int volatilep = 0;
7bb0943f 3895 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3896 &mode1, &unsignedp, &volatilep);
3897
3898 /* If we got back the original object, something is wrong. Perhaps
3899 we are evaluating an expression too early. In any event, don't
3900 infinitely recurse. */
3901 if (tem == exp)
3902 abort ();
3903
3904 /* In some cases, we will be offsetting OP0's address by a constant.
3905 So get it as a sum, if possible. If we will be using it
3906 directly in an insn, we validate it. */
906c4e36 3907 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052 3908
8c8a8e34 3909 /* If this is a constant, put it into a register if it is a
8008b228 3910 legitimate constant and memory if it isn't. */
3911 if (CONSTANT_P (op0))
3912 {
3913 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
f2878c6b 3914 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
3915 op0 = force_reg (mode, op0);
3916 else
3917 op0 = validize_mem (force_const_mem (mode, op0));
3918 }
3919
3920 if (offset != 0)
3921 {
906c4e36 3922 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3923
3924 if (GET_CODE (op0) != MEM)
3925 abort ();
3926 op0 = change_address (op0, VOIDmode,
3927 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3928 force_reg (Pmode, offset_rtx)));
3929 }
3930
3931 /* Don't forget about volatility even if this is a bitfield. */
3932 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3933 {
3934 op0 = copy_rtx (op0);
3935 MEM_VOLATILE_P (op0) = 1;
3936 }
3937
3938 if (mode1 == VOIDmode
3939 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3940 && modifier != EXPAND_CONST_ADDRESS
3941 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3942 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3943 {
3944 /* In cases where an aligned union has an unaligned object
3945 as a field, we might be extracting a BLKmode value from
3946 an integer-mode (e.g., SImode) object. Handle this case
3947 by doing the extract into an object as wide as the field
3948 (which we know to be the width of a basic mode), then
3949 storing into memory, and changing the mode to BLKmode. */
3950 enum machine_mode ext_mode = mode;
3951
3952 if (ext_mode == BLKmode)
3953 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3954
3955 if (ext_mode == BLKmode)
3956 abort ();
3957
3958 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3959 unsignedp, target, ext_mode, ext_mode,
3960 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3961 int_size_in_bytes (TREE_TYPE (tem)));
3962 if (mode == BLKmode)
3963 {
3964 rtx new = assign_stack_temp (ext_mode,
3965 bitsize / BITS_PER_UNIT, 0);
3966
3967 emit_move_insn (new, op0);
3968 op0 = copy_rtx (new);
3969 PUT_MODE (op0, BLKmode);
3970 }
3971
3972 return op0;
3973 }
3974
3975 /* Get a reference to just this component. */
3976 if (modifier == EXPAND_CONST_ADDRESS
3977 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3978 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3979 (bitpos / BITS_PER_UNIT)));
3980 else
3981 op0 = change_address (op0, mode1,
3982 plus_constant (XEXP (op0, 0),
3983 (bitpos / BITS_PER_UNIT)));
3984 MEM_IN_STRUCT_P (op0) = 1;
3985 MEM_VOLATILE_P (op0) |= volatilep;
3986 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3987 return op0;
3988 if (target == 0)
3989 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3990 convert_move (target, op0, unsignedp);
3991 return target;
3992 }
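/* Illustration (not part of expand_expr): reading `s.b' below is
   decomposed by get_inner_reference above into the containing object,
   a bit size of 3 and a bit position (5, on a little-endian bit
   layout), and is then pulled out by extract_bit_field: */
#if 0
struct flags { unsigned a : 5; unsigned b : 3; };
static unsigned
get_b (struct flags s)
{
  return s.b;
}
#endif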
3993
3994 case OFFSET_REF:
3995 {
da120c2f 3996 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
bbf6f052 3997 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
906c4e36 3998 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3999 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4000 MEM_IN_STRUCT_P (temp) = 1;
f94cc92f 4001 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
89742723 4002#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4003 a location is accessed through a pointer to const does not mean
4004 that the value there can never change. */
4005 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4006#endif
4007 return temp;
4008 }
4009
4010 /* Intended for a reference to a buffer of a file-object in Pascal.
4011 But it's not certain that a special tree code will really be
4012 necessary for these. INDIRECT_REF might work for them. */
4013 case BUFFER_REF:
4014 abort ();
4015
4016 /* IN_EXPR: Inlined pascal set IN expression.
4017
4018 Algorithm:
4019 rlo = set_low - (set_low%bits_per_word);
4020 the_word = set [ (index - rlo)/bits_per_word ];
4021 bit_index = index % bits_per_word;
4022 bitmask = 1 << bit_index;
4023 return !!(the_word & bitmask); */
4024 case IN_EXPR:
4025 preexpand_calls (exp);
4026 {
4027 tree set = TREE_OPERAND (exp, 0);
4028 tree index = TREE_OPERAND (exp, 1);
4029 tree set_type = TREE_TYPE (set);
4030
4031 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4032 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4033
4034 rtx index_val;
4035 rtx lo_r;
4036 rtx hi_r;
4037 rtx rlow;
4038 rtx diff, quo, rem, addr, bit, result;
4039 rtx setval, setaddr;
4040 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4041
4042 if (target == 0)
17938e57 4043 target = gen_reg_rtx (mode);
4044
4045 /* If domain is empty, answer is no. */
4046 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4047 return const0_rtx;
4048
4049 index_val = expand_expr (index, 0, VOIDmode, 0);
4050 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4051 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4052 setval = expand_expr (set, 0, VOIDmode, 0);
4053 setaddr = XEXP (setval, 0);
4054
4055 /* Compare index against bounds, if they are constant. */
4056 if (GET_CODE (index_val) == CONST_INT
4057 && GET_CODE (lo_r) == CONST_INT
4058 && INTVAL (index_val) < INTVAL (lo_r))
4059 return const0_rtx;
4060
4061 if (GET_CODE (index_val) == CONST_INT
4062 && GET_CODE (hi_r) == CONST_INT
4063 && INTVAL (hi_r) < INTVAL (index_val))
4064 return const0_rtx;
4065
4066 /* If we get here, we have to generate the code for both cases
4067 (in range and out of range). */
4068
4069 op0 = gen_label_rtx ();
4070 op1 = gen_label_rtx ();
4071
4072 if (! (GET_CODE (index_val) == CONST_INT
4073 && GET_CODE (lo_r) == CONST_INT))
4074 {
4075 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4076 GET_MODE (index_val), 0, 0);
4077 emit_jump_insn (gen_blt (op1));
4078 }
4079
4080 if (! (GET_CODE (index_val) == CONST_INT
4081 && GET_CODE (hi_r) == CONST_INT))
4082 {
4083 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4084 GET_MODE (index_val), 0, 0);
4085 emit_jump_insn (gen_bgt (op1));
4086 }
4087
4088 /* Calculate the element number of bit zero in the first word
4089 of the set. */
4090 if (GET_CODE (lo_r) == CONST_INT)
4091 rlow = GEN_INT (INTVAL (lo_r)
4092 & ~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1));
7308a047 4093 else
4094 rlow = expand_binop (index_mode, and_optab, lo_r,
4095 GEN_INT (~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1)),
4096 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4097
4098 diff = expand_binop (index_mode, sub_optab,
17938e57 4099 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4100
4101 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
17938e57 4102 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
7308a047 4103 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
17938e57 4104 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4105 addr = memory_address (byte_mode,
4106 expand_binop (index_mode, add_optab,
4107 diff, setaddr, NULL_RTX, 0,
4108 OPTAB_LIB_WIDEN));
4109 /* Extract the bit we want to examine.  */
4110 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4111 gen_rtx (MEM, byte_mode, addr),
4112 make_tree (TREE_TYPE (index), rem),
4113 NULL_RTX, 1);
4114 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4115 GET_MODE (target) == byte_mode ? target : 0,
7308a047 4116 1, OPTAB_LIB_WIDEN);
4117
4118 if (result != target)
4119 convert_move (target, result, 1);
4120
4121 /* Output the code to handle the out-of-range case. */
4122 emit_jump (op0);
4123 emit_label (op1);
4124 emit_move_insn (target, const0_rtx);
4125 emit_label (op0);
4126 return target;
4127 }
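/* Illustration (not part of expand_expr): the algorithm given in the
   comment before IN_EXPR, rendered as C for a byte-addressed set
   (BITS_PER_UNIT assumed to be 8): */
#if 0
static int
in_set (const unsigned char *set, int set_low, int index)
{
  int rlo = set_low - (set_low % 8);	/* element number of bit 0 */
  unsigned char the_word = set[(index - rlo) / 8];
  int bit_index = index % 8;
  return !! (the_word & (1 << bit_index));
}
#endif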
4128
4129 case WITH_CLEANUP_EXPR:
4130 if (RTL_EXPR_RTL (exp) == 0)
4131 {
4132 RTL_EXPR_RTL (exp)
4133 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4134 cleanups_this_call
4135 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4136 /* That's it for this cleanup. */
4137 TREE_OPERAND (exp, 2) = 0;
4138 }
4139 return RTL_EXPR_RTL (exp);
4140
4141 case CALL_EXPR:
4142 /* Check for a built-in function. */
4143 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4144 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4145 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4146 return expand_builtin (exp, target, subtarget, tmode, ignore);
4147 /* If this call was expanded already by preexpand_calls,
4148 just return the result we got. */
4149 if (CALL_EXPR_RTL (exp) != 0)
4150 return CALL_EXPR_RTL (exp);
8129842c 4151 return expand_call (exp, target, ignore);
4152
4153 case NON_LVALUE_EXPR:
4154 case NOP_EXPR:
4155 case CONVERT_EXPR:
4156 case REFERENCE_EXPR:
4157 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4158 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4159 if (TREE_CODE (type) == UNION_TYPE)
4160 {
4161 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4162 if (target == 0)
4163 {
4164 if (mode == BLKmode)
4165 {
4166 if (TYPE_SIZE (type) == 0
4167 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4168 abort ();
4169 target = assign_stack_temp (BLKmode,
4170 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4171 + BITS_PER_UNIT - 1)
4172 / BITS_PER_UNIT, 0);
4173 }
4174 else
4175 target = gen_reg_rtx (mode);
4176 }
4177 if (GET_CODE (target) == MEM)
4178 /* Store data into beginning of memory target. */
4179 store_expr (TREE_OPERAND (exp, 0),
4180 change_address (target, TYPE_MODE (valtype), 0), 0);
4181
4182 else if (GET_CODE (target) == REG)
4183 /* Store this field into a union of the proper type. */
4184 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4185 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4186 VOIDmode, 0, 1,
4187 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4188 else
4189 abort ();
4190
4191 /* Return the entire union. */
4192 return target;
4193 }
1499e0a8 4194 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4195 if (GET_MODE (op0) == mode)
4196 return op0;
4197 /* If arg is a constant integer being extended from a narrower mode,
4198 we must really truncate to get the extended bits right. Otherwise
4199 (unsigned long) (unsigned char) ("\377"[0])
4200 would come out as ffffffff. */
4201 if (GET_MODE (op0) == VOIDmode
4202 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4203 < GET_MODE_BITSIZE (mode)))
4204 {
4205 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
4206 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4207
4208 if (width < HOST_BITS_PER_WIDE_INT)
4209 {
4210 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4211 : CONST_DOUBLE_LOW (op0));
4212 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4213 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4214 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4215 else
4216 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4217
4218 op0 = GEN_INT (val);
4219 }
4220 else
4221 {
4222 op0 = (simplify_unary_operation
4223 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4224 ? ZERO_EXTEND : SIGN_EXTEND),
4225 mode, op0,
4226 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4227 if (op0 == 0)
4228 abort ();
4229 }
4230 }
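/* Illustration (not part of expand_expr): the masking above is what makes
   the conversion below yield 255 rather than 0xffffffff: */
#if 0
static unsigned long
widen (void)
{
  return (unsigned long) (unsigned char) ("\377"[0]);
}
#endif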
4231 if (GET_MODE (op0) == VOIDmode)
bbf6f052 4232 return op0;
4233 if (modifier == EXPAND_INITIALIZER)
4234 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4235 if (flag_force_mem && GET_CODE (op0) == MEM)
4236 op0 = copy_to_reg (op0);
4237
4238 if (target == 0)
4239 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4240 else
4241 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4242 return target;
4243
4244 case PLUS_EXPR:
4245 /* We come here from MINUS_EXPR when the second operand is a constant. */
4246 plus_expr:
4247 this_optab = add_optab;
4248
4249 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4250 something else, make sure we add the register to the constant and
4251 then to the other thing. This case can occur during strength
4252 reduction and doing it this way will produce better code if the
4253 frame pointer or argument pointer is eliminated.
4254
4255 fold-const.c will ensure that the constant is always in the inner
4256 PLUS_EXPR, so the only case we need to do anything about is if
4257 sp, ap, or fp is our second argument, in which case we must swap
4258 the innermost first argument and our second argument. */
4259
4260 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4261 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4262 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4263 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4264 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4265 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4266 {
4267 tree t = TREE_OPERAND (exp, 1);
4268
4269 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4270 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4271 }
4272
4273 /* If the result is to be Pmode and we are adding an integer to
4274 something, we might be forming a constant. So try to use
4275 plus_constant. If it produces a sum and we can't accept it,
4276 use force_operand. This allows P = &ARR[const] to generate
4277 efficient code on machines where a SYMBOL_REF is not a valid
4278 address.
4279
4280 If this is an EXPAND_SUM call, always return the sum. */
4281 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
906c4e36 4282 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4283 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4284 || mode == Pmode))
4285 {
4286 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4287 EXPAND_SUM);
4288 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4289 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4290 op1 = force_operand (op1, target);
4291 return op1;
4292 }
4293
4294 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4295 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4296 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4297 || mode == Pmode))
4298 {
4299 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4300 EXPAND_SUM);
4301 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4302 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4303 op0 = force_operand (op0, target);
4304 return op0;
4305 }
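/* Illustration (not part of expand_expr): `&arr[1]' below is formed by
   plus_constant as SYMBOL_REF plus 4; on machines where that sum is not
   a valid address, force_operand copies it into a register: */
#if 0
static int arr[8];
static int *
second_slot (void)
{
  return &arr[1];
}
#endif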
4306
4307 /* No sense saving up arithmetic to be done
4308 if it's all in the wrong mode to form part of an address.
4309 And force_operand won't know whether to sign-extend or
4310 zero-extend. */
4311 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4312 || mode != Pmode) goto binop;
4313
4314 preexpand_calls (exp);
4315 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4316 subtarget = 0;
4317
4318 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
906c4e36 4319 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4320
4321 /* Make sure any term that's a sum with a constant comes last. */
4322 if (GET_CODE (op0) == PLUS
4323 && CONSTANT_P (XEXP (op0, 1)))
4324 {
4325 temp = op0;
4326 op0 = op1;
4327 op1 = temp;
4328 }
4329 /* If adding to a sum including a constant,
4330 associate it to put the constant outside. */
4331 if (GET_CODE (op1) == PLUS
4332 && CONSTANT_P (XEXP (op1, 1)))
4333 {
4334 rtx constant_term = const0_rtx;
4335
4336 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4337 if (temp != 0)
4338 op0 = temp;
4339 /* Ensure that MULT comes first if there is one. */
4340 else if (GET_CODE (op0) == MULT)
4341 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4342 else
4343 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4344
4345 /* Let's also eliminate constants from op0 if possible. */
4346 op0 = eliminate_constant_term (op0, &constant_term);
4347
4348 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4349 their sum should be a constant. Form it into OP1, since the
4350 result we want will then be OP0 + OP1. */
4351
4352 temp = simplify_binary_operation (PLUS, mode, constant_term,
4353 XEXP (op1, 1));
4354 if (temp != 0)
4355 op1 = temp;
4356 else
4357 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4358 }
4359
4360 /* Put a constant term last and put a multiplication first. */
4361 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4362 temp = op1, op1 = op0, op0 = temp;
4363
4364 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4365 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4366
4367 case MINUS_EXPR:
4368 /* Handle difference of two symbolic constants,
4369 for the sake of an initializer. */
4370 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4371 && really_constant_p (TREE_OPERAND (exp, 0))
4372 && really_constant_p (TREE_OPERAND (exp, 1)))
4373 {
4374 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4375 VOIDmode, modifier);
4376 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4377 VOIDmode, modifier);
4378 return gen_rtx (MINUS, mode, op0, op1);
4379 }
4380 /* Convert A - const to A + (-const). */
4381 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4382 {
4383 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4384 fold (build1 (NEGATE_EXPR, type,
4385 TREE_OPERAND (exp, 1))));
4386 goto plus_expr;
4387 }
4388 this_optab = sub_optab;
4389 goto binop;
4390
4391 case MULT_EXPR:
4392 preexpand_calls (exp);
4393 /* If first operand is constant, swap them.
4394 Thus the following special case checks need only
4395 check the second operand. */
4396 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4397 {
4398 register tree t1 = TREE_OPERAND (exp, 0);
4399 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4400 TREE_OPERAND (exp, 1) = t1;
4401 }
4402
4403 /* Attempt to return something suitable for generating an
4404 indexed address, for machines that support that. */
4405
4406 if (modifier == EXPAND_SUM && mode == Pmode
4407 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 4408 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4409 {
4410 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4411
4412 /* Apply distributive law if OP0 is x+c. */
4413 if (GET_CODE (op0) == PLUS
4414 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4415 return gen_rtx (PLUS, mode,
4416 gen_rtx (MULT, mode, XEXP (op0, 0),
4417 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4418 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4419 * INTVAL (XEXP (op0, 1))));
4420
4421 if (GET_CODE (op0) != REG)
906c4e36 4422 op0 = force_operand (op0, NULL_RTX);
4423 if (GET_CODE (op0) != REG)
4424 op0 = copy_to_mode_reg (mode, op0);
4425
4426 return gen_rtx (MULT, mode, op0,
906c4e36 4427 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4428 }
4429
4430 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4431 subtarget = 0;
4432
4433 /* Check for multiplying things that have been extended
4434 from a narrower type. If this machine supports multiplying
4435 in that narrower type with a result in the desired type,
4436 do it that way, and avoid the explicit type-conversion. */
4437 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4438 && TREE_CODE (type) == INTEGER_TYPE
4439 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4440 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4441 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4442 && int_fits_type_p (TREE_OPERAND (exp, 1),
4443 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4444 /* Don't use a widening multiply if a shift will do. */
4445 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 4446 > HOST_BITS_PER_WIDE_INT)
4447 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4448 ||
4449 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4450 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4451 ==
4452 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4453 /* If both operands are extended, they must either both
4454 be zero-extended or both be sign-extended. */
4455 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4456 ==
4457 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4458 {
4459 enum machine_mode innermode
4460 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4461 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4462 ? umul_widen_optab : smul_widen_optab);
4463 if (mode == GET_MODE_WIDER_MODE (innermode)
4464 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4465 {
4466 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
906c4e36 4467 NULL_RTX, VOIDmode, 0);
bbf6f052 4468 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4469 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4470 VOIDmode, 0);
4471 else
4472 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
906c4e36 4473 NULL_RTX, VOIDmode, 0);
4474 goto binop2;
4475 }
4476 }
4477 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 4478 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4479 return expand_mult (mode, op0, op1, target, unsignedp);
4480
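/* Illustration (not part of expand_expr): both operands below are
   NOP_EXPR extensions from HImode, so the test above can use a widening
   multiply (e.g. a `mulhisi3' pattern) instead of two extensions
   followed by a full SImode multiply: */
#if 0
static int
widemul (short a, short b)
{
  return a * b;			/* HImode x HImode -> SImode */
}
#endif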
4481 case TRUNC_DIV_EXPR:
4482 case FLOOR_DIV_EXPR:
4483 case CEIL_DIV_EXPR:
4484 case ROUND_DIV_EXPR:
4485 case EXACT_DIV_EXPR:
4486 preexpand_calls (exp);
4487 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4488 subtarget = 0;
4489 /* Possible optimization: compute the dividend with EXPAND_SUM
4490 then if the divisor is constant can optimize the case
4491 where some terms of the dividend have coeffs divisible by it. */
4492 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 4493 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4494 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4495
4496 case RDIV_EXPR:
4497 this_optab = flodiv_optab;
4498 goto binop;
4499
4500 case TRUNC_MOD_EXPR:
4501 case FLOOR_MOD_EXPR:
4502 case CEIL_MOD_EXPR:
4503 case ROUND_MOD_EXPR:
4504 preexpand_calls (exp);
4505 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4506 subtarget = 0;
4507 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 4508 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4509 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4510
4511 case FIX_ROUND_EXPR:
4512 case FIX_FLOOR_EXPR:
4513 case FIX_CEIL_EXPR:
4514 abort (); /* Not used for C. */
4515
4516 case FIX_TRUNC_EXPR:
906c4e36 4517 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4518 if (target == 0)
4519 target = gen_reg_rtx (mode);
4520 expand_fix (target, op0, unsignedp);
4521 return target;
4522
4523 case FLOAT_EXPR:
906c4e36 4524 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4525 if (target == 0)
4526 target = gen_reg_rtx (mode);
4527 /* expand_float can't figure out what to do if FROM has VOIDmode.
4528 So give it the correct mode. With -O, cse will optimize this. */
4529 if (GET_MODE (op0) == VOIDmode)
4530 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4531 op0);
4532 expand_float (target, op0,
4533 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4534 return target;
4535
4536 case NEGATE_EXPR:
4537 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4538 temp = expand_unop (mode, neg_optab, op0, target, 0);
4539 if (temp == 0)
4540 abort ();
4541 return temp;
4542
4543 case ABS_EXPR:
4544 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4545
4546 /* Handle complex values specially. */
4547 {
4548 enum machine_mode opmode
4549 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4550
4551 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4552 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4553 return expand_complex_abs (opmode, op0, target, unsignedp);
4554 }
4555
4556 /* Unsigned abs is simply the operand. Testing here means we don't
4557 risk generating incorrect code below. */
4558 if (TREE_UNSIGNED (type))
4559 return op0;
4560
4561 /* First try to do it with a special abs instruction. */
4562 temp = expand_unop (mode, abs_optab, op0, target, 0);
4563 if (temp != 0)
4564 return temp;
4565
4566 /* If this machine has expensive jumps, we can do integer absolute
4567 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4568 where W is the width of MODE. */
4569
4570 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4571 {
4572 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4573 size_int (GET_MODE_BITSIZE (mode) - 1),
906c4e36 4574 NULL_RTX, 0);
4575
4576 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4577 OPTAB_LIB_WIDEN);
4578 if (temp != 0)
4579 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4580 OPTAB_LIB_WIDEN);
4581
4582 if (temp != 0)
4583 return temp;
4584 }
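/* Illustration (not part of expand_expr): the branch-free absolute
   value generated above, written out in C for a 32-bit int (assumes the
   usual arithmetic right shift of negative values): */
#if 0
static int
iabs (int x)
{
  int m = x >> 31;		/* sign replicated: 0 or -1 */
  return (x ^ m) - m;		/* equals abs (x) */
}
#endif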
4585
4586 /* If that does not win, use conditional jump and negate. */
4587 target = original_target;
4588 temp = gen_label_rtx ();
4589 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4590 || (GET_CODE (target) == REG
4591 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4592 target = gen_reg_rtx (mode);
4593 emit_move_insn (target, op0);
4594 emit_cmp_insn (target,
4595 expand_expr (convert (type, integer_zero_node),
4596 NULL_RTX, VOIDmode, 0),
4597 GE, NULL_RTX, mode, 0, 0);
4598 NO_DEFER_POP;
4599 emit_jump_insn (gen_bge (temp));
4600 op0 = expand_unop (mode, neg_optab, target, target, 0);
4601 if (op0 != target)
4602 emit_move_insn (target, op0);
4603 emit_label (temp);
4604 OK_DEFER_POP;
4605 return target;
4606
4607 case MAX_EXPR:
4608 case MIN_EXPR:
4609 target = original_target;
4610 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4611 || (GET_CODE (target) == REG
4612 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4613 target = gen_reg_rtx (mode);
906c4e36 4614 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4615 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4616
4617 /* First try to do it with a special MIN or MAX instruction.
4618 If that does not win, use a conditional jump to select the proper
4619 value. */
4620 this_optab = (TREE_UNSIGNED (type)
4621 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4622 : (code == MIN_EXPR ? smin_optab : smax_optab));
4623
4624 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4625 OPTAB_WIDEN);
4626 if (temp != 0)
4627 return temp;
4628
4629 if (target != op0)
4630 emit_move_insn (target, op0);
4631 op0 = gen_label_rtx ();
4632 /* If this mode is an integer too wide to compare properly,
4633 compare word by word. Rely on cse to optimize constant cases. */
4634 if (GET_MODE_CLASS (mode) == MODE_INT
4635 && !can_compare_p (mode))
bbf6f052 4636 {
4637 if (code == MAX_EXPR)
4638 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
bbf6f052 4639 else
f81497d9 4640 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
4641 emit_move_insn (target, op1);
4642 }
4643 else
4644 {
4645 if (code == MAX_EXPR)
4646 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4647 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4648 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4649 else
4650 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4651 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4652 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4653 if (temp == const0_rtx)
4654 emit_move_insn (target, op1);
4655 else if (temp != const_true_rtx)
4656 {
4657 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4658 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4659 else
4660 abort ();
4661 emit_move_insn (target, op1);
4662 }
4663 }
4664 emit_label (op0);
4665 return target;
4666
4667/* ??? Can optimize when the operand of this is a bitwise operation,
4668 by using a different bitwise operation. */
4669 case BIT_NOT_EXPR:
4670 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4671 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4672 if (temp == 0)
4673 abort ();
4674 return temp;
4675
4676 case FFS_EXPR:
4677 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4678 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4679 if (temp == 0)
4680 abort ();
4681 return temp;
4682
4683/* ??? Can optimize bitwise operations with one arg constant.
4684 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4685 and (a bitwise1 b) bitwise2 b (etc)
4686 but that is probably not worthwhile. */
4687
4688/* BIT_AND_EXPR is for bitwise anding.
4689 TRUTH_AND_EXPR is for anding two boolean values
4690 when we want in all cases to compute both of them.
4691 In general it is fastest to do TRUTH_AND_EXPR by
4692 computing both operands as actual zero-or-1 values
4693 and then bitwise anding. In cases where there cannot
4694 be any side effects, better code would be made by
4695 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4696 but the question is how to recognize those cases. */
4697
4698 case TRUTH_AND_EXPR:
4699 case BIT_AND_EXPR:
4700 this_optab = and_optab;
4701 goto binop;
4702
4703/* See comment above about TRUTH_AND_EXPR; it applies here too. */
4704 case TRUTH_OR_EXPR:
4705 case BIT_IOR_EXPR:
4706 this_optab = ior_optab;
4707 goto binop;
4708
874726a8 4709 case TRUTH_XOR_EXPR:
4710 case BIT_XOR_EXPR:
4711 this_optab = xor_optab;
4712 goto binop;
4713
4714 case LSHIFT_EXPR:
4715 case RSHIFT_EXPR:
4716 case LROTATE_EXPR:
4717 case RROTATE_EXPR:
4718 preexpand_calls (exp);
4719 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4720 subtarget = 0;
4721 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4722 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4723 unsignedp);
4724
4725/* Could determine the answer when only additive constants differ.
4726 Also, the addition of one can be handled by changing the condition. */
4727 case LT_EXPR:
4728 case LE_EXPR:
4729 case GT_EXPR:
4730 case GE_EXPR:
4731 case EQ_EXPR:
4732 case NE_EXPR:
4733 preexpand_calls (exp);
4734 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4735 if (temp != 0)
4736 return temp;
4737 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4738 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4739 && original_target
4740 && GET_CODE (original_target) == REG
4741 && (GET_MODE (original_target)
4742 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4743 {
4744 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4745 if (temp != original_target)
4746 temp = copy_to_reg (temp);
4747 op1 = gen_label_rtx ();
906c4e36 4748 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4749 GET_MODE (temp), unsignedp, 0);
4750 emit_jump_insn (gen_beq (op1));
4751 emit_move_insn (temp, const1_rtx);
4752 emit_label (op1);
4753 return temp;
4754 }
4755 /* If no set-flag instruction, must generate a conditional
4756 store into a temporary variable. Drop through
4757 and handle this like && and ||. */
4758
4759 case TRUTH_ANDIF_EXPR:
4760 case TRUTH_ORIF_EXPR:
4761 if (target == 0 || ! safe_from_p (target, exp)
4762 /* Make sure we don't have a hard reg (such as function's return
4763 value) live across basic blocks, if not optimizing. */
4764 || (!optimize && GET_CODE (target) == REG
4765 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4766 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4767 emit_clr_insn (target);
4768 op1 = gen_label_rtx ();
4769 jumpifnot (exp, op1);
4770 emit_0_to_1_insn (target);
4771 emit_label (op1);
4772 return target;
4773
4774 case TRUTH_NOT_EXPR:
4775 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4776 /* The parser is careful to generate TRUTH_NOT_EXPR
4777 only with operands that are always zero or one. */
906c4e36 4778 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4779 target, 1, OPTAB_LIB_WIDEN);
4780 if (temp == 0)
4781 abort ();
4782 return temp;
4783
4784 case COMPOUND_EXPR:
4785 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4786 emit_queue ();
4787 return expand_expr (TREE_OPERAND (exp, 1),
4788 (ignore ? const0_rtx : target),
4789 VOIDmode, 0);
4790
4791 case COND_EXPR:
4792 {
4793 /* Note that COND_EXPRs whose type is a structure or union
4794 are required to be constructed to contain assignments to
4795 a temporary variable, so that we can evaluate them here
4796 for side effect only. If type is void, we must do likewise. */
4797
4798 /* If an arm of the branch requires a cleanup,
4799 only that cleanup is performed. */
4800
4801 tree singleton = 0;
4802 tree binary_op = 0, unary_op = 0;
4803 tree old_cleanups = cleanups_this_call;
4804 cleanups_this_call = 0;
4805
4806 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4807 convert it to our mode, if necessary. */
4808 if (integer_onep (TREE_OPERAND (exp, 1))
4809 && integer_zerop (TREE_OPERAND (exp, 2))
4810 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4811 {
4812 if (ignore)
4813 {
4814 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4815 modifier);
4816 return const0_rtx;
4817 }
4818
4819 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4820 if (GET_MODE (op0) == mode)
4821 return op0;
4822 if (target == 0)
4823 target = gen_reg_rtx (mode);
4824 convert_move (target, op0, unsignedp);
4825 return target;
4826 }
4827
4828 /* If we are not to produce a result, we have no target. Otherwise,
4829 if a target was specified use it; it will not be used as an
4830 intermediate target unless it is safe. If no target, use a
4831 temporary. */
4832
dd27116b 4833 if (ignore)
4834 temp = 0;
4835 else if (original_target
4836 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4837 temp = original_target;
4838 else if (mode == BLKmode)
4839 {
4840 if (TYPE_SIZE (type) == 0
4841 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4842 abort ();
4843 temp = assign_stack_temp (BLKmode,
4844 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4845 + BITS_PER_UNIT - 1)
4846 / BITS_PER_UNIT, 0);
4847 }
4848 else
4849 temp = gen_reg_rtx (mode);
4850
4851 /* Check for X ? A + B : A. If we have this, we can copy
4852 A to the output and conditionally add B. Similarly for unary
4853 operations. Don't do this if X has side-effects because
4854 those side effects might affect A or B and the "?" operation is
4855 a sequence point in ANSI. (We test for side effects later.) */
4856
4857 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4858 && operand_equal_p (TREE_OPERAND (exp, 2),
4859 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4860 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4861 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4862 && operand_equal_p (TREE_OPERAND (exp, 1),
4863 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4864 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4865 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4866 && operand_equal_p (TREE_OPERAND (exp, 2),
4867 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4868 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4869 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4870 && operand_equal_p (TREE_OPERAND (exp, 1),
4871 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4872 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4873
4874 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4875 operation, do this as A + (X != 0). Similarly for other simple
4876 binary operators. */
dd27116b 4877 if (temp && singleton && binary_op
4878 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4879 && (TREE_CODE (binary_op) == PLUS_EXPR
4880 || TREE_CODE (binary_op) == MINUS_EXPR
4881 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4882 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4883 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4884 && integer_onep (TREE_OPERAND (binary_op, 1))
4885 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4886 {
4887 rtx result;
4888 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4889 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4890 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4891 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4892 : and_optab);
4893
4894 /* If we had X ? A : A + 1, do this as A + (X == 0).
4895
4896 We have to invert the truth value here and then put it
4897 back later if do_store_flag fails. We cannot simply copy
4898 TREE_OPERAND (exp, 0) to another variable and modify that
4899 because invert_truthvalue can modify the tree pointed to
4900 by its argument. */
4901 if (singleton == TREE_OPERAND (exp, 1))
4902 TREE_OPERAND (exp, 0)
4903 = invert_truthvalue (TREE_OPERAND (exp, 0));
4904
4905 result = do_store_flag (TREE_OPERAND (exp, 0),
4906 (safe_from_p (temp, singleton)
4907 ? temp : NULL_RTX),
4908 mode, BRANCH_COST <= 1);
4909
4910 if (result)
4911 {
906c4e36 4912 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4913 return expand_binop (mode, boptab, op1, result, temp,
4914 unsignedp, OPTAB_LIB_WIDEN);
4915 }
4916 else if (singleton == TREE_OPERAND (exp, 1))
4917 TREE_OPERAND (exp, 0)
4918 = invert_truthvalue (TREE_OPERAND (exp, 0));
4919 }
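/* Illustration (not part of expand_expr): the store-flag rewrite above
   turns the conditional below into `n + (p != 0)', with no branch: */
#if 0
static int
bump_if (int n, int p)
{
  return p ? n + 1 : n;
}
#endif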
4920
4921 NO_DEFER_POP;
4922 op0 = gen_label_rtx ();
4923
4924 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4925 {
4926 if (temp != 0)
4927 {
4928 /* If the target conflicts with the other operand of the
4929 binary op, we can't use it. Also, we can't use the target
4930 if it is a hard register, because evaluating the condition
4931 might clobber it. */
4932 if ((binary_op
4933 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4934 || (GET_CODE (temp) == REG
4935 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4936 temp = gen_reg_rtx (mode);
4937 store_expr (singleton, temp, 0);
4938 }
4939 else
906c4e36 4940 expand_expr (singleton,
2937cf87 4941 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4942 if (cleanups_this_call)
4943 {
4944 sorry ("aggregate value in COND_EXPR");
4945 cleanups_this_call = 0;
4946 }
4947 if (singleton == TREE_OPERAND (exp, 1))
4948 jumpif (TREE_OPERAND (exp, 0), op0);
4949 else
4950 jumpifnot (TREE_OPERAND (exp, 0), op0);
4951
4952 if (binary_op && temp == 0)
4953 /* Just touch the other operand. */
4954 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 4955 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4956 else if (binary_op)
4957 store_expr (build (TREE_CODE (binary_op), type,
4958 make_tree (type, temp),
4959 TREE_OPERAND (binary_op, 1)),
4960 temp, 0);
4961 else
4962 store_expr (build1 (TREE_CODE (unary_op), type,
4963 make_tree (type, temp)),
4964 temp, 0);
4965 op1 = op0;
4966 }
4967#if 0
4968 /* This is now done in jump.c and is better done there because it
4969 produces shorter register lifetimes. */
4970
4971 /* Check for both possibilities either constants or variables
4972 in registers (but not the same as the target!). If so, can
4973 save branches by assigning one, branching, and assigning the
4974 other. */
4975 else if (temp && GET_MODE (temp) != BLKmode
4976 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4977 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4978 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4979 && DECL_RTL (TREE_OPERAND (exp, 1))
4980 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4981 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4982 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4983 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4984 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4985 && DECL_RTL (TREE_OPERAND (exp, 2))
4986 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4987 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4988 {
4989 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4990 temp = gen_reg_rtx (mode);
4991 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4992 jumpifnot (TREE_OPERAND (exp, 0), op0);
4993 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4994 op1 = op0;
4995 }
4996#endif
4997 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4998 comparison operator. If we have one of these cases, set the
4999 output to A, branch on A (cse will merge these two references),
5000 then set the output to FOO. */
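      /* Editorial sketch (not in the original source): for
	     r = (a > 0 ? a : b);
	 the code below emits the equivalent of
	     r = a;  if (a > 0) goto done;  r = b;  done:;
	 so cse can merge the two references to A.  */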
5001 else if (temp
5002 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5003 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5004 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5005 TREE_OPERAND (exp, 1), 0)
5006 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5007 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5008 {
5009 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5010 temp = gen_reg_rtx (mode);
5011 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5012 jumpif (TREE_OPERAND (exp, 0), op0);
5013 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5014 op1 = op0;
5015 }
5016 else if (temp
5017 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5018 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5019 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5020 TREE_OPERAND (exp, 2), 0)
5021 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5022 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5023 {
5024 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5025 temp = gen_reg_rtx (mode);
5026 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5027 jumpifnot (TREE_OPERAND (exp, 0), op0);
5028 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5029 op1 = op0;
5030 }
5031 else
5032 {
5033 op1 = gen_label_rtx ();
5034 jumpifnot (TREE_OPERAND (exp, 0), op0);
5035 if (temp != 0)
5036 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5037 else
5038 expand_expr (TREE_OPERAND (exp, 1),
5039 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5040 if (cleanups_this_call)
5041 {
5042 sorry ("aggregate value in COND_EXPR");
5043 cleanups_this_call = 0;
5044 }
5045
5046 emit_queue ();
5047 emit_jump_insn (gen_jump (op1));
5048 emit_barrier ();
5049 emit_label (op0);
5050 if (temp != 0)
5051 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5052 else
5053 expand_expr (TREE_OPERAND (exp, 2),
5054 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5055 }
5056
5057 if (cleanups_this_call)
5058 {
5059 sorry ("aggregate value in COND_EXPR");
5060 cleanups_this_call = 0;
5061 }
5062
5063 emit_queue ();
5064 emit_label (op1);
5065 OK_DEFER_POP;
5066 cleanups_this_call = old_cleanups;
5067 return temp;
5068 }
5069
5070 case TARGET_EXPR:
5071 {
5072 /* Something needs to be initialized, but we didn't know
5073 where that thing was when building the tree. For example,
5074 it could be the return value of a function, or a parameter
 5075	 to a function which is passed on the stack, or a temporary
5076 variable which must be passed by reference.
5077
5078 We guarantee that the expression will either be constructed
5079 or copied into our original target. */
5080
5081 tree slot = TREE_OPERAND (exp, 0);
 5082	tree exp1;
5083
5084 if (TREE_CODE (slot) != VAR_DECL)
5085 abort ();
5086
5087 if (target == 0)
5088 {
5089 if (DECL_RTL (slot) != 0)
5090 {
5091 target = DECL_RTL (slot);
 5092	      /* If we have already expanded the slot, don't do
 5093		 it again.  (mrs) */
5094 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5095 return target;
 5096	  }
5097 else
5098 {
5099 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5100 /* All temp slots at this level must not conflict. */
5101 preserve_temp_slots (target);
5102 DECL_RTL (slot) = target;
5103 }
5104
5105#if 0
 5106	/* I bet this needs to be done, and I bet that it needs to
 5107	   be above, inside the else clause.  The reason is
 5108	   simple: how else is it going to get cleaned up?  (mrs)
 5109
 5110	   The reason it probably did not work before, and was
 5111	   commented out, is that this was re-expanding already
 5112	   expanded target_exprs (target == 0 and DECL_RTL (slot)
 5113	   != 0) and also cleaning them up many times as well. :-( */
5114
5115 /* Since SLOT is not known to the called function
5116 to belong to its stack frame, we must build an explicit
5117 cleanup. This case occurs when we must build up a reference
5118 to pass the reference as an argument. In this case,
5119 it is very likely that such a reference need not be
5120 built here. */
5121
5122 if (TREE_OPERAND (exp, 2) == 0)
5123 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5124 if (TREE_OPERAND (exp, 2))
5125 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5126 cleanups_this_call);
5127#endif
5128 }
5129 else
5130 {
5131 /* This case does occur, when expanding a parameter which
5132 needs to be constructed on the stack. The target
5133 is the actual stack address that we want to initialize.
5134 The function we call will perform the cleanup in this case. */
5135
 5136	  /* If we have already assigned it space, use that space,
 5137	     not the target that we were passed in, as our target
 5138	     parameter is only a hint. */
5139 if (DECL_RTL (slot) != 0)
5140 {
5141 target = DECL_RTL (slot);
 5142	      /* If we have already expanded the slot, don't do
 5143		 it again.  (mrs) */
5144 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5145 return target;
5146 }
5147
5148 DECL_RTL (slot) = target;
5149 }
5150
5151 exp1 = TREE_OPERAND (exp, 1);
5152 /* Mark it as expanded. */
5153 TREE_OPERAND (exp, 1) = NULL_TREE;
5154
5155 return expand_expr (exp1, target, tmode, modifier);
5156 }
5157
5158 case INIT_EXPR:
5159 {
5160 tree lhs = TREE_OPERAND (exp, 0);
5161 tree rhs = TREE_OPERAND (exp, 1);
5162 tree noncopied_parts = 0;
5163 tree lhs_type = TREE_TYPE (lhs);
5164
5165 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5166 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5167 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5168 TYPE_NONCOPIED_PARTS (lhs_type));
5169 while (noncopied_parts != 0)
5170 {
5171 expand_assignment (TREE_VALUE (noncopied_parts),
5172 TREE_PURPOSE (noncopied_parts), 0, 0);
5173 noncopied_parts = TREE_CHAIN (noncopied_parts);
5174 }
5175 return temp;
5176 }
5177
5178 case MODIFY_EXPR:
5179 {
5180 /* If lhs is complex, expand calls in rhs before computing it.
5181 That's so we don't compute a pointer and save it over a call.
5182 If lhs is simple, compute it first so we can give it as a
5183 target if the rhs is just a call. This avoids an extra temp and copy
5184 and that prevents a partial-subsumption which makes bad code.
5185 Actually we could treat component_ref's of vars like vars. */
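	/* Editorial sketch (hypothetical C fragments): for p->x = f ()
	   the lhs is complex, so the call to F is expanded first; for
	   v = f () with V a simple VAR_DECL, V is computed first so its
	   rtx can serve as the target of the call, saving a temporary
	   and a copy.  */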
5186
5187 tree lhs = TREE_OPERAND (exp, 0);
5188 tree rhs = TREE_OPERAND (exp, 1);
5189 tree noncopied_parts = 0;
5190 tree lhs_type = TREE_TYPE (lhs);
5191
5192 temp = 0;
5193
5194 if (TREE_CODE (lhs) != VAR_DECL
5195 && TREE_CODE (lhs) != RESULT_DECL
5196 && TREE_CODE (lhs) != PARM_DECL)
5197 preexpand_calls (exp);
5198
5199 /* Check for |= or &= of a bitfield of size one into another bitfield
 5200	 of size one.  In this case, (unless we need the result of the
5201 assignment) we can do this more efficiently with a
5202 test followed by an assignment, if necessary.
5203
5204 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5205 things change so we do, this code should be enhanced to
5206 support it. */
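	/* Editorial sketch (hypothetical struct): given
	       struct s { unsigned a : 1, b : 1; } x;
	   the statement x.a |= x.b; is emitted below as the equivalent of
	       if (x.b) x.a = 1;
	   and x.a &= x.b; as
	       if (! x.b) x.a = 0;
	   avoiding a read-modify-write of the destination bitfield.  */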
5207 if (ignore
5208 && TREE_CODE (lhs) == COMPONENT_REF
5209 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5210 || TREE_CODE (rhs) == BIT_AND_EXPR)
5211 && TREE_OPERAND (rhs, 0) == lhs
5212 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5213 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5214 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5215 {
5216 rtx label = gen_label_rtx ();
5217
5218 do_jump (TREE_OPERAND (rhs, 1),
5219 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5220 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5221 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5222 (TREE_CODE (rhs) == BIT_IOR_EXPR
5223 ? integer_one_node
5224 : integer_zero_node)),
5225 0, 0);
 5226	    do_pending_stack_adjust ();
5227 emit_label (label);
5228 return const0_rtx;
5229 }
5230
5231 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5232 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5233 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5234 TYPE_NONCOPIED_PARTS (lhs_type));
5235
5236 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5237 while (noncopied_parts != 0)
5238 {
5239 expand_assignment (TREE_PURPOSE (noncopied_parts),
5240 TREE_VALUE (noncopied_parts), 0, 0);
5241 noncopied_parts = TREE_CHAIN (noncopied_parts);
5242 }
5243 return temp;
5244 }
5245
5246 case PREINCREMENT_EXPR:
5247 case PREDECREMENT_EXPR:
5248 return expand_increment (exp, 0);
5249
5250 case POSTINCREMENT_EXPR:
5251 case POSTDECREMENT_EXPR:
5252 /* Faster to treat as pre-increment if result is not used. */
5253 return expand_increment (exp, ! ignore);
5254
5255 case ADDR_EXPR:
5256 /* Are we taking the address of a nested function? */
5257 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5258 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5259 {
5260 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5261 op0 = force_operand (op0, target);
5262 }
5263 else
5264 {
 5265	  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5266 (modifier == EXPAND_INITIALIZER
5267 ? modifier : EXPAND_CONST_ADDRESS));
5268
5269 /* We would like the object in memory. If it is a constant,
5270 we can have it be statically allocated into memory. For
5271 a non-constant (REG or SUBREG), we need to allocate some
5272 memory and store the value into it. */
5273
5274 if (CONSTANT_P (op0))
5275 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5276 op0);
5277
5278 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
5279 {
 5280	      /* If this object is in a register, it must not
 5281		 be BLKmode. */
5282 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5283 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5284 rtx memloc
5285 = assign_stack_temp (inner_mode,
5286 int_size_in_bytes (inner_type), 1);
5287
5288 emit_move_insn (memloc, op0);
5289 op0 = memloc;
5290 }
5291
5292 if (GET_CODE (op0) != MEM)
5293 abort ();
5294
5295 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5296 return XEXP (op0, 0);
5297 op0 = force_operand (XEXP (op0, 0), target);
5298 }
5299 if (flag_force_addr && GET_CODE (op0) != REG)
5300 return force_reg (Pmode, op0);
5301 return op0;
5302
5303 case ENTRY_VALUE_EXPR:
5304 abort ();
5305
5306 /* COMPLEX type for Extended Pascal & Fortran */
5307 case COMPLEX_EXPR:
5308 {
5309 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5310
5311 rtx prev;
5312
5313 /* Get the rtx code of the operands. */
5314 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5315 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5316
5317 if (! target)
5318 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5319
5320 prev = get_last_insn ();
5321
5322 /* Tell flow that the whole of the destination is being set. */
5323 if (GET_CODE (target) == REG)
5324 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5325
5326 /* Move the real (op0) and imaginary (op1) parts to their location. */
5327 emit_move_insn (gen_realpart (mode, target), op0);
5328 emit_move_insn (gen_imagpart (mode, target), op1);
5329
5330 /* Complex construction should appear as a single unit. */
5331 group_insns (prev);
5332
5333 return target;
5334 }
5335
5336 case REALPART_EXPR:
5337 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5338 return gen_realpart (mode, op0);
5339
5340 case IMAGPART_EXPR:
5341 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5342 return gen_imagpart (mode, op0);
5343
5344 case CONJ_EXPR:
5345 {
5346 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5347 rtx imag_t;
5348 rtx prev;
5349
5350 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5351
5352 if (! target)
5353 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5354
5355 prev = get_last_insn ();
5356
5357 /* Tell flow that the whole of the destination is being set. */
5358 if (GET_CODE (target) == REG)
5359 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5360
5361 /* Store the realpart and the negated imagpart to target. */
 5362	emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
 5363
 5364	imag_t = gen_imagpart (mode, target);
 5365	temp = expand_unop (mode, neg_optab,
 5366			    gen_imagpart (mode, op0), imag_t, 0);
5367 if (temp != imag_t)
5368 emit_move_insn (imag_t, temp);
5369
 5370	/* Conjugate should appear as a single unit.  */
5371 group_insns (prev);
5372
5373 return target;
5374 }
5375
 5376    case ERROR_MARK:
5377 op0 = CONST0_RTX (tmode);
5378 if (op0 != 0)
5379 return op0;
5380 return const0_rtx;
5381
5382 default:
5383 return (*lang_expand_expr) (exp, target, tmode, modifier);
5384 }
5385
5386 /* Here to do an ordinary binary operator, generating an instruction
5387 from the optab already placed in `this_optab'. */
5388 binop:
5389 preexpand_calls (exp);
5390 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5391 subtarget = 0;
5392 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
 5393  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5394 binop2:
5395 temp = expand_binop (mode, this_optab, op0, op1, target,
5396 unsignedp, OPTAB_LIB_WIDEN);
5397 if (temp == 0)
5398 abort ();
5399 return temp;
5400}
5401\f
5402/* Return the alignment in bits of EXP, a pointer valued expression.
5403 But don't return more than MAX_ALIGN no matter what.
5404 The alignment returned is, by default, the alignment of the thing that
5405 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5406
5407 Otherwise, look at the expression to see if we can do better, i.e., if the
5408 expression is actually pointing at an object whose alignment is tighter. */
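/* Editorial example (not in the original source): given
       double d;
       char *p = (char *) &d + 3;
   the PLUS_EXPR case below caps the result at the largest power of two
   dividing the byte offset, so P is credited with only byte (8-bit)
   alignment even though D itself is aligned more strictly.  */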
5409
5410static int
5411get_pointer_alignment (exp, max_align)
5412 tree exp;
5413 unsigned max_align;
5414{
5415 unsigned align, inner;
5416
5417 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5418 return 0;
5419
5420 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5421 align = MIN (align, max_align);
5422
5423 while (1)
5424 {
5425 switch (TREE_CODE (exp))
5426 {
5427 case NOP_EXPR:
5428 case CONVERT_EXPR:
5429 case NON_LVALUE_EXPR:
5430 exp = TREE_OPERAND (exp, 0);
5431 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5432 return align;
5433 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5434 inner = MIN (inner, max_align);
5435 align = MAX (align, inner);
5436 break;
5437
5438 case PLUS_EXPR:
5439 /* If sum of pointer + int, restrict our maximum alignment to that
5440 imposed by the integer. If not, we can't do any better than
5441 ALIGN. */
5442 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5443 return align;
5444
5445 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5446 & (max_align - 1))
5447 != 0)
5448 max_align >>= 1;
5449
5450 exp = TREE_OPERAND (exp, 0);
5451 break;
5452
5453 case ADDR_EXPR:
5454 /* See what we are pointing at and look at its alignment. */
5455 exp = TREE_OPERAND (exp, 0);
5456 if (TREE_CODE (exp) == FUNCTION_DECL)
5457 align = MAX (align, FUNCTION_BOUNDARY);
5458 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5459 align = MAX (align, DECL_ALIGN (exp));
5460#ifdef CONSTANT_ALIGNMENT
5461 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5462 align = CONSTANT_ALIGNMENT (exp, align);
5463#endif
5464 return MIN (align, max_align);
5465
5466 default:
5467 return align;
5468 }
5469 }
5470}
5471\f
5472/* Return the tree node and offset if a given argument corresponds to
5473 a string constant. */
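/* Editorial example: for the argument "abc" (an ADDR_EXPR of a
   STRING_CST) the STRING_CST is returned with *PTR_OFFSET set to 0;
   for "abc" + 1 (a PLUS_EXPR over such an address) the same node is
   returned with *PTR_OFFSET set to 1.  */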
5474
5475static tree
5476string_constant (arg, ptr_offset)
5477 tree arg;
5478 tree *ptr_offset;
5479{
5480 STRIP_NOPS (arg);
5481
5482 if (TREE_CODE (arg) == ADDR_EXPR
5483 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5484 {
5485 *ptr_offset = integer_zero_node;
5486 return TREE_OPERAND (arg, 0);
5487 }
5488 else if (TREE_CODE (arg) == PLUS_EXPR)
5489 {
5490 tree arg0 = TREE_OPERAND (arg, 0);
5491 tree arg1 = TREE_OPERAND (arg, 1);
5492
5493 STRIP_NOPS (arg0);
5494 STRIP_NOPS (arg1);
5495
5496 if (TREE_CODE (arg0) == ADDR_EXPR
5497 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5498 {
5499 *ptr_offset = arg1;
5500 return TREE_OPERAND (arg0, 0);
5501 }
5502 else if (TREE_CODE (arg1) == ADDR_EXPR
5503 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5504 {
5505 *ptr_offset = arg0;
5506 return TREE_OPERAND (arg1, 0);
5507 }
5508 }
5509
5510 return 0;
5511}
5512
5513/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5514 way, because it could contain a zero byte in the middle.
5515 TREE_STRING_LENGTH is the size of the character array, not the string.
5516
5517 Unfortunately, string_constant can't access the values of const char
5518 arrays with initializers, so neither can we do so here. */
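/* Editorial example: c_strlen folds cases whose answer is known at
   compile time, e.g.
       strlen ("hello")      => 5
       strlen ("hello" + 2)  => 3
   and returns 0, meaning "unknown", otherwise, e.g. for strlen (p)
   with P an arbitrary pointer.  */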
5519
5520static tree
5521c_strlen (src)
5522 tree src;
5523{
5524 tree offset_node;
5525 int offset, max;
5526 char *ptr;
5527
5528 src = string_constant (src, &offset_node);
5529 if (src == 0)
5530 return 0;
5531 max = TREE_STRING_LENGTH (src);
5532 ptr = TREE_STRING_POINTER (src);
5533 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5534 {
5535 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5536 compute the offset to the following null if we don't know where to
5537 start searching for it. */
5538 int i;
5539 for (i = 0; i < max; i++)
5540 if (ptr[i] == 0)
5541 return 0;
5542 /* We don't know the starting offset, but we do know that the string
5543 has no internal zero bytes. We can assume that the offset falls
5544 within the bounds of the string; otherwise, the programmer deserves
5545 what he gets. Subtract the offset from the length of the string,
5546 and return that. */
5547 /* This would perhaps not be valid if we were dealing with named
5548 arrays in addition to literal string constants. */
5549 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5550 }
5551
5552 /* We have a known offset into the string. Start searching there for
5553 a null character. */
5554 if (offset_node == 0)
5555 offset = 0;
5556 else
5557 {
5558 /* Did we get a long long offset? If so, punt. */
5559 if (TREE_INT_CST_HIGH (offset_node) != 0)
5560 return 0;
5561 offset = TREE_INT_CST_LOW (offset_node);
5562 }
5563 /* If the offset is known to be out of bounds, warn, and call strlen at
5564 runtime. */
5565 if (offset < 0 || offset > max)
5566 {
5567 warning ("offset outside bounds of constant string");
5568 return 0;
5569 }
5570 /* Use strlen to search for the first zero byte. Since any strings
5571 constructed with build_string will have nulls appended, we win even
5572 if we get handed something like (char[4])"abcd".
5573
5574 Since OFFSET is our starting index into the string, no further
5575 calculation is needed. */
5576 return size_int (strlen (ptr + offset));
5577}
5578\f
5579/* Expand an expression EXP that calls a built-in function,
5580 with result going to TARGET if that's convenient
5581 (and in mode MODE if that's convenient).
5582 SUBTARGET may be used as the target for computing one of EXP's operands.
5583 IGNORE is nonzero if the value is to be ignored. */
5584
5585static rtx
5586expand_builtin (exp, target, subtarget, mode, ignore)
5587 tree exp;
5588 rtx target;
5589 rtx subtarget;
5590 enum machine_mode mode;
5591 int ignore;
5592{
5593 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5594 tree arglist = TREE_OPERAND (exp, 1);
5595 rtx op0;
 5596  rtx lab1, insns;
 5597  enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
 5598  optab builtin_optab;
5599
5600 switch (DECL_FUNCTION_CODE (fndecl))
5601 {
5602 case BUILT_IN_ABS:
5603 case BUILT_IN_LABS:
5604 case BUILT_IN_FABS:
5605 /* build_function_call changes these into ABS_EXPR. */
5606 abort ();
5607
5608 case BUILT_IN_SIN:
5609 case BUILT_IN_COS:
5610 case BUILT_IN_FSQRT:
5611 /* If not optimizing, call the library function. */
 5612      if (! optimize)
5613 break;
5614
5615 if (arglist == 0
 5616	  /* Arg could be wrong type if user redeclared this fcn wrong. */
 5617	  || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
 5618	return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
 5619
5620 /* Stabilize and compute the argument. */
5621 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5622 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5623 {
5624 exp = copy_node (exp);
5625 arglist = copy_node (arglist);
5626 TREE_OPERAND (exp, 1) = arglist;
5627 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5628 }
 5629      op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5630
5631 /* Make a suitable register to place result in. */
5632 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5633
 5634      emit_queue ();
 5635      start_sequence ();
 5636
5637 switch (DECL_FUNCTION_CODE (fndecl))
5638 {
5639 case BUILT_IN_SIN:
5640 builtin_optab = sin_optab; break;
5641 case BUILT_IN_COS:
5642 builtin_optab = cos_optab; break;
5643 case BUILT_IN_FSQRT:
5644 builtin_optab = sqrt_optab; break;
5645 default:
5646 abort ();
5647 }
5648
5649 /* Compute into TARGET.
5650 Set TARGET to wherever the result comes back. */
5651 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
 5652			     builtin_optab, op0, target, 0);
5653
5654 /* If we were unable to expand via the builtin, stop the
5655 sequence (without outputting the insns) and break, causing
 5656	 a call to the library function.  */
 5657      if (target == 0)
 5658	{
 5659	  end_sequence ();
5660 break;
5661 }
 5662
5663 /* Check the results by default. But if flag_fast_math is turned on,
5664 then assume sqrt will always be called with valid arguments. */
5665
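	  /* Editorial sketch of the check emitted below, in C terms:
		 r = sqrt (x);
		 if (r != r)        (only a NaN compares unequal to itself)
		   errno = EDOM;    (or re-call the library function)  */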
5666 if (! flag_fast_math)
5667 {
 5668	  /* Don't define the builtin FP instructions
5669 if your machine is not IEEE. */
5670 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5671 abort ();
5672
5673 lab1 = gen_label_rtx ();
5674
5675 /* Test the result; if it is NaN, set errno=EDOM because
5676 the argument was not in the domain. */
5677 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5678 emit_jump_insn (gen_beq (lab1));
5679
5680#if TARGET_EDOM
5681 {
5682#ifdef GEN_ERRNO_RTX
5683 rtx errno_rtx = GEN_ERRNO_RTX;
5684#else
5685 rtx errno_rtx
5686 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5687#endif
5688
5689 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5690 }
5691#else
5692 /* We can't set errno=EDOM directly; let the library call do it.
5693 Pop the arguments right away in case the call gets deleted. */
5694 NO_DEFER_POP;
5695 expand_call (exp, target, 0);
5696 OK_DEFER_POP;
5697#endif
5698
5699 emit_label (lab1);
5700 }
 5701
 5702      /* Output the entire sequence. */
5703 insns = get_insns ();
5704 end_sequence ();
5705 emit_insns (insns);
5706
5707 return target;
5708
5709 /* __builtin_apply_args returns block of memory allocated on
5710 the stack into which is stored the arg pointer, structure
5711 value address, static chain, and all the registers that might
5712 possibly be used in performing a function call. The code is
5713 moved to the start of the function so the incoming values are
5714 saved. */
5715 case BUILT_IN_APPLY_ARGS:
5716 /* Don't do __builtin_apply_args more than once in a function.
5717 Save the result of the first call and reuse it. */
5718 if (apply_args_value != 0)
5719 return apply_args_value;
5720 {
5721 /* When this function is called, it means that registers must be
5722 saved on entry to this function. So we migrate the
5723 call to the first insn of this function. */
5724 rtx temp;
5725 rtx seq;
5726
5727 start_sequence ();
5728 temp = expand_builtin_apply_args ();
5729 seq = get_insns ();
5730 end_sequence ();
5731
5732 apply_args_value = temp;
5733
5734 /* Put the sequence after the NOTE that starts the function.
5735 If this is inside a SEQUENCE, make the outer-level insn
5736 chain current, so the code is placed at the start of the
5737 function. */
5738 push_topmost_sequence ();
5739 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5740 pop_topmost_sequence ();
5741 return temp;
5742 }
5743
5744 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5745 FUNCTION with a copy of the parameters described by
5746 ARGUMENTS, and ARGSIZE. It returns a block of memory
5747 allocated on the stack into which is stored all the registers
5748 that might possibly be used for returning the result of a
5749 function. ARGUMENTS is the value returned by
5750 __builtin_apply_args. ARGSIZE is the number of bytes of
5751 arguments that must be copied. ??? How should this value be
5752 computed? We'll also need a safe worst case value for varargs
5753 functions. */
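  /* Editorial sketch (GNU C, hypothetical names FN and SIZE): together
     these three builtins forward a call without knowing its signature:
	 void *args = __builtin_apply_args ();
	 void *res  = __builtin_apply ((void (*)()) fn, args, size);
	 __builtin_return (res);
     where SIZE is a caller-supplied worst case for the bytes of stack
     arguments.  */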
5754 case BUILT_IN_APPLY:
5755 if (arglist == 0
5756 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5757 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5758 || TREE_CHAIN (arglist) == 0
5759 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5760 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5761 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5762 return const0_rtx;
5763 else
5764 {
5765 int i;
5766 tree t;
5767 rtx ops[3];
5768
5769 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
5770 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
5771
5772 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5773 }
5774
5775 /* __builtin_return (RESULT) causes the function to return the
5776 value described by RESULT. RESULT is address of the block of
5777 memory returned by __builtin_apply. */
5778 case BUILT_IN_RETURN:
5779 if (arglist
5780 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5781 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
5782 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
5783 NULL_RTX, VOIDmode, 0));
5784 return const0_rtx;
5785
5786 case BUILT_IN_SAVEREGS:
5787 /* Don't do __builtin_saveregs more than once in a function.
5788 Save the result of the first call and reuse it. */
5789 if (saveregs_value != 0)
5790 return saveregs_value;
5791 {
5792 /* When this function is called, it means that registers must be
5793 saved on entry to this function. So we migrate the
5794 call to the first insn of this function. */
5795 rtx temp;
5796 rtx seq;
5797 rtx valreg, saved_valreg;
5798
5799 /* Now really call the function. `expand_call' does not call
5800 expand_builtin, so there is no danger of infinite recursion here. */
5801 start_sequence ();
5802
5803#ifdef EXPAND_BUILTIN_SAVEREGS
5804 /* Do whatever the machine needs done in this case. */
5805 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5806#else
5807 /* The register where the function returns its value
5808 is likely to have something else in it, such as an argument.
5809 So preserve that register around the call. */
5810 if (value_mode != VOIDmode)
5811 {
5812 valreg = hard_libcall_value (value_mode);
5813 saved_valreg = gen_reg_rtx (value_mode);
5814 emit_move_insn (saved_valreg, valreg);
5815 }
5816
5817 /* Generate the call, putting the value in a pseudo. */
5818 temp = expand_call (exp, target, ignore);
5819
5820 if (value_mode != VOIDmode)
5821 emit_move_insn (valreg, saved_valreg);
5822#endif
5823
5824 seq = get_insns ();
5825 end_sequence ();
5826
5827 saveregs_value = temp;
5828
5829 /* Put the sequence after the NOTE that starts the function.
5830 If this is inside a SEQUENCE, make the outer-level insn
5831 chain current, so the code is placed at the start of the
5832 function. */
5833 push_topmost_sequence ();
 5834	emit_insns_before (seq, NEXT_INSN (get_insns ()));
 5835	pop_topmost_sequence ();
5836 return temp;
5837 }
5838
5839 /* __builtin_args_info (N) returns word N of the arg space info
5840 for the current function. The number and meanings of words
5841 is controlled by the definition of CUMULATIVE_ARGS. */
5842 case BUILT_IN_ARGS_INFO:
5843 {
5844 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5845 int i;
5846 int *word_ptr = (int *) &current_function_args_info;
5847 tree type, elts, result;
5848
5849 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5850 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5851 __FILE__, __LINE__);
5852
5853 if (arglist != 0)
5854 {
5855 tree arg = TREE_VALUE (arglist);
5856 if (TREE_CODE (arg) != INTEGER_CST)
 5857	      error ("argument of `__builtin_args_info' must be constant");
5858 else
5859 {
5860 int wordnum = TREE_INT_CST_LOW (arg);
5861
5862 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
5863 error ("argument of `__builtin_args_info' out of range");
 5864	      else
 5865		return GEN_INT (word_ptr[wordnum]);
5866 }
5867 }
5868 else
 5869	  error ("missing argument in `__builtin_args_info'");
5870
5871 return const0_rtx;
5872
5873#if 0
5874 for (i = 0; i < nwords; i++)
5875 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5876
5877 type = build_array_type (integer_type_node,
5878 build_index_type (build_int_2 (nwords, 0)));
5879 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5880 TREE_CONSTANT (result) = 1;
5881 TREE_STATIC (result) = 1;
5882 result = build (INDIRECT_REF, build_pointer_type (type), result);
5883 TREE_CONSTANT (result) = 1;
 5884	return expand_expr (result, NULL_RTX, VOIDmode, 0);
5885#endif
5886 }
5887
5888 /* Return the address of the first anonymous stack arg. */
5889 case BUILT_IN_NEXT_ARG:
5890 {
5891 tree fntype = TREE_TYPE (current_function_decl);
5892 if (!(TYPE_ARG_TYPES (fntype) != 0
5893 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5894 != void_type_node)))
5895 {
5896 error ("`va_start' used in function with fixed args");
5897 return const0_rtx;
5898 }
5899 }
5900
5901 return expand_binop (Pmode, add_optab,
5902 current_function_internal_arg_pointer,
5903 current_function_arg_offset_rtx,
 5904			   NULL_RTX, 0, OPTAB_LIB_WIDEN);
5905
5906 case BUILT_IN_CLASSIFY_TYPE:
5907 if (arglist != 0)
5908 {
5909 tree type = TREE_TYPE (TREE_VALUE (arglist));
5910 enum tree_code code = TREE_CODE (type);
5911 if (code == VOID_TYPE)
 5912	    return GEN_INT (void_type_class);
 5913	  if (code == INTEGER_TYPE)
 5914	    return GEN_INT (integer_type_class);
 5915	  if (code == CHAR_TYPE)
 5916	    return GEN_INT (char_type_class);
 5917	  if (code == ENUMERAL_TYPE)
 5918	    return GEN_INT (enumeral_type_class);
 5919	  if (code == BOOLEAN_TYPE)
 5920	    return GEN_INT (boolean_type_class);
 5921	  if (code == POINTER_TYPE)
 5922	    return GEN_INT (pointer_type_class);
 5923	  if (code == REFERENCE_TYPE)
 5924	    return GEN_INT (reference_type_class);
 5925	  if (code == OFFSET_TYPE)
 5926	    return GEN_INT (offset_type_class);
 5927	  if (code == REAL_TYPE)
 5928	    return GEN_INT (real_type_class);
 5929	  if (code == COMPLEX_TYPE)
 5930	    return GEN_INT (complex_type_class);
 5931	  if (code == FUNCTION_TYPE)
 5932	    return GEN_INT (function_type_class);
 5933	  if (code == METHOD_TYPE)
 5934	    return GEN_INT (method_type_class);
 5935	  if (code == RECORD_TYPE)
 5936	    return GEN_INT (record_type_class);
 5937	  if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
 5938	    return GEN_INT (union_type_class);
 5939	  if (code == ARRAY_TYPE)
 5940	    return GEN_INT (array_type_class);
 5941	  if (code == STRING_TYPE)
 5942	    return GEN_INT (string_type_class);
 5943	  if (code == SET_TYPE)
 5944	    return GEN_INT (set_type_class);
 5945	  if (code == FILE_TYPE)
 5946	    return GEN_INT (file_type_class);
 5947	  if (code == LANG_TYPE)
 5948	    return GEN_INT (lang_type_class);
 5949	}
 5950      return GEN_INT (no_type_class);
5951
5952 case BUILT_IN_CONSTANT_P:
5953 if (arglist == 0)
5954 return const0_rtx;
5955 else
 5956	return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
5957 ? const1_rtx : const0_rtx);
5958
5959 case BUILT_IN_FRAME_ADDRESS:
5960 /* The argument must be a nonnegative integer constant.
5961 It counts the number of frames to scan up the stack.
5962 The value is the address of that frame. */
5963 case BUILT_IN_RETURN_ADDRESS:
5964 /* The argument must be a nonnegative integer constant.
5965 It counts the number of frames to scan up the stack.
5966 The value is the return address saved in that frame. */
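    /* Editorial example: __builtin_return_address (0) yields the
       address the current function will return to, while
       __builtin_frame_address (1) follows one dynamic-chain link to
       reach the caller's frame before reading.  */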
5967 if (arglist == 0)
5968 /* Warning about missing arg was already issued. */
5969 return const0_rtx;
5970 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5971 {
 5972	  error ("invalid arg to `__builtin_return_address'");
5973 return const0_rtx;
5974 }
5975 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5976 {
 5977	  error ("invalid arg to `__builtin_return_address'");
5978 return const0_rtx;
5979 }
5980 else
5981 {
5982 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5983 rtx tem = frame_pointer_rtx;
5984 int i;
5985
5986 /* Some machines need special handling before we can access arbitrary
5987 frames. For example, on the sparc, we must first flush all
5988 register windows to the stack. */
5989#ifdef SETUP_FRAME_ADDRESSES
5990 SETUP_FRAME_ADDRESSES ();
5991#endif
5992
5993 /* On the sparc, the return address is not in the frame, it is
5994 in a register. There is no way to access it off of the current
5995 frame pointer, but it can be accessed off the previous frame
5996 pointer by reading the value from the register window save
5997 area. */
5998#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
5999 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
6000 count--;
6001#endif
6002
6003 /* Scan back COUNT frames to the specified frame. */
6004 for (i = 0; i < count; i++)
6005 {
6006 /* Assume the dynamic chain pointer is in the word that
6007 the frame address points to, unless otherwise specified. */
6008#ifdef DYNAMIC_CHAIN_ADDRESS
6009 tem = DYNAMIC_CHAIN_ADDRESS (tem);
6010#endif
6011 tem = memory_address (Pmode, tem);
6012 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
6013 }
6014
6015 /* For __builtin_frame_address, return what we've got. */
6016 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6017 return tem;
6018
6019 /* For __builtin_return_address,
6020 Get the return address from that frame. */
6021#ifdef RETURN_ADDR_RTX
6022 return RETURN_ADDR_RTX (count, tem);
6023#else
6024 tem = memory_address (Pmode,
6025 plus_constant (tem, GET_MODE_SIZE (Pmode)));
6026 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
6027#endif
6028 }
6029
6030 case BUILT_IN_ALLOCA:
6031 if (arglist == 0
6032 /* Arg could be non-integer if user redeclared this fcn wrong. */
6033 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6034 return const0_rtx;
6035 current_function_calls_alloca = 1;
6036 /* Compute the argument. */
 6037      op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
6038
6039 /* Allocate the desired space. */
 6040      target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
6041
6042 /* Record the new stack level for nonlocal gotos. */
 6043      if (nonlocal_goto_handler_slot != 0)
 6044	emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
6045 return target;
6046
6047 case BUILT_IN_FFS:
6048 /* If not optimizing, call the library function. */
6049 if (!optimize)
6050 break;
6051
6052 if (arglist == 0
6053 /* Arg could be non-integer if user redeclared this fcn wrong. */
6054 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6055 return const0_rtx;
6056
6057 /* Compute the argument. */
6058 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6059 /* Compute ffs, into TARGET if possible.
6060 Set TARGET to wherever the result comes back. */
6061 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6062 ffs_optab, op0, target, 1);
6063 if (target == 0)
6064 abort ();
6065 return target;
6066
6067 case BUILT_IN_STRLEN:
6068 /* If not optimizing, call the library function. */
6069 if (!optimize)
6070 break;
6071
6072 if (arglist == 0
6073 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6074 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6075 return const0_rtx;
6076 else
6077 {
6078 tree src = TREE_VALUE (arglist);
6079 tree len = c_strlen (src);
 6080
6081 int align
6082 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6083
6084 rtx result, src_rtx, char_rtx;
6085 enum machine_mode insn_mode = value_mode, char_mode;
6086 enum insn_code icode;
6087
6088 /* If the length is known, just return it. */
6089 if (len != 0)
6090 return expand_expr (len, target, mode, 0);
6091
6092 /* If SRC is not a pointer type, don't do this operation inline. */
6093 if (align == 0)
6094 break;
6095
6096 /* Call a function if we can't compute strlen in the right mode. */
6097
6098 while (insn_mode != VOIDmode)
6099 {
6100 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6101 if (icode != CODE_FOR_nothing)
6102 break;
6103
6104 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6105 }
6106 if (insn_mode == VOIDmode)
 6107	      break;
6108
6109 /* Make a place to write the result of the instruction. */
6110 result = target;
6111 if (! (result != 0
6112 && GET_CODE (result) == REG
6113 && GET_MODE (result) == insn_mode
6114 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6115 result = gen_reg_rtx (insn_mode);
6116
 6117	  /* Make sure the operands are acceptable to the predicates. */
 6118
 6119	  if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6120 result = gen_reg_rtx (insn_mode);
6121
6122 src_rtx = memory_address (BLKmode,
 6123				    expand_expr (src, NULL_RTX, Pmode,
 6124						 EXPAND_NORMAL));
 6125	  if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6126 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6127
6128 char_rtx = const0_rtx;
6129 char_mode = insn_operand_mode[(int)icode][2];
6130 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6131 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6132
6133 emit_insn (GEN_FCN (icode) (result,
6134 gen_rtx (MEM, BLKmode, src_rtx),
 6135				      char_rtx, GEN_INT (align)));
6136
6137 /* Return the value in the proper mode for this function. */
6138 if (GET_MODE (result) == value_mode)
6139 return result;
6140 else if (target != 0)
6141 {
6142 convert_move (target, result, 0);
6143 return target;
6144 }
6145 else
6146 return convert_to_mode (value_mode, result, 0);
6147 }
6148
6149 case BUILT_IN_STRCPY:
6150 /* If not optimizing, call the library function. */
6151 if (!optimize)
6152 break;
6153
6154 if (arglist == 0
6155 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6156 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6157 || TREE_CHAIN (arglist) == 0
6158 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6159 return const0_rtx;
6160 else
6161 {
6162 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6163
6164 if (len == 0)
6165 break;
6166
6167 len = size_binop (PLUS_EXPR, len, integer_one_node);
6168
 6169	  chainon (arglist, build_tree_list (NULL_TREE, len));
6170 }
6171
6172 /* Drops in. */
6173 case BUILT_IN_MEMCPY:
6174 /* If not optimizing, call the library function. */
6175 if (!optimize)
6176 break;
6177
6178 if (arglist == 0
6179 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6180 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6181 || TREE_CHAIN (arglist) == 0
6182 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6183 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6184 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6185 return const0_rtx;
6186 else
6187 {
6188 tree dest = TREE_VALUE (arglist);
6189 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6190 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6191
6192 int src_align
6193 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6194 int dest_align
6195 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
 6196	  rtx dest_rtx, dest_mem, src_mem;
6197
6198 /* If either SRC or DEST is not a pointer type, don't do
6199 this operation in-line. */
6200 if (src_align == 0 || dest_align == 0)
6201 {
6202 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6203 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6204 break;
6205 }
6206
 6207	  dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6208 dest_mem = gen_rtx (MEM, BLKmode,
6209 memory_address (BLKmode, dest_rtx));
6210 src_mem = gen_rtx (MEM, BLKmode,
6211 memory_address (BLKmode,
6212 expand_expr (src, NULL_RTX,
6213 Pmode,
6214 EXPAND_NORMAL)));
6215
6216 /* Copy word part most expediently. */
 6217	  emit_block_move (dest_mem, src_mem,
 6218			   expand_expr (len, NULL_RTX, VOIDmode, 0),
6219 MIN (src_align, dest_align));
6220 return dest_rtx;
6221 }
6222
6223/* These comparison functions need an instruction that returns an actual
6224 index. An ordinary compare that just sets the condition codes
6225 is not enough. */
6226#ifdef HAVE_cmpstrsi
6227 case BUILT_IN_STRCMP:
6228 /* If not optimizing, call the library function. */
6229 if (!optimize)
6230 break;
6231
6232 if (arglist == 0
6233 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6234 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6235 || TREE_CHAIN (arglist) == 0
6236 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6237 return const0_rtx;
6238 else if (!HAVE_cmpstrsi)
6239 break;
6240 {
6241 tree arg1 = TREE_VALUE (arglist);
6242 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6243 tree offset;
6244 tree len, len2;
6245
6246 len = c_strlen (arg1);
6247 if (len)
6248 len = size_binop (PLUS_EXPR, integer_one_node, len);
6249 len2 = c_strlen (arg2);
6250 if (len2)
6251 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6252
6253 /* If we don't have a constant length for the first, use the length
6254 of the second, if we know it. We don't require a constant for
6255 this case; some cost analysis could be done if both are available
6256 but neither is constant. For now, assume they're equally cheap.
6257
6258 If both strings have constant lengths, use the smaller. This
6259 could arise if optimization results in strcpy being called with
6260 two fixed strings, or if the code was machine-generated. We should
6261 add some code to the `memcmp' handler below to deal with such
6262 situations, someday. */
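      /* Editorial example: for strcmp ("abcd", s2) with S2 unknown,
	 LEN is 5; for strcmp ("abcd", "xy") the smaller length 3 is
	 used, since the comparison cannot run past the first
	 terminating null.  */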
6263 if (!len || TREE_CODE (len) != INTEGER_CST)
6264 {
6265 if (len2)
6266 len = len2;
6267 else if (len == 0)
6268 break;
6269 }
6270 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6271 {
6272 if (tree_int_cst_lt (len2, len))
6273 len = len2;
6274 }
6275
 6276	  chainon (arglist, build_tree_list (NULL_TREE, len));
6277 }
6278
6279 /* Drops in. */
6280 case BUILT_IN_MEMCMP:
6281 /* If not optimizing, call the library function. */
6282 if (!optimize)
6283 break;
6284
6285 if (arglist == 0
6286 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6287 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6288 || TREE_CHAIN (arglist) == 0
6289 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6290 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6291 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6292 return const0_rtx;
6293 else if (!HAVE_cmpstrsi)
6294 break;
6295 {
6296 tree arg1 = TREE_VALUE (arglist);
6297 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6298 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6299 rtx result;
6300
6301 int arg1_align
6302 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6303 int arg2_align
6304 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6305 enum machine_mode insn_mode
6306 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6307
6308 /* If we don't have POINTER_TYPE, call the function. */
6309 if (arg1_align == 0 || arg2_align == 0)
6310 {
6311 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6312 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6313 break;
6314 }
6315
6316 /* Make a place to write the result of the instruction. */
6317 result = target;
6318 if (! (result != 0
6319 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6320 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6321 result = gen_reg_rtx (insn_mode);
6322
6323 emit_insn (gen_cmpstrsi (result,
6324 gen_rtx (MEM, BLKmode,
6325 expand_expr (arg1, NULL_RTX, Pmode,
6326 EXPAND_NORMAL)),
 6327				   gen_rtx (MEM, BLKmode,
6328 expand_expr (arg2, NULL_RTX, Pmode,
6329 EXPAND_NORMAL)),
6330 expand_expr (len, NULL_RTX, VOIDmode, 0),
6331 GEN_INT (MIN (arg1_align, arg2_align))));
6332
6333 /* Return the value in the proper mode for this function. */
6334 mode = TYPE_MODE (TREE_TYPE (exp));
6335 if (GET_MODE (result) == mode)
6336 return result;
6337 else if (target != 0)
6338 {
6339 convert_move (target, result, 0);
6340 return target;
6341 }
6342 else
6343 return convert_to_mode (mode, result, 0);
6344 }
6345#else
6346 case BUILT_IN_STRCMP:
6347 case BUILT_IN_MEMCMP:
6348 break;
6349#endif
6350
6351 default: /* just do library call, if unknown builtin */
 6352      error ("built-in function `%s' not currently supported",
6353 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6354 }
6355
6356 /* The switch statement above can drop through to cause the function
6357 to be called normally. */
6358
6359 return expand_call (exp, target, ignore);
6360}
6361\f
6362/* Built-in functions to perform an untyped call and return. */
6363
6364/* For each register that may be used for calling a function, this
6365 gives a mode used to copy the register's value. VOIDmode indicates
6366 the register is not used for calling a function. If the machine
6367 has register windows, this gives only the outbound registers.
6368 INCOMING_REGNO gives the corresponding inbound register. */
6369static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
6370
6371/* For each register that may be used for returning values, this gives
6372 a mode used to copy the register's value. VOIDmode indicates the
6373 register is not used for returning values. If the machine has
6374 register windows, this gives only the outbound registers.
6375 INCOMING_REGNO gives the corresponding inbound register. */
6376static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
6377
6378/* Return the size required for the block returned by __builtin_apply_args,
6379 and initialize apply_args_mode. */
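/* Editorial note on the layout computed below: the block holds the arg
   pointer, then the structure value address (if any), then each
   argument register in turn, with each field first rounded up to its
   mode's alignment:
       if (size % align != 0)
	 size = CEIL (size, align) * align;
       size += GET_MODE_SIZE (mode);  */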
6380static int
6381apply_args_size ()
6382{
6383 static int size = -1;
6384 int align, regno;
6385 enum machine_mode mode;
6386
6387 /* The values computed by this function never change. */
6388 if (size < 0)
6389 {
6390 /* The first value is the incoming arg-pointer. */
6391 size = GET_MODE_SIZE (Pmode);
6392
6393 /* The second value is the structure value address unless this is
6394 passed as an "invisible" first argument. */
6395 if (struct_value_rtx)
6396 size += GET_MODE_SIZE (Pmode);
6397
6398 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6399 if (FUNCTION_ARG_REGNO_P (regno))
6400 {
6401 /* Search for the proper mode for copying this register's
6402 value. I'm not sure this is right, but it works so far. */
6403 enum machine_mode best_mode = VOIDmode;
6404
6405 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6406 mode != VOIDmode;
6407 mode = GET_MODE_WIDER_MODE (mode))
6408 if (HARD_REGNO_MODE_OK (regno, mode)
6409 && HARD_REGNO_NREGS (regno, mode) == 1)
6410 best_mode = mode;
6411
6412 if (best_mode == VOIDmode)
6413 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6414 mode != VOIDmode;
6415 mode = GET_MODE_WIDER_MODE (mode))
6416 if (HARD_REGNO_MODE_OK (regno, mode)
6417 && (mov_optab->handlers[(int) mode].insn_code
6418 != CODE_FOR_nothing))
6419 best_mode = mode;
6420
6421 mode = best_mode;
6422 if (mode == VOIDmode)
6423 abort ();
6424
6425 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6426 if (size % align != 0)
6427 size = CEIL (size, align) * align;
6428 size += GET_MODE_SIZE (mode);
6429 apply_args_mode[regno] = mode;
6430 }
6431 else
6432 apply_args_mode[regno] = VOIDmode;
6433 }
6434 return size;
6435}
6436
6437/* Return the size required for the block returned by __builtin_apply,
6438 and initialize apply_result_mode. */
6439static int
6440apply_result_size ()
6441{
6442 static int size = -1;
6443 int align, regno;
6444 enum machine_mode mode;
6445
6446 /* The values computed by this function never change. */
6447 if (size < 0)
6448 {
6449 size = 0;
6450
6451 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6452 if (FUNCTION_VALUE_REGNO_P (regno))
6453 {
6454 /* Search for the proper mode for copying this register's
6455 value. I'm not sure this is right, but it works so far. */
6456 enum machine_mode best_mode = VOIDmode;
6457
6458 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6459 mode != TImode;
6460 mode = GET_MODE_WIDER_MODE (mode))
6461 if (HARD_REGNO_MODE_OK (regno, mode))
6462 best_mode = mode;
6463
6464 if (best_mode == VOIDmode)
6465 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6466 mode != VOIDmode;
6467 mode = GET_MODE_WIDER_MODE (mode))
6468 if (HARD_REGNO_MODE_OK (regno, mode)
6469 && (mov_optab->handlers[(int) mode].insn_code
6470 != CODE_FOR_nothing))
6471 best_mode = mode;
6472
6473 mode = best_mode;
6474 if (mode == VOIDmode)
6475 abort ();
6476
6477 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6478 if (size % align != 0)
6479 size = CEIL (size, align) * align;
6480 size += GET_MODE_SIZE (mode);
6481 apply_result_mode[regno] = mode;
6482 }
6483 else
6484 apply_result_mode[regno] = VOIDmode;
6485
6486 /* Allow targets that use untyped_call and untyped_return to override
6487 the size so that machine-specific information can be stored here. */
6488#ifdef APPLY_RESULT_SIZE
6489 size = APPLY_RESULT_SIZE;
6490#endif
6491 }
6492 return size;
6493}
6494
6495#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
6496/* Create a vector describing the result block RESULT. If SAVEP is true,
6497 the result block is used to save the values; otherwise it is used to
6498 restore the values. */
6499static rtx
6500result_vector (savep, result)
6501 int savep;
6502 rtx result;
6503{
6504 int regno, size, align, nelts;
6505 enum machine_mode mode;
6506 rtx reg, mem;
6507 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
6508
6509 size = nelts = 0;
6510 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6511 if ((mode = apply_result_mode[regno]) != VOIDmode)
6512 {
6513 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6514 if (size % align != 0)
6515 size = CEIL (size, align) * align;
6516 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
6517 mem = change_address (result, mode,
6518 plus_constant (XEXP (result, 0), size));
6519 savevec[nelts++] = (savep
6520 ? gen_rtx (SET, VOIDmode, mem, reg)
6521 : gen_rtx (SET, VOIDmode, reg, mem));
6522 size += GET_MODE_SIZE (mode);
6523 }
6524 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
6525}
6526#endif /* HAVE_untyped_call or HAVE_untyped_return */
6527
6528
6529/* Save the state required to perform an untyped call with the same
6530 arguments as were passed to the current function. */
6531static rtx
6532expand_builtin_apply_args ()
6533{
6534 rtx registers;
6535 int size, align, regno;
6536 enum machine_mode mode;
6537
6538 /* Create a block where the arg-pointer, structure value address,
6539 and argument registers can be saved. */
6540 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
6541
6542 /* Walk past the arg-pointer and structure value address. */
6543 size = GET_MODE_SIZE (Pmode);
6544 if (struct_value_rtx)
6545 size += GET_MODE_SIZE (Pmode);
6546
6547 /* Save each register used in calling a function to the block. */
6548 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6549 if ((mode = apply_args_mode[regno]) != VOIDmode)
6550 {
6551 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6552 if (size % align != 0)
6553 size = CEIL (size, align) * align;
6554 emit_move_insn (change_address (registers, mode,
6555 plus_constant (XEXP (registers, 0),
6556 size)),
6557 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
6558 size += GET_MODE_SIZE (mode);
6559 }
6560
6561 /* Save the arg pointer to the block. */
6562 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
6563 copy_to_reg (virtual_incoming_args_rtx));
6564 size = GET_MODE_SIZE (Pmode);
6565
6566 /* Save the structure value address unless this is passed as an
6567 "invisible" first argument. */
6568 if (struct_value_incoming_rtx)
6569 {
6570 emit_move_insn (change_address (registers, Pmode,
6571 plus_constant (XEXP (registers, 0),
6572 size)),
6573 copy_to_reg (struct_value_incoming_rtx));
6574 size += GET_MODE_SIZE (Pmode);
6575 }
6576
6577 /* Return the address of the block. */
6578 return copy_addr_to_reg (XEXP (registers, 0));
6579}
6580
6581/* Perform an untyped call and save the state required to perform an
6582 untyped return of whatever value was returned by the given function. */
6583static rtx
6584expand_builtin_apply (function, arguments, argsize)
6585 rtx function, arguments, argsize;
6586{
6587 int size, align, regno;
6588 enum machine_mode mode;
6589 rtx incoming_args, result, reg, dest, call_insn;
6590 rtx old_stack_level = 0;
6591 rtx use_insns = 0;
6592
6593 /* Create a block where the return registers can be saved. */
6594 result = assign_stack_local (BLKmode, apply_result_size (), -1);
6595
6596 /* ??? The argsize value should be adjusted here. */
6597
6598 /* Fetch the arg pointer from the ARGUMENTS block. */
6599 incoming_args = gen_reg_rtx (Pmode);
6600 emit_move_insn (incoming_args,
6601 gen_rtx (MEM, Pmode, arguments));
6602#ifndef STACK_GROWS_DOWNWARD
6603 incoming_args = expand_binop (Pmode, add_optab, incoming_args, argsize,
6604 incoming_args, 0, OPTAB_LIB_WIDEN);
6605#endif
6606
6607 /* Perform postincrements before actually calling the function. */
6608 emit_queue ();
6609
6610 /* Push a new argument block and copy the arguments. */
6611 do_pending_stack_adjust ();
6612 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
6613
6614 /* Push a block of memory onto the stack to store the memory arguments.
6615 Save the address in a register, and copy the memory arguments. ??? I
 6616     haven't figured out how the calling convention macros affect this,
 6617     but it's likely that the source and/or destination addresses in
 6618     the block copy will need updating in machine-specific ways.  */
6619 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
6620 emit_block_move (gen_rtx (MEM, BLKmode, dest),
6621 gen_rtx (MEM, BLKmode, incoming_args),
6622 argsize,
6623 PARM_BOUNDARY / BITS_PER_UNIT);
6624
6625 /* Refer to the argument block. */
6626 apply_args_size ();
6627 arguments = gen_rtx (MEM, BLKmode, arguments);
6628
6629 /* Walk past the arg-pointer and structure value address. */
6630 size = GET_MODE_SIZE (Pmode);
6631 if (struct_value_rtx)
6632 size += GET_MODE_SIZE (Pmode);
6633
6634 /* Restore each of the registers previously saved. Make USE insns
6635 for each of these registers for use in making the call. */
6636 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6637 if ((mode = apply_args_mode[regno]) != VOIDmode)
6638 {
6639 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6640 if (size % align != 0)
6641 size = CEIL (size, align) * align;
6642 reg = gen_rtx (REG, mode, regno);
6643 emit_move_insn (reg,
6644 change_address (arguments, mode,
6645 plus_constant (XEXP (arguments, 0),
6646 size)));
6647
6648 push_to_sequence (use_insns);
6649 emit_insn (gen_rtx (USE, VOIDmode, reg));
6650 use_insns = get_insns ();
6651 end_sequence ();
6652 size += GET_MODE_SIZE (mode);
6653 }
6654
6655 /* Restore the structure value address unless this is passed as an
6656 "invisible" first argument. */
6657 size = GET_MODE_SIZE (Pmode);
6658 if (struct_value_rtx)
6659 {
6660 rtx value = gen_reg_rtx (Pmode);
6661 emit_move_insn (value,
6662 change_address (arguments, Pmode,
6663 plus_constant (XEXP (arguments, 0),
6664 size)));
6665 emit_move_insn (struct_value_rtx, value);
6666 if (GET_CODE (struct_value_rtx) == REG)
6667 {
6668 push_to_sequence (use_insns);
6669 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
6670 use_insns = get_insns ();
6671 end_sequence ();
6672 }
6673 size += GET_MODE_SIZE (Pmode);
6674 }
6675
6676 /* All arguments and registers used for the call are set up by now! */
6677 function = prepare_call_address (function, NULL_TREE, &use_insns);
6678
6679  /* Ensure the address is valid.  A SYMBOL_REF is already valid, so no
6680     work is needed, and we don't want to load it into a register as an
6681     optimization, because prepare_call_address already did so if needed.  */
6682 if (GET_CODE (function) != SYMBOL_REF)
6683 function = memory_address (FUNCTION_MODE, function);
6684
6685 /* Generate the actual call instruction and save the return value. */
6686#ifdef HAVE_untyped_call
6687 if (HAVE_untyped_call)
6688 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
6689 result, result_vector (1, result)));
6690 else
6691#endif
6692#ifdef HAVE_call_value
6693 if (HAVE_call_value)
6694 {
6695 rtx valreg = 0;
6696
6697 /* Locate the unique return register. It is not possible to
6698 express a call that sets more than one return register using
6699 call_value; use untyped_call for that. In fact, untyped_call
6700 only needs to save the return registers in the given block. */
6701 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6702 if ((mode = apply_result_mode[regno]) != VOIDmode)
6703 {
6704 if (valreg)
6705 abort (); /* HAVE_untyped_call required. */
6706 valreg = gen_rtx (REG, mode, regno);
6707 }
6708
6709 emit_call_insn (gen_call_value (valreg,
6710 gen_rtx (MEM, FUNCTION_MODE, function),
6711 const0_rtx, NULL_RTX, const0_rtx));
6712
6713 emit_move_insn (change_address (result, GET_MODE (valreg),
6714 XEXP (result, 0)),
6715 valreg);
6716 }
6717 else
6718#endif
6719 abort ();
6720
6721 /* Find the CALL insn we just emitted and write the USE insns before it. */
6722 for (call_insn = get_last_insn ();
6723 call_insn && GET_CODE (call_insn) != CALL_INSN;
6724 call_insn = PREV_INSN (call_insn))
6725 ;
6726
6727 if (! call_insn)
6728 abort ();
6729
6730 /* Put the USE insns before the CALL. */
6731 emit_insns_before (use_insns, call_insn);
6732
6733 /* Restore the stack. */
6734 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
6735
6736 /* Return the address of the result block. */
6737 return copy_addr_to_reg (XEXP (result, 0));
6738}
6739
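/* Illustrative sketch, not part of the original source: at the C level
   these builtins are normally used together to forward a call, e.g.

       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
       __builtin_return (result);

   where target_fn and the argument-block size of 64 are hypothetical;
   the size must cover all pushed arguments, which is what the ???
   comment about argsize in expand_builtin_apply alludes to.  */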
6740/* Perform an untyped return. */
6741static void
6742expand_builtin_return (result)
6743 rtx result;
6744{
6745 int size, align, regno;
6746 enum machine_mode mode;
6747 rtx reg;
6748 rtx use_insns = 0;
6749
6750 apply_result_size ();
6751 result = gen_rtx (MEM, BLKmode, result);
6752
6753#ifdef HAVE_untyped_return
6754 if (HAVE_untyped_return)
6755 {
6756 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
6757 emit_barrier ();
6758 return;
6759 }
6760#endif
6761
6762 /* Restore the return value and note that each value is used. */
6763 size = 0;
6764 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6765 if ((mode = apply_result_mode[regno]) != VOIDmode)
6766 {
6767 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6768 if (size % align != 0)
6769 size = CEIL (size, align) * align;
6770 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
6771 emit_move_insn (reg,
6772 change_address (result, mode,
6773 plus_constant (XEXP (result, 0),
6774 size)));
6775
6776 push_to_sequence (use_insns);
6777 emit_insn (gen_rtx (USE, VOIDmode, reg));
6778 use_insns = get_insns ();
6779 end_sequence ();
6780 size += GET_MODE_SIZE (mode);
6781 }
6782
6783 /* Put the USE insns before the return. */
6784 emit_insns (use_insns);
6785
6786  /* Return whatever value was restored by jumping directly to the end
6787 of the function. */
6788 expand_null_return ();
6789}
6790\f
6791/* Expand code for a post- or pre- increment or decrement
6792 and return the RTX for the result.
6793 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6794
6795static rtx
6796expand_increment (exp, post)
6797 register tree exp;
6798 int post;
6799{
6800 register rtx op0, op1;
6801 register rtx temp, value;
6802 register tree incremented = TREE_OPERAND (exp, 0);
6803 optab this_optab = add_optab;
6804 int icode;
6805 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6806 int op0_is_copy = 0;
6807
6808 /* Stabilize any component ref that might need to be
6809 evaluated more than once below. */
6810 if (!post
6811 || TREE_CODE (incremented) == BIT_FIELD_REF
6812 || (TREE_CODE (incremented) == COMPONENT_REF
6813 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6814 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6815 incremented = stabilize_reference (incremented);
6816
6817 /* Compute the operands as RTX.
6818 Note whether OP0 is the actual lvalue or a copy of it:
6819     I believe it is a copy iff it is a register or subreg
6820 and insns were generated in computing it. */
6821
6822  temp = get_last_insn ();
6823  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6824
6825 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6826     in place but instead must do sign- or zero-extension during assignment,
6827 so we copy it into a new register and let the code below use it as
6828 a copy.
6829
6830     Note that we can safely modify this SUBREG since it is known not to be
6831 shared (it was made by the expand_expr call above). */
6832
6833 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6834 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6835
6836 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6837 && temp != get_last_insn ());
6838  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6839
6840 /* Decide whether incrementing or decrementing. */
6841 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6842 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6843 this_optab = sub_optab;
6844
6845 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6846 then we cannot just increment OP0. We must therefore contrive to
6847 increment the original value. Then, for postincrement, we can return
6848 OP0 since it is a copy of the old value. For preincrement, we want
6849 to always expand here, since this generates better or equivalent code. */
6850 if (!post || op0_is_copy)
6851 {
6852 /* This is the easiest way to increment the value wherever it is.
6853 Problems with multiple evaluation of INCREMENTED are prevented
6854 because either (1) it is a component_ref or preincrement,
6855 in which case it was stabilized above, or (2) it is an array_ref
6856 with constant index in an array in a register, which is
6857 safe to reevaluate. */
6858 tree newexp = build ((this_optab == add_optab
6859 ? PLUS_EXPR : MINUS_EXPR),
6860 TREE_TYPE (exp),
6861 incremented,
6862 TREE_OPERAND (exp, 1));
6863 temp = expand_assignment (incremented, newexp, ! post, 0);
6864 return post ? op0 : temp;
6865 }
6866
6867 /* Convert decrement by a constant into a negative increment. */
6868 if (this_optab == sub_optab
6869 && GET_CODE (op1) == CONST_INT)
6870 {
6871       op1 = GEN_INT (- INTVAL (op1));
6872 this_optab = add_optab;
6873 }
6874
6875 if (post)
6876 {
6877 /* We have a true reference to the value in OP0.
6878 If there is an insn to add or subtract in this mode, queue it. */
6879
6880#if 0 /* Turned off to avoid making extra insn for indexed memref. */
6881 op0 = stabilize (op0);
6882#endif
6883
6884 icode = (int) this_optab->handlers[(int) mode].insn_code;
6885 if (icode != (int) CODE_FOR_nothing
6886 /* Make sure that OP0 is valid for operands 0 and 1
6887 of the insn we want to queue. */
6888 && (*insn_operand_predicate[icode][0]) (op0, mode)
6889 && (*insn_operand_predicate[icode][1]) (op0, mode))
6890 {
6891 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6892 op1 = force_reg (mode, op1);
6893
6894 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6895 }
6896 }
6897
6898 /* Preincrement, or we can't increment with one simple insn. */
6899 if (post)
6900 /* Save a copy of the value before inc or dec, to return it later. */
6901 temp = value = copy_to_reg (op0);
6902 else
6903 /* Arrange to return the incremented value. */
6904 /* Copy the rtx because expand_binop will protect from the queue,
6905 and the results of that would be invalid for us to return
6906 if our caller does emit_queue before using our result. */
6907 temp = copy_rtx (value = op0);
6908
6909 /* Increment however we can. */
6910 op1 = expand_binop (mode, this_optab, value, op1, op0,
6911 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6912 /* Make sure the value is stored into OP0. */
6913 if (op1 != op0)
6914 emit_move_insn (op0, op1);
6915
6916 return temp;
6917}
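/* Illustrative sketch, not part of the original source: when OP0 is
   only a copy, or for any preincrement, the code above rebuilds the
   tree as an assignment, so

       ++x;   expands, in effect, as   x = x + 1;

   while a postincrement of a directly addressable lvalue with a
   matching add insn is queued, letting the add be emitted after the
   old value has been used.  */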
6918\f
6919/* Expand all function calls contained within EXP, innermost ones first.
6920 But don't look within expressions that have sequence points.
6921 For each CALL_EXPR, record the rtx for its value
6922 in the CALL_EXPR_RTL field. */
6923
6924static void
6925preexpand_calls (exp)
6926 tree exp;
6927{
6928 register int nops, i;
6929 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6930
6931 if (! do_preexpand_calls)
6932 return;
6933
6934 /* Only expressions and references can contain calls. */
6935
6936 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6937 return;
6938
6939 switch (TREE_CODE (exp))
6940 {
6941 case CALL_EXPR:
6942 /* Do nothing if already expanded. */
6943 if (CALL_EXPR_RTL (exp) != 0)
6944 return;
6945
6946 /* Do nothing to built-in functions. */
6947 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6948 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6949 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6950        CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6951 return;
6952
6953 case COMPOUND_EXPR:
6954 case COND_EXPR:
6955 case TRUTH_ANDIF_EXPR:
6956 case TRUTH_ORIF_EXPR:
6957 /* If we find one of these, then we can be sure
6958 the adjust will be done for it (since it makes jumps).
6959 Do it now, so that if this is inside an argument
6960 of a function, we don't get the stack adjustment
6961 after some other args have already been pushed. */
6962 do_pending_stack_adjust ();
6963 return;
6964
6965 case BLOCK:
6966 case RTL_EXPR:
6967 case WITH_CLEANUP_EXPR:
6968 return;
6969
6970 case SAVE_EXPR:
6971 if (SAVE_EXPR_RTL (exp) != 0)
6972 return;
6973 }
6974
6975 nops = tree_code_length[(int) TREE_CODE (exp)];
6976 for (i = 0; i < nops; i++)
6977 if (TREE_OPERAND (exp, i) != 0)
6978 {
6979 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6980 if (type == 'e' || type == '<' || type == '1' || type == '2'
6981 || type == 'r')
6982 preexpand_calls (TREE_OPERAND (exp, i));
6983 }
6984}
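/* Illustrative sketch, not part of the original source: given

       y = f (a) + g (b);

   both calls are expanded here first, and their values recorded in
   CALL_EXPR_RTL, so no call is emitted while the operands of the
   addition are being computed.  Expressions with sequence points
   (COND_EXPR, TRUTH_ANDIF_EXPR, ...) are not entered, since their
   subexpressions must not be evaluated early.  */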
6985\f
6986/* At the start of a function, record that we have no previously-pushed
6987 arguments waiting to be popped. */
6988
6989void
6990init_pending_stack_adjust ()
6991{
6992 pending_stack_adjust = 0;
6993}
6994
6995/* When exiting from function, if safe, clear out any pending stack adjust
6996 so the adjustment won't get done. */
6997
6998void
6999clear_pending_stack_adjust ()
7000{
7001#ifdef EXIT_IGNORE_STACK
7002 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
7003       && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
7004 && ! flag_inline_functions)
7005 pending_stack_adjust = 0;
7006#endif
7007}
7008
7009/* Pop any previously-pushed arguments that have not been popped yet. */
7010
7011void
7012do_pending_stack_adjust ()
7013{
7014 if (inhibit_defer_pop == 0)
7015 {
7016 if (pending_stack_adjust != 0)
7017        adjust_stack (GEN_INT (pending_stack_adjust));
7018 pending_stack_adjust = 0;
7019 }
7020}
7021
7022/* Expand all cleanups up to OLD_CLEANUPS.
7023 Needed here, and also for language-dependent calls. */
7024
7025void
7026expand_cleanups_to (old_cleanups)
7027 tree old_cleanups;
7028{
7029 while (cleanups_this_call != old_cleanups)
7030 {
7031       expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
7032 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
7033 }
7034}
7035\f
7036/* Expand conditional expressions. */
7037
7038/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
7039 LABEL is an rtx of code CODE_LABEL, in this function and all the
7040 functions here. */
7041
7042void
7043jumpifnot (exp, label)
7044 tree exp;
7045 rtx label;
7046{
7047  do_jump (exp, label, NULL_RTX);
7048}
7049
7050/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
7051
7052void
7053jumpif (exp, label)
7054 tree exp;
7055 rtx label;
7056{
7057  do_jump (exp, NULL_RTX, label);
7058}
7059
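/* Illustrative sketch, not part of the original source: a typical use
   from a statement expander would be

       rtx else_label = gen_label_rtx ();
       jumpifnot (cond, else_label);
       ... expand the THEN arm, reached by falling through ...

   passing NULL for the other label so do_jump falls through on the
   opposite outcome.  */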
7060/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
7061 the result is zero, or IF_TRUE_LABEL if the result is one.
7062 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
7063 meaning fall through in that case.
7064
7065 do_jump always does any pending stack adjust except when it does not
7066 actually perform a jump. An example where there is no jump
7067 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
7068
7069 This function is responsible for optimizing cases such as
7070 &&, || and comparison operators in EXP. */
7071
7072void
7073do_jump (exp, if_false_label, if_true_label)
7074 tree exp;
7075 rtx if_false_label, if_true_label;
7076{
7077 register enum tree_code code = TREE_CODE (exp);
7078 /* Some cases need to create a label to jump to
7079 in order to properly fall through.
7080 These cases set DROP_THROUGH_LABEL nonzero. */
7081 rtx drop_through_label = 0;
7082 rtx temp;
7083 rtx comparison = 0;
7084 int i;
7085 tree type;
7086
7087 emit_queue ();
7088
7089 switch (code)
7090 {
7091 case ERROR_MARK:
7092 break;
7093
7094 case INTEGER_CST:
7095 temp = integer_zerop (exp) ? if_false_label : if_true_label;
7096 if (temp)
7097 emit_jump (temp);
7098 break;
7099
7100#if 0
7101 /* This is not true with #pragma weak */
7102 case ADDR_EXPR:
7103 /* The address of something can never be zero. */
7104 if (if_true_label)
7105 emit_jump (if_true_label);
7106 break;
7107#endif
7108
7109 case NOP_EXPR:
7110 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
7111 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
7112 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
7113 goto normal;
7114 case CONVERT_EXPR:
7115 /* If we are narrowing the operand, we have to do the compare in the
7116 narrower mode. */
7117 if ((TYPE_PRECISION (TREE_TYPE (exp))
7118 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7119 goto normal;
7120 case NON_LVALUE_EXPR:
7121 case REFERENCE_EXPR:
7122 case ABS_EXPR:
7123 case NEGATE_EXPR:
7124 case LROTATE_EXPR:
7125 case RROTATE_EXPR:
7126 /* These cannot change zero->non-zero or vice versa. */
7127 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7128 break;
7129
7130#if 0
7131  /* This is never fewer insns than evaluating the PLUS_EXPR followed by
7132 a test and can be longer if the test is eliminated. */
7133 case PLUS_EXPR:
7134 /* Reduce to minus. */
7135 exp = build (MINUS_EXPR, TREE_TYPE (exp),
7136 TREE_OPERAND (exp, 0),
7137 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
7138 TREE_OPERAND (exp, 1))));
7139 /* Process as MINUS. */
7140#endif
7141
7142 case MINUS_EXPR:
7143 /* Non-zero iff operands of minus differ. */
7144 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
7145 TREE_OPERAND (exp, 0),
7146 TREE_OPERAND (exp, 1)),
7147 NE, NE);
7148 break;
7149
7150 case BIT_AND_EXPR:
7151 /* If we are AND'ing with a small constant, do this comparison in the
7152 smallest type that fits. If the machine doesn't have comparisons
7153 that small, it will be converted back to the wider comparison.
7154 This helps if we are testing the sign bit of a narrower object.
7155 combine can't do this for us because it can't know whether a
7156 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
7157
7158 if (! SLOW_BYTE_ACCESS
7159 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7160       && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
7161 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
7162 && (type = type_for_size (i + 1, 1)) != 0
7163 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7164 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7165 != CODE_FOR_nothing))
7166 {
7167 do_jump (convert (type, exp), if_false_label, if_true_label);
7168 break;
7169 }
7170 goto normal;
7171
7172 case TRUTH_NOT_EXPR:
7173 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7174 break;
7175
7176 case TRUTH_ANDIF_EXPR:
7177 if (if_false_label == 0)
7178 if_false_label = drop_through_label = gen_label_rtx ();
7179      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
7180 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7181 break;
7182
7183 case TRUTH_ORIF_EXPR:
7184 if (if_true_label == 0)
7185 if_true_label = drop_through_label = gen_label_rtx ();
7186      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
7187 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7188 break;
7189
7190 case COMPOUND_EXPR:
7191 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7192 free_temp_slots ();
7193 emit_queue ();
7194      do_pending_stack_adjust ();
7195 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7196 break;
7197
7198 case COMPONENT_REF:
7199 case BIT_FIELD_REF:
7200 case ARRAY_REF:
7201 {
7202 int bitsize, bitpos, unsignedp;
7203 enum machine_mode mode;
7204 tree type;
7205	tree offset;
7206 int volatilep = 0;
7207
7208 /* Get description of this reference. We don't actually care
7209 about the underlying object here. */
7210 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7211 &mode, &unsignedp, &volatilep);
7212
7213 type = type_for_size (bitsize, unsignedp);
7214 if (! SLOW_BYTE_ACCESS
7215 && type != 0 && bitsize >= 0
7216 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7217 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7218 != CODE_FOR_nothing))
7219 {
7220 do_jump (convert (type, exp), if_false_label, if_true_label);
7221 break;
7222 }
7223 goto normal;
7224 }
7225
7226 case COND_EXPR:
7227 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7228 if (integer_onep (TREE_OPERAND (exp, 1))
7229 && integer_zerop (TREE_OPERAND (exp, 2)))
7230 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7231
7232 else if (integer_zerop (TREE_OPERAND (exp, 1))
7233 && integer_onep (TREE_OPERAND (exp, 2)))
7234 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7235
7236 else
7237 {
7238 register rtx label1 = gen_label_rtx ();
7239 drop_through_label = gen_label_rtx ();
7240	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
7241 /* Now the THEN-expression. */
7242 do_jump (TREE_OPERAND (exp, 1),
7243 if_false_label ? if_false_label : drop_through_label,
7244 if_true_label ? if_true_label : drop_through_label);
7245 /* In case the do_jump just above never jumps. */
7246 do_pending_stack_adjust ();
7247 emit_label (label1);
7248 /* Now the ELSE-expression. */
7249 do_jump (TREE_OPERAND (exp, 2),
7250 if_false_label ? if_false_label : drop_through_label,
7251 if_true_label ? if_true_label : drop_through_label);
7252 }
7253 break;
7254
7255 case EQ_EXPR:
7256 if (integer_zerop (TREE_OPERAND (exp, 1)))
7257 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7258 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7259 == MODE_INT)
7260 &&
7261 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7262 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7263 else
7264 comparison = compare (exp, EQ, EQ);
7265 break;
7266
7267 case NE_EXPR:
7268 if (integer_zerop (TREE_OPERAND (exp, 1)))
7269 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7270 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7271 == MODE_INT)
7272 &&
7273 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7274 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7275 else
7276 comparison = compare (exp, NE, NE);
7277 break;
7278
7279 case LT_EXPR:
7280 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7281 == MODE_INT)
7282 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7283 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7284 else
7285 comparison = compare (exp, LT, LTU);
7286 break;
7287
7288 case LE_EXPR:
7289 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7290 == MODE_INT)
7291 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7292 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7293 else
7294 comparison = compare (exp, LE, LEU);
7295 break;
7296
7297 case GT_EXPR:
7298 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7299 == MODE_INT)
7300 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7301 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7302 else
7303 comparison = compare (exp, GT, GTU);
7304 break;
7305
7306 case GE_EXPR:
7307 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7308 == MODE_INT)
7309 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7310 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7311 else
7312 comparison = compare (exp, GE, GEU);
7313 break;
7314
7315 default:
7316 normal:
7317      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
7318#if 0
7319 /* This is not needed any more and causes poor code since it causes
7320 comparisons and tests from non-SI objects to have different code
7321 sequences. */
7322 /* Copy to register to avoid generating bad insns by cse
7323 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7324 if (!cse_not_expected && GET_CODE (temp) == MEM)
7325 temp = copy_to_reg (temp);
7326#endif
7327 do_pending_stack_adjust ();
7328 if (GET_CODE (temp) == CONST_INT)
7329 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7330 else if (GET_CODE (temp) == LABEL_REF)
7331 comparison = const_true_rtx;
7332 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7333 && !can_compare_p (GET_MODE (temp)))
7334 /* Note swapping the labels gives us not-equal. */
7335 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7336 else if (GET_MODE (temp) != VOIDmode)
7337 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
7338 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7339 GET_MODE (temp), NULL_RTX, 0);
7340 else
7341 abort ();
7342 }
7343
7344 /* Do any postincrements in the expression that was tested. */
7345 emit_queue ();
7346
7347 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7348 straight into a conditional jump instruction as the jump condition.
7349 Otherwise, all the work has been done already. */
7350
7351 if (comparison == const_true_rtx)
7352 {
7353 if (if_true_label)
7354 emit_jump (if_true_label);
7355 }
7356 else if (comparison == const0_rtx)
7357 {
7358 if (if_false_label)
7359 emit_jump (if_false_label);
7360 }
7361 else if (comparison)
7362 do_jump_for_compare (comparison, if_false_label, if_true_label);
7363
7364 free_temp_slots ();
7365
7366 if (drop_through_label)
7367 {
7368 /* If do_jump produces code that might be jumped around,
7369 do any stack adjusts from that code, before the place
7370 where control merges in. */
7371 do_pending_stack_adjust ();
7372 emit_label (drop_through_label);
7373 }
7374}
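/* Illustrative sketch, not part of the original source: the
   BIT_AND_EXPR and bit-field cases above narrow the comparison mode
   where possible, so that, e.g.,

       if (x & 0x80) ...

   can be tested with a QImode compare of the low byte (0x80 fits in
   8 bits) rather than a full-width test, something combine cannot
   recover on its own, as the comment in the BIT_AND_EXPR case notes.  */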
7375\f
7376/* Given a comparison expression EXP for values too wide to be compared
7377 with one insn, test the comparison and jump to the appropriate label.
7378 The code of EXP is ignored; we always test GT if SWAP is 0,
7379 and LT if SWAP is 1. */
7380
7381static void
7382do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7383 tree exp;
7384 int swap;
7385 rtx if_false_label, if_true_label;
7386{
7387 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
7388 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
7389 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7390 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7391 rtx drop_through_label = 0;
7392 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
7393 int i;
7394
7395 if (! if_true_label || ! if_false_label)
7396 drop_through_label = gen_label_rtx ();
7397 if (! if_true_label)
7398 if_true_label = drop_through_label;
7399 if (! if_false_label)
7400 if_false_label = drop_through_label;
7401
7402 /* Compare a word at a time, high order first. */
7403 for (i = 0; i < nwords; i++)
7404 {
7405 rtx comp;
7406 rtx op0_word, op1_word;
7407
7408 if (WORDS_BIG_ENDIAN)
7409 {
7410 op0_word = operand_subword_force (op0, i, mode);
7411 op1_word = operand_subword_force (op1, i, mode);
7412 }
7413 else
7414 {
7415 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7416 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7417 }
7418
7419 /* All but high-order word must be compared as unsigned. */
7420 comp = compare_from_rtx (op0_word, op1_word,
7421 (unsignedp || i > 0) ? GTU : GT,
7422 unsignedp, word_mode, NULL_RTX, 0);
7423 if (comp == const_true_rtx)
7424 emit_jump (if_true_label);
7425 else if (comp != const0_rtx)
7426 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7427
7428 /* Consider lower words only if these are equal. */
7429 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7430 NULL_RTX, 0);
7431 if (comp == const_true_rtx)
7432 emit_jump (if_false_label);
7433 else if (comp != const0_rtx)
7434 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7435 }
7436
7437 if (if_false_label)
7438 emit_jump (if_false_label);
7439 if (drop_through_label)
7440 emit_label (drop_through_label);
7441}
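/* Illustrative C analogue of the loop above, not part of the original
   source, for a signed two-word comparison op0 > op1:

       if (hi0 > hi1) goto if_true;      /* high word: signed compare */
       if (hi0 != hi1) goto if_false;    /* high words differ; decided */
       if (lo0 > lo1) goto if_true;      /* low word: unsigned compare */
       goto if_false;                    /* equal throughout: not greater */
*/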
7442
7443/* Compare OP0 with OP1, word at a time, in mode MODE.
7444 UNSIGNEDP says to do unsigned comparison.
7445 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
7446
7447static void
7448do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
7449 enum machine_mode mode;
7450 int unsignedp;
7451 rtx op0, op1;
7452 rtx if_false_label, if_true_label;
7453{
7454 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7455 rtx drop_through_label = 0;
7456 int i;
7457
7458 if (! if_true_label || ! if_false_label)
7459 drop_through_label = gen_label_rtx ();
7460 if (! if_true_label)
7461 if_true_label = drop_through_label;
7462 if (! if_false_label)
7463 if_false_label = drop_through_label;
7464
7465 /* Compare a word at a time, high order first. */
7466 for (i = 0; i < nwords; i++)
7467 {
7468 rtx comp;
7469 rtx op0_word, op1_word;
7470
7471 if (WORDS_BIG_ENDIAN)
7472 {
7473 op0_word = operand_subword_force (op0, i, mode);
7474 op1_word = operand_subword_force (op1, i, mode);
7475 }
7476 else
7477 {
7478 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7479 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7480 }
7481
7482 /* All but high-order word must be compared as unsigned. */
7483 comp = compare_from_rtx (op0_word, op1_word,
7484 (unsignedp || i > 0) ? GTU : GT,
7485			       unsignedp, word_mode, NULL_RTX, 0);
7486 if (comp == const_true_rtx)
7487 emit_jump (if_true_label);
7488 else if (comp != const0_rtx)
7489	do_jump_for_compare (comp, NULL_RTX, if_true_label);
7490
7491 /* Consider lower words only if these are equal. */
7492 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7493			       NULL_RTX, 0);
7494 if (comp == const_true_rtx)
7495 emit_jump (if_false_label);
7496 else if (comp != const0_rtx)
7497	do_jump_for_compare (comp, NULL_RTX, if_false_label);
7498 }
7499
7500 if (if_false_label)
7501 emit_jump (if_false_label);
7502 if (drop_through_label)
7503 emit_label (drop_through_label);
7504}
7505
7506/* Given an EQ_EXPR expression EXP for values too wide to be compared
7507 with one insn, test the comparison and jump to the appropriate label. */
7508
7509static void
7510do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7511 tree exp;
7512 rtx if_false_label, if_true_label;
7513{
7514 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7515 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7516 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7517 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7518 int i;
7519 rtx drop_through_label = 0;
7520
7521 if (! if_false_label)
7522 drop_through_label = if_false_label = gen_label_rtx ();
7523
7524 for (i = 0; i < nwords; i++)
7525 {
7526 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7527 operand_subword_force (op1, i, mode),
7528 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
7529 word_mode, NULL_RTX, 0);
7530 if (comp == const_true_rtx)
7531 emit_jump (if_false_label);
7532 else if (comp != const0_rtx)
7533	do_jump_for_compare (comp, if_false_label, NULL_RTX);
7534 }
7535
7536 if (if_true_label)
7537 emit_jump (if_true_label);
7538 if (drop_through_label)
7539 emit_label (drop_through_label);
7540}
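/* Illustrative C analogue of the loop above, not part of the original
   source, for a two-word equality test:

       if (w0_lo != w1_lo) goto if_false;
       if (w0_hi != w1_hi) goto if_false;
       goto if_true;
*/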
7541\f
7542/* Jump according to whether OP0 is 0.
7543 We assume that OP0 has an integer mode that is too wide
7544 for the available compare insns. */
7545
7546static void
7547do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7548 rtx op0;
7549 rtx if_false_label, if_true_label;
7550{
7551 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7552 int i;
7553 rtx drop_through_label = 0;
7554
7555 if (! if_false_label)
7556 drop_through_label = if_false_label = gen_label_rtx ();
7557
7558 for (i = 0; i < nwords; i++)
7559 {
7560 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7561 GET_MODE (op0)),
7562				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
7563 if (comp == const_true_rtx)
7564 emit_jump (if_false_label);
7565 else if (comp != const0_rtx)
7566	do_jump_for_compare (comp, if_false_label, NULL_RTX);
7567 }
7568
7569 if (if_true_label)
7570 emit_jump (if_true_label);
7571 if (drop_through_label)
7572 emit_label (drop_through_label);
7573}
7574
7575/* Given a comparison expression in rtl form, output conditional branches to
7576 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
7577
7578static void
7579do_jump_for_compare (comparison, if_false_label, if_true_label)
7580 rtx comparison, if_false_label, if_true_label;
7581{
7582 if (if_true_label)
7583 {
7584 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7585 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
7586 else
7587 abort ();
7588
7589 if (if_false_label)
7590 emit_jump (if_false_label);
7591 }
7592 else if (if_false_label)
7593 {
7594 rtx insn;
7595 rtx prev = PREV_INSN (get_last_insn ());
7596 rtx branch = 0;
7597
7598 /* Output the branch with the opposite condition. Then try to invert
7599 what is generated. If more than one insn is a branch, or if the
7600 branch is not the last insn written, abort. If we can't invert
7601	 the branch, make a true label, redirect this jump to that,
7602 emit a jump to the false label and define the true label. */
7603
7604 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7605 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
7606 else
7607 abort ();
7608
7609 /* Here we get the insn before what was just emitted.
7610 On some machines, emitting the branch can discard
7611 the previous compare insn and emit a replacement. */
7612 if (prev == 0)
7613 /* If there's only one preceding insn... */
7614 insn = get_insns ();
7615 else
7616 insn = NEXT_INSN (prev);
7617
7618 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
7619 if (GET_CODE (insn) == JUMP_INSN)
7620 {
7621 if (branch)
7622 abort ();
7623 branch = insn;
7624 }
7625
7626 if (branch != get_last_insn ())
7627 abort ();
7628
7629 if (! invert_jump (branch, if_false_label))
7630 {
7631 if_true_label = gen_label_rtx ();
7632 redirect_jump (branch, if_true_label);
7633 emit_jump (if_false_label);
7634 emit_label (if_true_label);
7635 }
7636 }
7637}
7638\f
7639/* Generate code for a comparison expression EXP
7640 (including code to compute the values to be compared)
7641 and set (CC0) according to the result.
7642 SIGNED_CODE should be the rtx operation for this comparison for
7643 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7644
7645 We force a stack adjustment unless there are currently
7646 things pushed on the stack that aren't yet used. */
7647
7648static rtx
7649compare (exp, signed_code, unsigned_code)
7650 register tree exp;
7651 enum rtx_code signed_code, unsigned_code;
7652{
7653 register rtx op0
7654 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7655 register rtx op1
7656 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7657 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7658 register enum machine_mode mode = TYPE_MODE (type);
7659 int unsignedp = TREE_UNSIGNED (type);
7660 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
7661
7662 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7663 ((mode == BLKmode)
7664			   ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
7665 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7666}
7667
7668/* Like compare but expects the values to compare as two rtx's.
7669 The decision as to signed or unsigned comparison must be made by the caller.
7670
7671 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
7672 compared.
7673
7674 If ALIGN is non-zero, it is the alignment of this type; if zero, the
7675 size of MODE should be used. */
7676
7677rtx
7678compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
7679 register rtx op0, op1;
7680 enum rtx_code code;
7681 int unsignedp;
7682 enum machine_mode mode;
7683 rtx size;
7684 int align;
7685{
7686 rtx tem;
7687
7688 /* If one operand is constant, make it the second one. Only do this
7689 if the other operand is not constant as well. */
7690
7691 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
7692 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
7693    {
7694      tem = op0;
7695 op0 = op1;
7696 op1 = tem;
7697 code = swap_condition (code);
7698 }
7699
7700 if (flag_force_mem)
7701 {
7702 op0 = force_not_mem (op0);
7703 op1 = force_not_mem (op1);
7704 }
7705
7706 do_pending_stack_adjust ();
7707
7708 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
7709 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
7710 return tem;
7711
7712#if 0
7713 /* There's no need to do this now that combine.c can eliminate lots of
7714 sign extensions. This can be less efficient in certain cases on other
7715     machines. */
7716
7717 /* If this is a signed equality comparison, we can do it as an
7718 unsigned comparison since zero-extension is cheaper than sign
7719 extension and comparisons with zero are done as unsigned. This is
7720 the case even on machines that can do fast sign extension, since
7721     zero-extension is easier to combine with other operations than
7722 sign-extension is. If we are comparing against a constant, we must
7723 convert it to what it would look like unsigned. */
7724  if ((code == EQ || code == NE) && ! unsignedp
7725      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT
7726 {
7727 if (GET_CODE (op1) == CONST_INT
7728 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
7729	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
7730 unsignedp = 1;
7731 }
7732#endif
7733
7734 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
7735
7736 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7737}
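/* Illustrative sketch, not part of the original source: the operand
   swap above canonicalizes a constant first operand, so

       3 < x   is emitted as   x > 3

   with swap_condition adjusting the rtx code to keep the meaning.  */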
7738\f
7739/* Generate code to calculate EXP using a store-flag instruction
7740 and return an rtx for the result. EXP is either a comparison
7741 or a TRUTH_NOT_EXPR whose operand is a comparison.
7742
7743 If TARGET is nonzero, store the result there if convenient.
7744
7745 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
7746 cheap.
7747
7748 Return zero if there is no suitable set-flag instruction
7749 available on this machine.
7750
7751 Once expand_expr has been called on the arguments of the comparison,
7752 we are committed to doing the store flag, since it is not safe to
7753 re-evaluate the expression. We emit the store-flag insn by calling
7754 emit_store_flag, but only expand the arguments if we have a reason
7755 to believe that emit_store_flag will be successful. If we think that
7756 it will, but it isn't, we have to simulate the store-flag with a
7757 set/jump/set sequence. */
7758
7759static rtx
7760do_store_flag (exp, target, mode, only_cheap)
7761 tree exp;
7762 rtx target;
7763 enum machine_mode mode;
7764 int only_cheap;
7765{
7766 enum rtx_code code;
7767  tree arg0, arg1, type;
7768  tree tem;
7769 enum machine_mode operand_mode;
7770 int invert = 0;
7771 int unsignedp;
7772 rtx op0, op1;
7773 enum insn_code icode;
7774 rtx subtarget = target;
7775 rtx result, label, pattern, jump_pat;
7776
7777 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7778 result at the end. We can't simply invert the test since it would
7779 have already been inverted if it were valid. This case occurs for
7780 some floating-point comparisons. */
7781
7782 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7783 invert = 1, exp = TREE_OPERAND (exp, 0);
7784
7785 arg0 = TREE_OPERAND (exp, 0);
7786 arg1 = TREE_OPERAND (exp, 1);
7787 type = TREE_TYPE (arg0);
7788 operand_mode = TYPE_MODE (type);
7789 unsignedp = TREE_UNSIGNED (type);
7790
7791 /* We won't bother with BLKmode store-flag operations because it would mean
7792 passing a lot of information to emit_store_flag. */
7793 if (operand_mode == BLKmode)
7794 return 0;
7795
7796 STRIP_NOPS (arg0);
7797 STRIP_NOPS (arg1);
7798
7799 /* Get the rtx comparison code to use. We know that EXP is a comparison
7800 operation of some type. Some comparisons against 1 and -1 can be
7801 converted to comparisons with zero. Do so here so that the tests
7802 below will be aware that we have a comparison with zero. These
7803 tests will not catch constants in the first operand, but constants
7804 are rarely passed as the first operand. */
7805
7806 switch (TREE_CODE (exp))
7807 {
7808 case EQ_EXPR:
7809 code = EQ;
7810 break;
7811 case NE_EXPR:
7812 code = NE;
7813 break;
7814 case LT_EXPR:
7815 if (integer_onep (arg1))
7816 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7817 else
7818 code = unsignedp ? LTU : LT;
7819 break;
7820 case LE_EXPR:
7821 if (! unsignedp && integer_all_onesp (arg1))
7822 arg1 = integer_zero_node, code = LT;
7823 else
7824 code = unsignedp ? LEU : LE;
7825 break;
7826 case GT_EXPR:
7827 if (! unsignedp && integer_all_onesp (arg1))
7828 arg1 = integer_zero_node, code = GE;
7829 else
7830 code = unsignedp ? GTU : GT;
7831 break;
7832 case GE_EXPR:
7833 if (integer_onep (arg1))
7834 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7835 else
7836 code = unsignedp ? GEU : GE;
7837 break;
7838 default:
7839 abort ();
7840 }
7841
7842 /* Put a constant second. */
7843 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7844 {
7845 tem = arg0; arg0 = arg1; arg1 = tem;
7846 code = swap_condition (code);
7847 }
7848
7849 /* If this is an equality or inequality test of a single bit, we can
7850 do this by shifting the bit being tested to the low-order bit and
7851 masking the result with the constant 1. If the condition was EQ,
7852 we xor it with 1. This does not require an scc insn and is faster
7853 than an scc insn even if we have it. */
7854
7855 if ((code == NE || code == EQ)
7856 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7857 && integer_pow2p (TREE_OPERAND (arg0, 1))
7858      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
7859 {
7860 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
7861						    NULL_RTX, VOIDmode, 0)));
7862
7863 if (subtarget == 0 || GET_CODE (subtarget) != REG
7864 || GET_MODE (subtarget) != operand_mode
7865 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
7866 subtarget = 0;
7867
7868 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
7869
7870 if (bitnum != 0)
7871 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7872 size_int (bitnum), target, 1);
7873
7874 if (GET_MODE (op0) != mode)
7875 op0 = convert_to_mode (mode, op0, 1);
7876
7877 if (bitnum != TYPE_PRECISION (type) - 1)
7878 op0 = expand_and (op0, const1_rtx, target);
7879
7880      if ((code == EQ && ! invert) || (code == NE && invert))
7881 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7882 OPTAB_LIB_WIDEN);
7883
7884 return op0;
7885 }
7886
7887 /* Now see if we are likely to be able to do this. Return if not. */
7888 if (! can_compare_p (operand_mode))
7889 return 0;
7890 icode = setcc_gen_code[(int) code];
7891 if (icode == CODE_FOR_nothing
7892 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7893 {
7894 /* We can only do this if it is one of the special cases that
7895 can be handled without an scc insn. */
7896 if ((code == LT && integer_zerop (arg1))
7897 || (! only_cheap && code == GE && integer_zerop (arg1)))
7898 ;
7899 else if (BRANCH_COST >= 0
7900 && ! only_cheap && (code == NE || code == EQ)
7901 && TREE_CODE (type) != REAL_TYPE
7902 && ((abs_optab->handlers[(int) operand_mode].insn_code
7903 != CODE_FOR_nothing)
7904 || (ffs_optab->handlers[(int) operand_mode].insn_code
7905 != CODE_FOR_nothing)))
7906 ;
7907 else
7908 return 0;
7909 }
7910
7911 preexpand_calls (exp);
7912 if (subtarget == 0 || GET_CODE (subtarget) != REG
7913 || GET_MODE (subtarget) != operand_mode
7914 || ! safe_from_p (subtarget, arg1))
7915 subtarget = 0;
7916
7917 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
7918  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7919
7920 if (target == 0)
7921 target = gen_reg_rtx (mode);
7922
7923 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
7924 because, if the emit_store_flag does anything it will succeed and
7925 OP0 and OP1 will not be used subsequently. */
7926
7927 result = emit_store_flag (target, code,
7928 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
7929 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
7930 operand_mode, unsignedp, 1);
7931
7932 if (result)
7933 {
7934 if (invert)
7935 result = expand_binop (mode, xor_optab, result, const1_rtx,
7936 result, 0, OPTAB_LIB_WIDEN);
7937 return result;
7938 }
7939
7940 /* If this failed, we have to do this with set/compare/jump/set code. */
7941 if (target == 0 || GET_CODE (target) != REG
7942 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7943 target = gen_reg_rtx (GET_MODE (target));
7944
7945  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
7946 result = compare_from_rtx (op0, op1, code, unsignedp,
7947 operand_mode, NULL_RTX, 0);
7948  if (GET_CODE (result) == CONST_INT)
7949 return (((result == const0_rtx && ! invert)
7950 || (result != const0_rtx && invert))
7951 ? const0_rtx : const1_rtx);
7952
7953 label = gen_label_rtx ();
7954 if (bcc_gen_fctn[(int) code] == 0)
7955 abort ();
7956
7957 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
7958  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
7959 emit_label (label);
7960
7961 return target;
7962}
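/* Illustrative sketch, not part of the original source: the single-bit
   case above rewrites

       r = (x & 8) != 0;   as   r = (x >> 3) & 1;

   and XORs the result with 1 for EQ, so no scc insn is needed.  */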
7963\f
7964/* Generate a tablejump instruction (used for switch statements). */
7965
7966#ifdef HAVE_tablejump
7967
7968/* INDEX is the value being switched on, with the lowest value
7969 in the table already subtracted.
7970   MODE is its expected mode (needed if INDEX is constant).
7971 RANGE is the length of the jump table.
7972 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7973
7974 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7975 index value is out of range. */
7976
7977void
7978do_tablejump (index, mode, range, table_label, default_label)
7979     rtx index, range, table_label, default_label;
7980     enum machine_mode mode;
7981{
7982 register rtx temp, vector;
7983
7984 /* Do an unsigned comparison (in the proper mode) between the index
7985 expression and the value which represents the length of the range.
7986 Since we just finished subtracting the lower bound of the range
7987 from the index expression, this comparison allows us to simultaneously
7988 check that the original index expression value is both greater than
7989 or equal to the minimum value of the range and less than or equal to
7990 the maximum value of the range. */
7991
7992  emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
7993  emit_jump_insn (gen_bltu (default_label));
7994
7995 /* If index is in range, it must fit in Pmode.
7996 Convert to Pmode so we can index with it. */
7997 if (mode != Pmode)
7998 index = convert_to_mode (Pmode, index, 1);
7999
8000 /* If flag_force_addr were to affect this address
8001 it could interfere with the tricky assumptions made
8002 about addresses that contain label-refs,
8003 which may be valid only very near the tablejump itself. */
8004 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8005 GET_MODE_SIZE, because this indicates how large insns are. The other
8006 uses should all be Pmode, because they are addresses. This code
8007 could fail if addresses and insns are not the same size. */
8008 index = memory_address_noforce
8009 (CASE_VECTOR_MODE,
8010 gen_rtx (PLUS, Pmode,
8011 gen_rtx (MULT, Pmode, index,
8012			GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8013 gen_rtx (LABEL_REF, Pmode, table_label)));
8014 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8015 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
8016 RTX_UNCHANGING_P (vector) = 1;
8017 convert_move (temp, vector, 0);
8018
8019 emit_jump_insn (gen_tablejump (temp, table_label));
8020
8021#ifndef CASE_VECTOR_PC_RELATIVE
8022 /* If we are generating PIC code or if the table is PC-relative, the
8023 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8024 if (! flag_pic)
8025 emit_barrier ();
8026#endif
8027}
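/* Illustrative sketch, not part of the original source: for a switch
   whose cases span 5..8, the caller first subtracts 5 from the index,
   and the code above then amounts to

       if ((unsigned) index > 3) goto default_label;
       goto *table[index];   /* entry at table_label + index * entry size */
*/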
8028
8029#endif /* HAVE_tablejump */