/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
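
/* For illustration: CEIL (10, 4) is 3.  The division rounds up for
   the nonnegative operands used below, e.g. when computing how many
   words are needed to hold a value of a given byte size.  */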

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void group_insns PROTO((rtx));
static void store_constructor PROTO((tree, rtx));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx (REG, mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
                           var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
         to facilitate use of autoincrement.
         Make a copy of the contents of the memory location
         rather than a copy of the address, but not
         if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          XEXP (x, 0) = QUEUED_VAR (y);
          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (x));
              emit_insn_before (gen_move_insn (temp, x),
                                QUEUED_INSN (y));
              return temp;
            }
          return x;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
        {
          XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
          XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
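
/* Illustration only (not compiler code): the usual calling pattern in
   this file is

        to = protect_from_queue (to, 1);
        from = protect_from_queue (from, 0);
        emit_move_insn (to, from);

   with no intervening emit_queue, so the protected rtx's cannot go
   stale before they reach an insn.  */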

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
        || queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
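
/* For illustration (assumed values, not compiler code): with UNSIGNEDP
   nonzero, converting a QImode value holding 0xff to SImode yields 255
   (zero-extension); with UNSIGNEDP zero the same bits are taken as -1
   and sign-extension yields -1.  */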
void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
        {
          emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
        {
          emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;
            }
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */ /* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
        {
          emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi
          if (HAVE_extendpsisi)
            {
              emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if ((can_extend_p (to_mode, intermediate, unsignedp)
                 != CODE_FOR_nothing)
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.  */
          abort ();
        }
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If X is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
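
/* Illustration only: a typical use, as in emit_block_move below, is

        op2 = convert_to_mode (mode, size, 1);

   which returns SIZE unchanged when it already has the requested mode
   and otherwise returns a lowpart reference or a fresh pseudo holding
   the converted value.  */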
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
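
/* Illustration only (assuming a 32-bit target where MOVE_MAX is 4):
   copying LEN == 7 bytes at ALIGN == 4 emits one SImode move (4
   bytes), then one HImode move (2 bytes), then one QImode move (1
   byte), the offset advancing as each piece is emitted.  */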

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx (MEM, mode, data->to_addr)
             : change_address (data->to, mode,
                               plus_constant (data->to_addr, data->offset)));
      from1 =
        (data->autinc_from
         ? gen_rtx (MEM, mode, data->from_addr)
         : change_address (data->from, mode,
                           plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
              && (insn_operand_predicate[(int) code][0] == 0
                  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
              && (insn_operand_predicate[(int) code][1] == 0
                  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
              && (insn_operand_predicate[(int) code][3] == 0
                  || (*insn_operand_predicate[(int) code][3]) (opalign,
                                                               VOIDmode)))
            {
              rtx op2;
              rtx last = get_last_insn ();
              rtx pat;

              op2 = convert_to_mode (mode, size, 1);
              if (insn_operand_predicate[(int) code][2] != 0
                  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  return;
                }
              else
                delete_insns_since (last);
            }
        }

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
                         VOIDmode, 3, XEXP (x, 0), Pmode,
                         XEXP (y, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype), size,
                                          TREE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype), size,
                                          TREE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));
#endif
    }
}
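
/* Illustration only (hypothetical MEM operands DEST and SRC): copying
   a 12-byte word-aligned block would be written

        emit_block_move (dest, src, GEN_INT (12), 4);

   Since the size is a small CONST_INT this uses move_by_pieces; a
   variable size instead tries a movstr pattern and falls back to the
   memcpy (or bcopy) library call.  */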
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
                           GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
                            GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}

/* Mark the instructions since PREV as a libcall block.
   Add a REG_LIBCALL note to the first insn after PREV and a REG_RETVAL
   note to the most recent insn.  */

static void
group_insns (prev)
     rtx prev;
{
  rtx insn_first;
  rtx insn_last;

  /* Find the instructions to mark.  */
  if (prev)
    insn_first = NEXT_INSN (prev);
  else
    insn_first = get_insns ();

  insn_last = get_last_insn ();

  REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
                                   REG_NOTES (insn_last));

  REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
                                    REG_NOTES (insn_first));
}
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
                         VOIDmode, 3,
                         XEXP (object, 0), Pmode, const0_rtx, Pmode,
                         GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
                         VOIDmode, 2,
                         XEXP (object, 0), Pmode,
                         GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
    submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
                             (class == MODE_COMPLEX_INT
                              ? MODE_INT : MODE_FLOAT),
                             0);

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
           && submode != BLKmode
           && (mov_optab->handlers[(int) submode].insn_code
               != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx prev = get_last_insn ();

      /* Tell flow that the whole of the destination is being set.  */
      if (GET_CODE (x) == REG)
        emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      /* If this is a stack, push the highpart first, so it
         will be in the argument order.

         In that case, change_address is used only to convert
         the mode, not to change the address.  */
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                 ((stack ? change_address (x, submode, (rtx) 0)
                   : gen_highpart (submode, x)),
                  gen_highpart (submode, y)));
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                 ((stack ? change_address (x, submode, (rtx) 0)
                   : gen_lowpart (submode, x)),
                  gen_lowpart (submode, y)));

      group_insns (prev);

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx prev_insn = get_last_insn ();

      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          last_insn = emit_move_insn (xpart, ypart);
        }
      /* Mark these insns as a libcall block.  */
      group_insns (prev_insn);

      return last_insn;
    }
  else
    abort ();
}
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
                          - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
                    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
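
/* Illustration only (hypothetical values): on a machine where the
   stack grows downward, push_block (GEN_INT (16), 0, 0) adjusts the
   stack pointer to allocate 16 bytes and returns an address based on
   virtual_outgoing_args_rtx through which they can be stored.  */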

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */
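
/* Illustration only (hypothetical operands): a word-sized argument
   pushed entirely on the stack might be

        emit_push_insn (arg, SImode, integer_type_node, NULL_RTX,
                        4, 0, NULL_RTX, 0, NULL_RTX, const0_rtx);

   i.e. no partial registers, no extra padding bytes, and ARGS_ADDR of
   zero so that a real push insn is used where the target has one.  */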
1788void
1789emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1790 args_addr, args_so_far)
1791 register rtx x;
1792 enum machine_mode mode;
1793 tree type;
1794 rtx size;
1795 int align;
1796 int partial;
1797 rtx reg;
1798 int extra;
1799 rtx args_addr;
1800 rtx args_so_far;
1801{
1802 rtx xinner;
1803 enum direction stack_direction
1804#ifdef STACK_GROWS_DOWNWARD
1805 = downward;
1806#else
1807 = upward;
1808#endif
1809
1810 /* Decide where to pad the argument: `downward' for below,
1811 `upward' for above, or `none' for don't pad it.
1812 Default is below for small data on big-endian machines; else above. */
1813 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1814
1815 /* Invert direction if stack is post-update. */
1816 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1817 if (where_pad != none)
1818 where_pad = (where_pad == downward ? upward : downward);
1819
1820 xinner = x = protect_from_queue (x, 0);
1821
1822 if (mode == BLKmode)
1823 {
1824 /* Copy a block into the stack, entirely or partially. */
1825
1826 register rtx temp;
1827 int used = partial * UNITS_PER_WORD;
1828 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1829 int skip;
1830
1831 if (size == 0)
1832 abort ();
1833
1834 used -= offset;
1835
1836 /* USED is now the # of bytes we need not copy to the stack
1837 because registers will take care of them. */
1838
1839 if (partial != 0)
1840 xinner = change_address (xinner, BLKmode,
1841 plus_constant (XEXP (xinner, 0), used));
1842
1843 /* If the partial register-part of the arg counts in its stack size,
1844 skip the part of stack space corresponding to the registers.
1845 Otherwise, start copying to the beginning of the stack space,
1846 by setting SKIP to 0. */
1847#ifndef REG_PARM_STACK_SPACE
1848 skip = 0;
1849#else
1850 skip = used;
1851#endif
1852
1853#ifdef PUSH_ROUNDING
1854 /* Do it with several push insns if that doesn't take lots of insns
1855 and if there is no difficulty with push insns that skip bytes
1856 on the stack for alignment purposes. */
1857 if (args_addr == 0
1858 && GET_CODE (size) == CONST_INT
1859 && skip == 0
1860 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1861 < MOVE_RATIO)
1862 /* Here we avoid the case of a structure whose weak alignment
1863 forces many pushes of a small amount of data,
1864 and such small pushes do rounding that causes trouble. */
1865 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1866 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 1867 || PUSH_ROUNDING (align) == align)
1868 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1869 {
1870 /* Push padding now if padding above and stack grows down,
1871 or if padding below and stack grows up.
1872 But if space already allocated, this has already been done. */
1873 if (extra && args_addr == 0
1874 && where_pad != none && where_pad != stack_direction)
906c4e36 1875 anti_adjust_stack (GEN_INT (extra));
1876
1877 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1878 INTVAL (size) - used, align);
1879 }
1880 else
1881#endif /* PUSH_ROUNDING */
1882 {
1883 /* Otherwise make space on the stack and copy the data
1884 to the address of that space. */
1885
1886 /* Deduct words put into registers from the size we must copy. */
1887 if (partial != 0)
1888 {
1889 if (GET_CODE (size) == CONST_INT)
906c4e36 1890 size = GEN_INT (INTVAL (size) - used);
1891 else
1892 size = expand_binop (GET_MODE (size), sub_optab, size,
1893 GEN_INT (used), NULL_RTX, 0,
1894 OPTAB_LIB_WIDEN);
1895 }
1896
1897 /* Get the address of the stack space.
1898 In this case, we do not deal with EXTRA separately.
1899 A single stack adjust will do. */
1900 if (! args_addr)
1901 {
1902 temp = push_block (size, extra, where_pad == downward);
1903 extra = 0;
1904 }
1905 else if (GET_CODE (args_so_far) == CONST_INT)
1906 temp = memory_address (BLKmode,
1907 plus_constant (args_addr,
1908 skip + INTVAL (args_so_far)));
1909 else
1910 temp = memory_address (BLKmode,
1911 plus_constant (gen_rtx (PLUS, Pmode,
1912 args_addr, args_so_far),
1913 skip));
1914
1915 /* TEMP is the address of the block. Copy the data there. */
1916 if (GET_CODE (size) == CONST_INT
1917 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1918 < MOVE_RATIO))
1919 {
1920 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1921 INTVAL (size), align);
1922 goto ret;
1923 }
1924 /* Try the most limited insn first, because there's no point
1925 including more than one in the machine description unless
1926 the more limited one has some advantage. */
1927#ifdef HAVE_movstrqi
1928 if (HAVE_movstrqi
1929 && GET_CODE (size) == CONST_INT
1930 && ((unsigned) INTVAL (size)
1931 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1932 {
1933 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1934 xinner, size, GEN_INT (align));
1935 if (pat != 0)
1936 {
1937 emit_insn (pat);
1938 goto ret;
1939 }
1940 }
1941#endif
1942#ifdef HAVE_movstrhi
1943 if (HAVE_movstrhi
1944 && GET_CODE (size) == CONST_INT
1945 && ((unsigned) INTVAL (size)
1946 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1947 {
1948 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1949 xinner, size, GEN_INT (align));
1950 if (pat != 0)
1951 {
1952 emit_insn (pat);
1953 goto ret;
1954 }
1955 }
1956#endif
1957#ifdef HAVE_movstrsi
1958 if (HAVE_movstrsi)
1959 {
1960 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1961 xinner, size, GEN_INT (align));
1962 if (pat != 0)
1963 {
1964 emit_insn (pat);
1965 goto ret;
1966 }
1967 }
1968#endif
1969#ifdef HAVE_movstrdi
1970 if (HAVE_movstrdi)
1971 {
1972 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1973 xinner, size, GEN_INT (align));
1974 if (pat != 0)
1975 {
1976 emit_insn (pat);
1977 goto ret;
1978 }
1979 }
1980#endif
1981
1982#ifndef ACCUMULATE_OUTGOING_ARGS
1983 /* If the source is referenced relative to the stack pointer,
1984 copy it to another register to stabilize it. We do not need
1985 to do this if we know that we won't be changing sp. */
1986
1987 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1988 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1989 temp = copy_to_reg (temp);
1990#endif
1991
1992 /* Make inhibit_defer_pop nonzero around the library call
1993 to force it to pop the bcopy-arguments right away. */
1994 NO_DEFER_POP;
1995#ifdef TARGET_MEM_FUNCTIONS
d562e42e 1996 emit_library_call (memcpy_libfunc, 0,
bbf6f052 1997 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1998 convert_to_mode (TYPE_MODE (sizetype),
1999 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2000 TYPE_MODE (sizetype));
bbf6f052 2001#else
d562e42e 2002 emit_library_call (bcopy_libfunc, 0,
bbf6f052 2003 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2004 convert_to_mode (TYPE_MODE (sizetype),
2005 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2006 TYPE_MODE (sizetype));
2007#endif
2008 OK_DEFER_POP;
2009 }
2010 }
2011 else if (partial > 0)
2012 {
2013 /* Scalar partly in registers. */
2014
2015 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2016 int i;
2017 int not_stack;
2018 /* # words of start of argument
2019 that we must make space for but need not store. */
2020 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2021 int args_offset = INTVAL (args_so_far);
2022 int skip;
2023
2024 /* Push padding now if padding above and stack grows down,
2025 or if padding below and stack grows up.
2026 But if space already allocated, this has already been done. */
2027 if (extra && args_addr == 0
2028 && where_pad != none && where_pad != stack_direction)
906c4e36 2029 anti_adjust_stack (GEN_INT (extra));
2030
2031 /* If we make space by pushing it, we might as well push
2032 the real data. Otherwise, we can leave OFFSET nonzero
2033 and leave the space uninitialized. */
2034 if (args_addr == 0)
2035 offset = 0;
2036
2037 /* Now NOT_STACK gets the number of words that we don't need to
2038 allocate on the stack. */
2039 not_stack = partial - offset;
2040
2041 /* If the partial register-part of the arg counts in its stack size,
2042 skip the part of stack space corresponding to the registers.
2043 Otherwise, start copying to the beginning of the stack space,
2044 by setting SKIP to 0. */
2045#ifndef REG_PARM_STACK_SPACE
2046 skip = 0;
2047#else
2048 skip = not_stack;
2049#endif
2050
2051 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2052 x = validize_mem (force_const_mem (mode, x));
2053
2054 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2055 SUBREGs of such registers are not allowed. */
2056 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2057 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2058 x = copy_to_reg (x);
2059
2060 /* Loop over all the words allocated on the stack for this arg. */
2061 /* We can do it by words, because any scalar bigger than a word
 2062	     has a size that is a multiple of a word.  */
2063#ifndef PUSH_ARGS_REVERSED
2064 for (i = not_stack; i < size; i++)
2065#else
2066 for (i = size - 1; i >= not_stack; i--)
2067#endif
2068 if (i >= not_stack + offset)
2069 emit_push_insn (operand_subword_force (x, i, mode),
2070 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2071 0, args_addr,
2072 GEN_INT (args_offset + ((i - not_stack + skip)
2073 * UNITS_PER_WORD)));
2074 }
2075 else
2076 {
2077 rtx addr;
2078
2079 /* Push padding now if padding above and stack grows down,
2080 or if padding below and stack grows up.
2081 But if space already allocated, this has already been done. */
2082 if (extra && args_addr == 0
2083 && where_pad != none && where_pad != stack_direction)
906c4e36 2084 anti_adjust_stack (GEN_INT (extra));
2085
2086#ifdef PUSH_ROUNDING
2087 if (args_addr == 0)
2088 addr = gen_push_operand ();
2089 else
2090#endif
2091 if (GET_CODE (args_so_far) == CONST_INT)
2092 addr
2093 = memory_address (mode,
2094 plus_constant (args_addr, INTVAL (args_so_far)));
2095 else
2096 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2097 args_so_far));
2098
2099 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2100 }
2101
2102 ret:
2103 /* If part should go in registers, copy that part
2104 into the appropriate registers. Do this now, at the end,
2105 since mem-to-mem copies above may do function calls. */
cd048831 2106 if (partial > 0 && reg != 0)
2107 move_block_to_reg (REGNO (reg), x, partial, mode);
2108
2109 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 2110 anti_adjust_stack (GEN_INT (extra));
2111}
2112\f
2113/* Expand an assignment that stores the value of FROM into TO.
2114 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2115 (This may contain a QUEUED rtx.)
2116 Otherwise, the returned value is not meaningful.
2117
2118 SUGGEST_REG is no longer actually used.
2119 It used to mean, copy the value through a register
2120 and return that register, if that is possible.
2121 But now we do this if WANT_VALUE.
2122
2123 If the value stored is a constant, we return the constant. */
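
/* A minimal sketch of a typical use, expanding the assignment
   `LHS = RHS' for hypothetical trees LHS and RHS and ignoring the
   value (disabled):  */
#if 0
  expand_assignment (lhs, rhs, 0, 0);
#endif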
2124
2125rtx
2126expand_assignment (to, from, want_value, suggest_reg)
2127 tree to, from;
2128 int want_value;
2129 int suggest_reg;
2130{
2131 register rtx to_rtx = 0;
2132 rtx result;
2133
2134 /* Don't crash if the lhs of the assignment was erroneous. */
2135
2136 if (TREE_CODE (to) == ERROR_MARK)
906c4e36 2137 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2138
2139 /* Assignment of a structure component needs special treatment
2140 if the structure component's rtx is not simply a MEM.
2141 Assignment of an array element at a constant index
2142 has the same problem. */
2143
2144 if (TREE_CODE (to) == COMPONENT_REF
2145 || TREE_CODE (to) == BIT_FIELD_REF
2146 || (TREE_CODE (to) == ARRAY_REF
2147 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2148 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2149 {
2150 enum machine_mode mode1;
2151 int bitsize;
2152 int bitpos;
7bb0943f 2153 tree offset;
2154 int unsignedp;
2155 int volatilep = 0;
7bb0943f 2156 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2157 &mode1, &unsignedp, &volatilep);
2158
2159 /* If we are going to use store_bit_field and extract_bit_field,
2160 make sure to_rtx will be safe for multiple use. */
2161
2162 if (mode1 == VOIDmode && want_value)
2163 tem = stabilize_reference (tem);
2164
906c4e36 2165 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2166 if (offset != 0)
2167 {
906c4e36 2168 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2169
2170 if (GET_CODE (to_rtx) != MEM)
2171 abort ();
2172 to_rtx = change_address (to_rtx, VOIDmode,
2173 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2174 force_reg (Pmode, offset_rtx)));
2175 }
2176 if (volatilep)
2177 {
2178 if (GET_CODE (to_rtx) == MEM)
2179 MEM_VOLATILE_P (to_rtx) = 1;
2180#if 0 /* This was turned off because, when a field is volatile
2181 in an object which is not volatile, the object may be in a register,
2182 and then we would abort over here. */
2183 else
2184 abort ();
2185#endif
2186 }
2187
2188 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2189 (want_value
2190 /* Spurious cast makes HPUX compiler happy. */
2191 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2192 : VOIDmode),
2193 unsignedp,
2194 /* Required alignment of containing datum. */
2195 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2196 int_size_in_bytes (TREE_TYPE (tem)));
2197 preserve_temp_slots (result);
2198 free_temp_slots ();
2199
2200 /* If we aren't returning a result, just pass on what expand_expr
2201 returned; it was probably const0_rtx. Otherwise, convert RESULT
2202 to the proper mode. */
2203 return (want_value ? convert_to_mode (TYPE_MODE (TREE_TYPE (to)), result,
2204 TREE_UNSIGNED (TREE_TYPE (to)))
2205 : result);
2206 }
2207
2208 /* If the rhs is a function call and its value is not an aggregate,
2209 call the function before we start to compute the lhs.
2210 This is needed for correct code for cases such as
2211 val = setjmp (buf) on machines where reference to val
2212 requires loading up part of an address in a separate insn. */
2213 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from))
2214 {
2215 rtx value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2216 if (to_rtx == 0)
2217 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2218 emit_move_insn (to_rtx, value);
2219 preserve_temp_slots (to_rtx);
2220 free_temp_slots ();
2221 return to_rtx;
2222 }
2223
2224 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2225 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2226
2227 if (to_rtx == 0)
906c4e36 2228 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
bbf6f052 2229
2230 /* Don't move directly into a return register. */
2231 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2232 {
66538193 2233 rtx temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2234 emit_move_insn (to_rtx, temp);
2235 preserve_temp_slots (to_rtx);
2236 free_temp_slots ();
2237 return to_rtx;
2238 }
2239
2240 /* In case we are returning the contents of an object which overlaps
2241 the place the value is being stored, use a safe function when copying
2242 a value through a pointer into a structure value return block. */
2243 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2244 && current_function_returns_struct
2245 && !current_function_returns_pcc_struct)
2246 {
906c4e36 2247 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2248 rtx size = expr_size (from);
2249
2250#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2251 emit_library_call (memcpy_libfunc, 0,
2252 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2253 XEXP (from_rtx, 0), Pmode,
2254 convert_to_mode (TYPE_MODE (sizetype),
2255 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2256 TYPE_MODE (sizetype));
bbf6f052 2257#else
d562e42e 2258 emit_library_call (bcopy_libfunc, 0,
2259 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2260 XEXP (to_rtx, 0), Pmode,
2261 convert_to_mode (TYPE_MODE (sizetype),
2262 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2263 TYPE_MODE (sizetype));
2264#endif
2265
2266 preserve_temp_slots (to_rtx);
2267 free_temp_slots ();
2268 return to_rtx;
2269 }
2270
2271 /* Compute FROM and store the value in the rtx we got. */
2272
2273 result = store_expr (from, to_rtx, want_value);
2274 preserve_temp_slots (result);
2275 free_temp_slots ();
2276 return result;
2277}
2278
2279/* Generate code for computing expression EXP,
2280 and storing the value into TARGET.
2281 Returns TARGET or an equivalent value.
2282 TARGET may contain a QUEUED rtx.
2283
2284 If SUGGEST_REG is nonzero, copy the value through a register
2285 and return that register, if that is possible.
2286
2287 If the value stored is a constant, we return the constant. */
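
/* A minimal sketch: store the value of EXP into an rtx TARGET obtained
   earlier, asking for the value back (copied through a register when
   possible); both names are hypothetical (disabled):  */
#if 0
  temp = store_expr (exp, target, 1);
#endif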
2288
2289rtx
2290store_expr (exp, target, suggest_reg)
2291 register tree exp;
2292 register rtx target;
2293 int suggest_reg;
2294{
2295 register rtx temp;
2296 int dont_return_target = 0;
2297
2298 if (TREE_CODE (exp) == COMPOUND_EXPR)
2299 {
2300 /* Perform first part of compound expression, then assign from second
2301 part. */
2302 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2303 emit_queue ();
2304 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2305 }
2306 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2307 {
2308 /* For conditional expression, get safe form of the target. Then
2309 test the condition, doing the appropriate assignment on either
2310 side. This avoids the creation of unnecessary temporaries.
2311 For non-BLKmode, it is more efficient not to do this. */
2312
2313 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2314
2315 emit_queue ();
2316 target = protect_from_queue (target, 1);
2317
2318 NO_DEFER_POP;
2319 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2320 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2321 emit_queue ();
2322 emit_jump_insn (gen_jump (lab2));
2323 emit_barrier ();
2324 emit_label (lab1);
2325 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2326 emit_queue ();
2327 emit_label (lab2);
2328 OK_DEFER_POP;
2329 return target;
2330 }
c2e6aff6 2331 else if (suggest_reg && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2332 && GET_MODE (target) != BLKmode)
2333 /* If target is in memory and caller wants value in a register instead,
2334 arrange that. Pass TARGET as target for expand_expr so that,
2335 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2336 We know expand_expr will not use the target in that case.
2337 Don't do this if TARGET is volatile because we are supposed
2338 to write it and then read it. */
bbf6f052 2339 {
906c4e36 2340 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2341 GET_MODE (target), 0);
2342 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2343 temp = copy_to_reg (temp);
2344 dont_return_target = 1;
2345 }
2346 else if (queued_subexp_p (target))
2347 /* If target contains a postincrement, it is not safe
2348 to use as the returned value. It would access the wrong
2349 place by the time the queued increment gets output.
2350 So copy the value through a temporary and use that temp
2351 as the result. */
2352 {
2353 /* ??? There may be a bug here in the case of a target
 2354	 that is volatile, but I'm too sleepy today to write anything
2355 to handle it. */
2356 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2357 {
2358 /* Expand EXP into a new pseudo. */
2359 temp = gen_reg_rtx (GET_MODE (target));
2360 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2361 }
2362 else
906c4e36 2363 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2364 dont_return_target = 1;
2365 }
2366 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
 2367	  /* If this is a scalar in a register that is stored in a wider mode
2368 than the declared mode, compute the result into its declared mode
2369 and then convert to the wider mode. Our value is the computed
2370 expression. */
2371 {
2372 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2373 convert_move (SUBREG_REG (target), temp,
2374 SUBREG_PROMOTED_UNSIGNED_P (target));
2375 return temp;
2376 }
2377 else
2378 {
2379 temp = expand_expr (exp, target, GET_MODE (target), 0);
2380 /* DO return TARGET if it's a specified hardware register.
2381 expand_return relies on this.
2382 DO return TARGET if it's a volatile mem ref; ANSI requires this. */
2383 if (!(target && GET_CODE (target) == REG
2384 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2385 && CONSTANT_P (temp)
2386 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2387 dont_return_target = 1;
2388 }
2389
2390 /* If value was not generated in the target, store it there.
 2391	     Convert the value to TARGET's type first if necessary.  */
2392
2393 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2394 {
2395 target = protect_from_queue (target, 1);
2396 if (GET_MODE (temp) != GET_MODE (target)
2397 && GET_MODE (temp) != VOIDmode)
2398 {
2399 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2400 if (dont_return_target)
2401 {
2402 /* In this case, we will return TEMP,
2403 so make sure it has the proper mode.
2404 But don't forget to store the value into TARGET. */
2405 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2406 emit_move_insn (target, temp);
2407 }
2408 else
2409 convert_move (target, temp, unsignedp);
2410 }
2411
2412 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2413 {
2414 /* Handle copying a string constant into an array.
2415 The string constant may be shorter than the array.
2416 So copy just the string's actual length, and clear the rest. */
2417 rtx size;
2418
2419 /* Get the size of the data type of the string,
2420 which is actually the size of the target. */
2421 size = expr_size (exp);
2422 if (GET_CODE (size) == CONST_INT
2423 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2424 emit_block_move (target, temp, size,
2425 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2426 else
bbf6f052 2427 {
2428 /* Compute the size of the data to copy from the string. */
2429 tree copy_size
2430 = size_binop (MIN_EXPR,
2431 size_binop (CEIL_DIV_EXPR,
2432 TYPE_SIZE (TREE_TYPE (exp)),
2433 size_int (BITS_PER_UNIT)),
2434 convert (sizetype,
2435 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2436 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2437 VOIDmode, 0);
2438 rtx label = 0;
2439
2440 /* Copy that much. */
2441 emit_block_move (target, temp, copy_size_rtx,
2442 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2443
2444 /* Figure out how much is left in TARGET
2445 that we have to clear. */
2446 if (GET_CODE (copy_size_rtx) == CONST_INT)
2447 {
2448 temp = plus_constant (XEXP (target, 0),
2449 TREE_STRING_LENGTH (exp));
2450 size = plus_constant (size,
2451 - TREE_STRING_LENGTH (exp));
2452 }
2453 else
2454 {
2455 enum machine_mode size_mode = Pmode;
2456
2457 temp = force_reg (Pmode, XEXP (target, 0));
2458 temp = expand_binop (size_mode, add_optab, temp,
2459 copy_size_rtx, NULL_RTX, 0,
2460 OPTAB_LIB_WIDEN);
2461
2462 size = expand_binop (size_mode, sub_optab, size,
2463 copy_size_rtx, NULL_RTX, 0,
2464 OPTAB_LIB_WIDEN);
e87b4f3f 2465
906c4e36 2466 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2467 GET_MODE (size), 0, 0);
2468 label = gen_label_rtx ();
2469 emit_jump_insn (gen_blt (label));
2470 }
2471
2472 if (size != const0_rtx)
2473 {
bbf6f052 2474#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2475 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
e87b4f3f 2476 temp, Pmode, const0_rtx, Pmode, size, Pmode);
bbf6f052 2477#else
d562e42e 2478 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
e87b4f3f 2479 temp, Pmode, size, Pmode);
bbf6f052 2480#endif
2481 }
2482 if (label)
2483 emit_label (label);
2484 }
2485 }
2486 else if (GET_MODE (temp) == BLKmode)
2487 emit_block_move (target, temp, expr_size (exp),
2488 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2489 else
2490 emit_move_insn (target, temp);
2491 }
2492 if (dont_return_target)
2493 return temp;
2494 return target;
2495}
2496\f
2497/* Store the value of constructor EXP into the rtx TARGET.
2498 TARGET is either a REG or a MEM. */
2499
2500static void
2501store_constructor (exp, target)
2502 tree exp;
2503 rtx target;
2504{
2505 tree type = TREE_TYPE (exp);
2506
2507 /* We know our target cannot conflict, since safe_from_p has been called. */
2508#if 0
2509 /* Don't try copying piece by piece into a hard register
2510 since that is vulnerable to being clobbered by EXP.
2511 Instead, construct in a pseudo register and then copy it all. */
2512 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2513 {
2514 rtx temp = gen_reg_rtx (GET_MODE (target));
2515 store_constructor (exp, temp);
2516 emit_move_insn (target, temp);
2517 return;
2518 }
2519#endif
2520
2521 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2522 || TREE_CODE (type) == QUAL_UNION_TYPE)
2523 {
2524 register tree elt;
2525
4af3895e 2526 /* Inform later passes that the whole union value is dead. */
2527 if (TREE_CODE (type) == UNION_TYPE
2528 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 2529 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2530
2531 /* If we are building a static constructor into a register,
2532 set the initial value as zero so we can fold the value into
2533 a constant. */
2534 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2535 emit_move_insn (target, const0_rtx);
2536
2537 /* If the constructor has fewer fields than the structure,
2538 clear the whole structure first. */
2539 else if (list_length (CONSTRUCTOR_ELTS (exp))
2540 != list_length (TYPE_FIELDS (type)))
2541 clear_storage (target, int_size_in_bytes (type));
2542 else
2543 /* Inform later passes that the old value is dead. */
2544 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2545
2546 /* Store each element of the constructor into
2547 the corresponding field of TARGET. */
2548
2549 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2550 {
2551 register tree field = TREE_PURPOSE (elt);
2552 register enum machine_mode mode;
2553 int bitsize;
2554 int bitpos;
2555 int unsignedp;
2556
2557 /* Just ignore missing fields.
2558 We cleared the whole structure, above,
2559 if any fields are missing. */
2560 if (field == 0)
2561 continue;
2562
2563 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2564 unsignedp = TREE_UNSIGNED (field);
2565 mode = DECL_MODE (field);
2566 if (DECL_BIT_FIELD (field))
2567 mode = VOIDmode;
2568
2569 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2570 /* ??? This case remains to be written. */
2571 abort ();
2572
2573 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2574
2575 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2576 /* The alignment of TARGET is
2577 at least what its type requires. */
2578 VOIDmode, 0,
2579 TYPE_ALIGN (type) / BITS_PER_UNIT,
2580 int_size_in_bytes (type));
2581 }
2582 }
4af3895e 2583 else if (TREE_CODE (type) == ARRAY_TYPE)
2584 {
2585 register tree elt;
2586 register int i;
4af3895e 2587 tree domain = TYPE_DOMAIN (type);
2588 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2589 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 2590 tree elttype = TREE_TYPE (type);
2591
2592 /* If the constructor has fewer fields than the structure,
 2593	 clear the whole structure first.  Similarly if this is a
 2594	 static constructor of a non-BLKmode object.  */
bbf6f052 2595
2596 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2597 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
597bb7f1 2598 clear_storage (target, int_size_in_bytes (type));
2599 else
2600 /* Inform later passes that the old value is dead. */
2601 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2602
2603 /* Store each element of the constructor into
2604 the corresponding element of TARGET, determined
2605 by counting the elements. */
2606 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2607 elt;
2608 elt = TREE_CHAIN (elt), i++)
2609 {
2610 register enum machine_mode mode;
2611 int bitsize;
2612 int bitpos;
2613 int unsignedp;
2614
2615 mode = TYPE_MODE (elttype);
2616 bitsize = GET_MODE_BITSIZE (mode);
2617 unsignedp = TREE_UNSIGNED (elttype);
2618
2619 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2620
2621 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2622 /* The alignment of TARGET is
2623 at least what its type requires. */
2624 VOIDmode, 0,
2625 TYPE_ALIGN (type) / BITS_PER_UNIT,
2626 int_size_in_bytes (type));
2627 }
2628 }
2629
2630 else
2631 abort ();
2632}
2633
2634/* Store the value of EXP (an expression tree)
2635 into a subfield of TARGET which has mode MODE and occupies
2636 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2637 If MODE is VOIDmode, it means that we are storing into a bit-field.
2638
2639 If VALUE_MODE is VOIDmode, return nothing in particular.
2640 UNSIGNEDP is not used in this case.
2641
2642 Otherwise, return an rtx for the value stored. This rtx
2643 has mode VALUE_MODE if that is convenient to do.
2644 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2645
2646 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2647 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2648
2649static rtx
2650store_field (target, bitsize, bitpos, mode, exp, value_mode,
2651 unsignedp, align, total_size)
2652 rtx target;
2653 int bitsize, bitpos;
2654 enum machine_mode mode;
2655 tree exp;
2656 enum machine_mode value_mode;
2657 int unsignedp;
2658 int align;
2659 int total_size;
2660{
906c4e36 2661 HOST_WIDE_INT width_mask = 0;
bbf6f052 2662
2663 if (bitsize < HOST_BITS_PER_WIDE_INT)
2664 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2665
2666 /* If we are storing into an unaligned field of an aligned union that is
2667 in a register, we may have the mode of TARGET being an integer mode but
2668 MODE == BLKmode. In that case, get an aligned object whose size and
2669 alignment are the same as TARGET and store TARGET into it (we can avoid
2670 the store if the field being stored is the entire width of TARGET). Then
2671 call ourselves recursively to store the field into a BLKmode version of
2672 that object. Finally, load from the object into TARGET. This is not
2673 very efficient in general, but should only be slightly more expensive
2674 than the otherwise-required unaligned accesses. Perhaps this can be
2675 cleaned up later. */
2676
2677 if (mode == BLKmode
2678 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2679 {
2680 rtx object = assign_stack_temp (GET_MODE (target),
2681 GET_MODE_SIZE (GET_MODE (target)), 0);
2682 rtx blk_object = copy_rtx (object);
2683
2684 PUT_MODE (blk_object, BLKmode);
2685
2686 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2687 emit_move_insn (object, target);
2688
2689 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2690 align, total_size);
2691
2692 emit_move_insn (target, object);
2693
2694 return target;
2695 }
2696
2697 /* If the structure is in a register or if the component
2698 is a bit field, we cannot use addressing to access it.
2699 Use bit-field techniques or SUBREG to store in it. */
2700
2701 if (mode == VOIDmode
2702 || (mode != BLKmode && ! direct_store[(int) mode])
2703 || GET_CODE (target) == REG
c980ac49 2704 || GET_CODE (target) == SUBREG
2705 /* If the field isn't aligned enough to store as an ordinary memref,
2706 store it as a bit field. */
2707 || (STRICT_ALIGNMENT
2708 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
2709 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 2710 {
906c4e36 2711 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2712 /* Store the value in the bitfield. */
2713 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2714 if (value_mode != VOIDmode)
2715 {
2716 /* The caller wants an rtx for the value. */
2717 /* If possible, avoid refetching from the bitfield itself. */
2718 if (width_mask != 0
2719 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 2720 {
9074de27 2721 tree count;
5c4d7cfb 2722 enum machine_mode tmode;
86a2c12a 2723
2724 if (unsignedp)
2725 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2726 tmode = GET_MODE (temp);
2727 if (tmode == VOIDmode)
2728 tmode = value_mode;
2729 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2730 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2731 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2732 }
bbf6f052 2733 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2734 NULL_RTX, value_mode, 0, align,
2735 total_size);
2736 }
2737 return const0_rtx;
2738 }
2739 else
2740 {
2741 rtx addr = XEXP (target, 0);
2742 rtx to_rtx;
2743
2744 /* If a value is wanted, it must be the lhs;
2745 so make the address stable for multiple use. */
2746
2747 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2748 && ! CONSTANT_ADDRESS_P (addr)
2749 /* A frame-pointer reference is already stable. */
2750 && ! (GET_CODE (addr) == PLUS
2751 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2752 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2753 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2754 addr = copy_to_reg (addr);
2755
2756 /* Now build a reference to just the desired component. */
2757
2758 to_rtx = change_address (target, mode,
2759 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2760 MEM_IN_STRUCT_P (to_rtx) = 1;
2761
2762 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2763 }
2764}
2765\f
2766/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2767 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 2768 ARRAY_REFs and find the ultimate containing object, which we return.
2769
2770 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2771 bit position, and *PUNSIGNEDP to the signedness of the field.
2772 If the position of the field is variable, we store a tree
2773 giving the variable offset (in units) in *POFFSET.
2774 This offset is in addition to the bit position.
2775 If the position is not variable, we store 0 in *POFFSET.
2776
2777 If any of the extraction expressions is volatile,
2778 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2779
2780 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2781 is a mode that can be used to access the field. In that case, *PBITSIZE
2782 is redundant.
2783
2784 If the field describes a variable-sized object, *PMODE is set to
2785 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2786 this case, but the address of the object can be found. */
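
/* A minimal sketch of the usual calling pattern, mirroring the use in
   expand_assignment above (disabled; EXP is a hypothetical reference
   tree):  */
#if 0
  {
    enum machine_mode mode1;
    int bitsize, bitpos, unsignedp;
    int volatilep = 0;
    tree offset;
    tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                    &mode1, &unsignedp, &volatilep);
  }
#endif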
2787
2788tree
2789get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
2790 punsignedp, pvolatilep)
2791 tree exp;
2792 int *pbitsize;
2793 int *pbitpos;
7bb0943f 2794 tree *poffset;
2795 enum machine_mode *pmode;
2796 int *punsignedp;
2797 int *pvolatilep;
2798{
2799 tree size_tree = 0;
2800 enum machine_mode mode = VOIDmode;
742920c7 2801 tree offset = integer_zero_node;
2802
2803 if (TREE_CODE (exp) == COMPONENT_REF)
2804 {
2805 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2806 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2807 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2808 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2809 }
2810 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2811 {
2812 size_tree = TREE_OPERAND (exp, 1);
2813 *punsignedp = TREE_UNSIGNED (exp);
2814 }
2815 else
2816 {
2817 mode = TYPE_MODE (TREE_TYPE (exp));
2818 *pbitsize = GET_MODE_BITSIZE (mode);
2819 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2820 }
2821
2822 if (size_tree)
2823 {
2824 if (TREE_CODE (size_tree) != INTEGER_CST)
2825 mode = BLKmode, *pbitsize = -1;
2826 else
2827 *pbitsize = TREE_INT_CST_LOW (size_tree);
2828 }
2829
2830 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2831 and find the ultimate containing object. */
2832
2833 *pbitpos = 0;
2834
2835 while (1)
2836 {
7bb0943f 2837 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 2838 {
2839 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2840 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2841 : TREE_OPERAND (exp, 2));
bbf6f052 2842
2843 /* If this field hasn't been filled in yet, don't go
2844 past it. This should only happen when folding expressions
2845 made during type construction. */
2846 if (pos == 0)
2847 break;
2848
2849 if (TREE_CODE (pos) == PLUS_EXPR)
2850 {
2851 tree constant, var;
2852 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2853 {
2854 constant = TREE_OPERAND (pos, 0);
2855 var = TREE_OPERAND (pos, 1);
2856 }
2857 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2858 {
2859 constant = TREE_OPERAND (pos, 1);
2860 var = TREE_OPERAND (pos, 0);
2861 }
2862 else
2863 abort ();
742920c7 2864
7bb0943f 2865 *pbitpos += TREE_INT_CST_LOW (constant);
2866 offset = size_binop (PLUS_EXPR, offset,
2867 size_binop (FLOOR_DIV_EXPR, var,
2868 size_int (BITS_PER_UNIT)));
2869 }
2870 else if (TREE_CODE (pos) == INTEGER_CST)
2871 *pbitpos += TREE_INT_CST_LOW (pos);
2872 else
2873 {
2874 /* Assume here that the offset is a multiple of a unit.
2875 If not, there should be an explicitly added constant. */
2876 offset = size_binop (PLUS_EXPR, offset,
2877 size_binop (FLOOR_DIV_EXPR, pos,
2878 size_int (BITS_PER_UNIT)));
7bb0943f 2879 }
bbf6f052 2880 }
bbf6f052 2881
742920c7 2882 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 2883 {
2884 /* This code is based on the code in case ARRAY_REF in expand_expr
2885 below. We assume here that the size of an array element is
2886 always an integral multiple of BITS_PER_UNIT. */
2887
2888 tree index = TREE_OPERAND (exp, 1);
2889 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
2890 tree low_bound
2891 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
2892 tree index_type = TREE_TYPE (index);
2893
2894 if (! integer_zerop (low_bound))
2895 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
2896
2897 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
2898 {
2899 index = convert (type_for_size (POINTER_SIZE, 0), index);
2900 index_type = TREE_TYPE (index);
2901 }
2902
2903 index = fold (build (MULT_EXPR, index_type, index,
2904 TYPE_SIZE (TREE_TYPE (exp))));
2905
2906 if (TREE_CODE (index) == INTEGER_CST
2907 && TREE_INT_CST_HIGH (index) == 0)
2908 *pbitpos += TREE_INT_CST_LOW (index);
2909 else
2910 offset = size_binop (PLUS_EXPR, offset,
2911 size_binop (FLOOR_DIV_EXPR, index,
2912 size_int (BITS_PER_UNIT)));
2913 }
2914 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2915 && ! ((TREE_CODE (exp) == NOP_EXPR
2916 || TREE_CODE (exp) == CONVERT_EXPR)
2917 && (TYPE_MODE (TREE_TYPE (exp))
2918 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2919 break;
2920
2921 /* If any reference in the chain is volatile, the effect is volatile. */
2922 if (TREE_THIS_VOLATILE (exp))
2923 *pvolatilep = 1;
2924 exp = TREE_OPERAND (exp, 0);
2925 }
2926
2927 /* If this was a bit-field, see if there is a mode that allows direct
2928 access in case EXP is in memory. */
e7f3c83f 2929 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
2930 {
2931 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2932 if (mode == BLKmode)
2933 mode = VOIDmode;
2934 }
2935
2936 if (integer_zerop (offset))
2937 offset = 0;
2938
bbf6f052 2939 *pmode = mode;
2940 *poffset = offset;
2941#if 0
2942 /* We aren't finished fixing the callers to really handle nonzero offset. */
2943 if (offset != 0)
2944 abort ();
2945#endif
2946
2947 return exp;
2948}
2949\f
2950/* Given an rtx VALUE that may contain additions and multiplications,
2951 return an equivalent value that just refers to a register or memory.
2952 This is done by generating instructions to perform the arithmetic
2953 and returning a pseudo-register containing the value.
2954
2955 The returned value may be a REG, SUBREG, MEM or constant. */
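
/* A minimal sketch: reduce (plus (reg) (const_int 4)) to a single
   register or memory reference (disabled; ADDR and REG are
   hypothetical):  */
#if 0
  addr = force_operand (gen_rtx (PLUS, Pmode, reg, GEN_INT (4)),
                        NULL_RTX);
#endif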
2956
2957rtx
2958force_operand (value, target)
2959 rtx value, target;
2960{
2961 register optab binoptab = 0;
2962 /* Use a temporary to force order of execution of calls to
2963 `force_operand'. */
2964 rtx tmp;
2965 register rtx op2;
2966 /* Use subtarget as the target for operand 0 of a binary operation. */
2967 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2968
2969 if (GET_CODE (value) == PLUS)
2970 binoptab = add_optab;
2971 else if (GET_CODE (value) == MINUS)
2972 binoptab = sub_optab;
2973 else if (GET_CODE (value) == MULT)
2974 {
2975 op2 = XEXP (value, 1);
2976 if (!CONSTANT_P (op2)
2977 && !(GET_CODE (op2) == REG && op2 != subtarget))
2978 subtarget = 0;
2979 tmp = force_operand (XEXP (value, 0), subtarget);
2980 return expand_mult (GET_MODE (value), tmp,
906c4e36 2981 force_operand (op2, NULL_RTX),
2982 target, 0);
2983 }
2984
2985 if (binoptab)
2986 {
2987 op2 = XEXP (value, 1);
2988 if (!CONSTANT_P (op2)
2989 && !(GET_CODE (op2) == REG && op2 != subtarget))
2990 subtarget = 0;
2991 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2992 {
2993 binoptab = add_optab;
2994 op2 = negate_rtx (GET_MODE (value), op2);
2995 }
2996
2997 /* Check for an addition with OP2 a constant integer and our first
2998 operand a PLUS of a virtual register and something else. In that
2999 case, we want to emit the sum of the virtual register and the
3000 constant first and then add the other value. This allows virtual
3001 register instantiation to simply modify the constant rather than
3002 creating another one around this addition. */
3003 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3004 && GET_CODE (XEXP (value, 0)) == PLUS
3005 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3006 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3007 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3008 {
3009 rtx temp = expand_binop (GET_MODE (value), binoptab,
3010 XEXP (XEXP (value, 0), 0), op2,
3011 subtarget, 0, OPTAB_LIB_WIDEN);
3012 return expand_binop (GET_MODE (value), binoptab, temp,
3013 force_operand (XEXP (XEXP (value, 0), 1), 0),
3014 target, 0, OPTAB_LIB_WIDEN);
3015 }
3016
3017 tmp = force_operand (XEXP (value, 0), subtarget);
3018 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 3019 force_operand (op2, NULL_RTX),
bbf6f052 3020 target, 0, OPTAB_LIB_WIDEN);
8008b228 3021 /* We give UNSIGNEDP = 0 to expand_binop
3022 because the only operations we are expanding here are signed ones. */
3023 }
3024 return value;
3025}
3026\f
3027/* Subroutine of expand_expr:
3028 save the non-copied parts (LIST) of an expr (LHS), and return a list
3029 which can restore these values to their previous values,
3030 should something modify their storage. */
3031
3032static tree
3033save_noncopied_parts (lhs, list)
3034 tree lhs;
3035 tree list;
3036{
3037 tree tail;
3038 tree parts = 0;
3039
3040 for (tail = list; tail; tail = TREE_CHAIN (tail))
3041 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3042 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3043 else
3044 {
3045 tree part = TREE_VALUE (tail);
3046 tree part_type = TREE_TYPE (part);
906c4e36 3047 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3048 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3049 int_size_in_bytes (part_type), 0);
3050 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 3051 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 3052 parts = tree_cons (to_be_saved,
3053 build (RTL_EXPR, part_type, NULL_TREE,
3054 (tree) target),
3055 parts);
3056 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3057 }
3058 return parts;
3059}
3060
3061/* Subroutine of expand_expr:
3062 record the non-copied parts (LIST) of an expr (LHS), and return a list
3063 which specifies the initial values of these parts. */
3064
3065static tree
3066init_noncopied_parts (lhs, list)
3067 tree lhs;
3068 tree list;
3069{
3070 tree tail;
3071 tree parts = 0;
3072
3073 for (tail = list; tail; tail = TREE_CHAIN (tail))
3074 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3075 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3076 else
3077 {
3078 tree part = TREE_VALUE (tail);
3079 tree part_type = TREE_TYPE (part);
906c4e36 3080 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3081 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3082 }
3083 return parts;
3084}
3085
3086/* Subroutine of expand_expr: return nonzero iff there is no way that
3087 EXP can reference X, which is being modified. */
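
/* A minimal sketch of the intended use: before storing into TARGET,
   check that evaluating EXP cannot clobber it, and fall back to a
   fresh pseudo otherwise (disabled; both names hypothetical):  */
#if 0
  if (! safe_from_p (target, exp))
    target = gen_reg_rtx (GET_MODE (target));
#endif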
3088
3089static int
3090safe_from_p (x, exp)
3091 rtx x;
3092 tree exp;
3093{
3094 rtx exp_rtl = 0;
3095 int i, nops;
3096
3097 if (x == 0)
3098 return 1;
3099
3100 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3101 find the underlying pseudo. */
3102 if (GET_CODE (x) == SUBREG)
3103 {
3104 x = SUBREG_REG (x);
3105 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3106 return 0;
3107 }
3108
3109 /* If X is a location in the outgoing argument area, it is always safe. */
3110 if (GET_CODE (x) == MEM
3111 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3112 || (GET_CODE (XEXP (x, 0)) == PLUS
3113 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3114 return 1;
3115
3116 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3117 {
3118 case 'd':
3119 exp_rtl = DECL_RTL (exp);
3120 break;
3121
3122 case 'c':
3123 return 1;
3124
3125 case 'x':
3126 if (TREE_CODE (exp) == TREE_LIST)
3127 return ((TREE_VALUE (exp) == 0
3128 || safe_from_p (x, TREE_VALUE (exp)))
3129 && (TREE_CHAIN (exp) == 0
3130 || safe_from_p (x, TREE_CHAIN (exp))));
3131 else
3132 return 0;
3133
3134 case '1':
3135 return safe_from_p (x, TREE_OPERAND (exp, 0));
3136
3137 case '2':
3138 case '<':
3139 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3140 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3141
3142 case 'e':
3143 case 'r':
3144 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3145 the expression. If it is set, we conflict iff we are that rtx or
3146 both are in memory. Otherwise, we check all operands of the
3147 expression recursively. */
3148
3149 switch (TREE_CODE (exp))
3150 {
3151 case ADDR_EXPR:
3152 return (staticp (TREE_OPERAND (exp, 0))
3153 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3154
3155 case INDIRECT_REF:
3156 if (GET_CODE (x) == MEM)
3157 return 0;
3158 break;
3159
3160 case CALL_EXPR:
3161 exp_rtl = CALL_EXPR_RTL (exp);
3162 if (exp_rtl == 0)
3163 {
3164 /* Assume that the call will clobber all hard registers and
3165 all of memory. */
3166 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3167 || GET_CODE (x) == MEM)
3168 return 0;
3169 }
3170
3171 break;
3172
3173 case RTL_EXPR:
3174 exp_rtl = RTL_EXPR_RTL (exp);
3175 if (exp_rtl == 0)
3176 /* We don't know what this can modify. */
3177 return 0;
3178
3179 break;
3180
3181 case WITH_CLEANUP_EXPR:
3182 exp_rtl = RTL_EXPR_RTL (exp);
3183 break;
3184
3185 case SAVE_EXPR:
3186 exp_rtl = SAVE_EXPR_RTL (exp);
3187 break;
3188
3189 case BIND_EXPR:
3190 /* The only operand we look at is operand 1. The rest aren't
3191 part of the expression. */
3192 return safe_from_p (x, TREE_OPERAND (exp, 1));
3193
3194 case METHOD_CALL_EXPR:
3195 /* This takes a rtx argument, but shouldn't appear here. */
3196 abort ();
3197 }
3198
3199 /* If we have an rtx, we do not need to scan our operands. */
3200 if (exp_rtl)
3201 break;
3202
3203 nops = tree_code_length[(int) TREE_CODE (exp)];
3204 for (i = 0; i < nops; i++)
3205 if (TREE_OPERAND (exp, i) != 0
3206 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3207 return 0;
3208 }
3209
3210 /* If we have an rtl, find any enclosed object. Then see if we conflict
3211 with it. */
3212 if (exp_rtl)
3213 {
3214 if (GET_CODE (exp_rtl) == SUBREG)
3215 {
3216 exp_rtl = SUBREG_REG (exp_rtl);
3217 if (GET_CODE (exp_rtl) == REG
3218 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3219 return 0;
3220 }
3221
3222 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3223 are memory and EXP is not readonly. */
3224 return ! (rtx_equal_p (x, exp_rtl)
3225 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3226 && ! TREE_READONLY (exp)));
3227 }
3228
3229 /* If we reach here, it is safe. */
3230 return 1;
3231}
3232
3233/* Subroutine of expand_expr: return nonzero iff EXP is an
3234 expression whose type is statically determinable. */
3235
3236static int
3237fixed_type_p (exp)
3238 tree exp;
3239{
3240 if (TREE_CODE (exp) == PARM_DECL
3241 || TREE_CODE (exp) == VAR_DECL
3242 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3243 || TREE_CODE (exp) == COMPONENT_REF
3244 || TREE_CODE (exp) == ARRAY_REF)
3245 return 1;
3246 return 0;
3247}
3248\f
3249/* expand_expr: generate code for computing expression EXP.
3250 An rtx for the computed value is returned. The value is never null.
3251 In the case of a void EXP, const0_rtx is returned.
3252
3253 The value may be stored in TARGET if TARGET is nonzero.
3254 TARGET is just a suggestion; callers must assume that
3255 the rtx returned may not be the same as TARGET.
3256
3257 If TARGET is CONST0_RTX, it means that the value will be ignored.
3258
3259 If TMODE is not VOIDmode, it suggests generating the
3260 result in mode TMODE. But this is done only when convenient.
 3261	 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3262 TMODE is just a suggestion; callers must assume that
3263 the rtx returned may not have mode TMODE.
3264
3265 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3266 with a constant address even if that address is not normally legitimate.
3267 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3268
3269 If MODIFIER is EXPAND_SUM then when EXP is an addition
3270 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3271 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3272 products as above, or REG or MEM, or constant.
3273 Ordinarily in such cases we would output mul or add instructions
3274 and then return a pseudo reg containing the sum.
3275
3276 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3277 it also marks a label as absolutely required (it can't be dead).
26fcb35a 3278 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6dc42e49 3279 This is used for outputting expressions used in initializers. */
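
/* A minimal sketch of the commonest call, as used throughout this
   file: expand a hypothetical tree EXP with no suggested target and no
   special modifier (disabled):  */
#if 0
  op0 = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#endif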
3280
3281rtx
3282expand_expr (exp, target, tmode, modifier)
3283 register tree exp;
3284 rtx target;
3285 enum machine_mode tmode;
3286 enum expand_modifier modifier;
3287{
3288 register rtx op0, op1, temp;
3289 tree type = TREE_TYPE (exp);
3290 int unsignedp = TREE_UNSIGNED (type);
3291 register enum machine_mode mode = TYPE_MODE (type);
3292 register enum tree_code code = TREE_CODE (exp);
3293 optab this_optab;
3294 /* Use subtarget as the target for operand 0 of a binary operation. */
3295 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3296 rtx original_target = target;
3297 int ignore = (target == const0_rtx
3298 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3299 || code == CONVERT_EXPR || code == REFERENCE_EXPR)
3300 && TREE_CODE (type) == VOID_TYPE));
3301 tree context;
3302
3303 /* Don't use hard regs as subtargets, because the combiner
3304 can only handle pseudo regs. */
3305 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3306 subtarget = 0;
3307 /* Avoid subtargets inside loops,
3308 since they hide some invariant expressions. */
3309 if (preserve_subexpressions_p ())
3310 subtarget = 0;
3311
3312 /* If we are going to ignore this result, we need only do something
3313 if there is a side-effect somewhere in the expression. If there
3314 is, short-circuit the most common cases here. */
bbf6f052 3315
3316 if (ignore)
3317 {
3318 if (! TREE_SIDE_EFFECTS (exp))
3319 return const0_rtx;
3320
3321 /* Ensure we reference a volatile object even if value is ignored. */
3322 if (TREE_THIS_VOLATILE (exp)
3323 && TREE_CODE (exp) != FUNCTION_DECL
3324 && mode != VOIDmode && mode != BLKmode)
3325 {
3326 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3327 if (GET_CODE (temp) == MEM)
3328 temp = copy_to_reg (temp);
3329 return const0_rtx;
3330 }
3331
3332 if (TREE_CODE_CLASS (code) == '1')
3333 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3334 VOIDmode, modifier);
3335 else if (TREE_CODE_CLASS (code) == '2'
3336 || TREE_CODE_CLASS (code) == '<')
3337 {
3338 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3339 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3340 return const0_rtx;
3341 }
3342 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3343 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3344 /* If the second operand has no side effects, just evaluate
3345 the first. */
3346 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3347 VOIDmode, modifier);
3348
3349 target = 0, original_target = 0;
3350 }
bbf6f052 3351
e44842fe
RK
 3352	  /* If we will do cse, generate all results into pseudo registers
3353 since 1) that allows cse to find more things
3354 and 2) otherwise cse could produce an insn the machine
3355 cannot support. */
3356
3357 if (! cse_not_expected && mode != BLKmode && target
3358 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3359 target = subtarget;
3360
3361 switch (code)
3362 {
3363 case LABEL_DECL:
3364 {
3365 tree function = decl_function_context (exp);
3366 /* Handle using a label in a containing function. */
3367 if (function != current_function_decl && function != 0)
3368 {
3369 struct function *p = find_function_data (function);
3370 /* Allocate in the memory associated with the function
3371 that the label is in. */
3372 push_obstacks (p->function_obstack,
3373 p->function_maybepermanent_obstack);
3374
3375 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3376 label_rtx (exp), p->forced_labels);
3377 pop_obstacks ();
3378 }
3379 else if (modifier == EXPAND_INITIALIZER)
3380 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3381 label_rtx (exp), forced_labels);
26fcb35a 3382 temp = gen_rtx (MEM, FUNCTION_MODE,
b552441b 3383 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3384 if (function != current_function_decl && function != 0)
3385 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3386 return temp;
b552441b 3387 }
3388
3389 case PARM_DECL:
3390 if (DECL_RTL (exp) == 0)
3391 {
3392 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 3393 return CONST0_RTX (mode);
3394 }
3395
3396 case FUNCTION_DECL:
3397 case VAR_DECL:
3398 case RESULT_DECL:
3399 if (DECL_RTL (exp) == 0)
3400 abort ();
 3401	    /* Ensure the variable is marked as used even if it doesn't go through
 3402	       a parser.  If it hasn't been used yet, write out an external
3403 definition. */
3404 if (! TREE_USED (exp))
3405 {
3406 assemble_external (exp);
3407 TREE_USED (exp) = 1;
3408 }
3409
3410 /* Handle variables inherited from containing functions. */
3411 context = decl_function_context (exp);
3412
3413 /* We treat inline_function_decl as an alias for the current function
3414 because that is the inline function whose vars, types, etc.
3415 are being merged into the current function.
3416 See expand_inline_function. */
3417 if (context != 0 && context != current_function_decl
3418 && context != inline_function_decl
3419 /* If var is static, we don't need a static chain to access it. */
3420 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3421 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3422 {
3423 rtx addr;
3424
3425 /* Mark as non-local and addressable. */
81feeecb 3426 DECL_NONLOCAL (exp) = 1;
bbf6f052
RK
3427 mark_addressable (exp);
3428 if (GET_CODE (DECL_RTL (exp)) != MEM)
3429 abort ();
3430 addr = XEXP (DECL_RTL (exp), 0);
3431 if (GET_CODE (addr) == MEM)
3432 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3433 else
3434 addr = fix_lexical_addr (addr, exp);
3435 return change_address (DECL_RTL (exp), mode, addr);
3436 }
4af3895e 3437
bbf6f052
RK
3438 /* This is the case of an array whose size is to be determined
3439 from its initializer, while the initializer is still being parsed.
3440 See expand_decl. */
3441 if (GET_CODE (DECL_RTL (exp)) == MEM
3442 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3443 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3444 XEXP (DECL_RTL (exp), 0));
3445 if (GET_CODE (DECL_RTL (exp)) == MEM
3446 && modifier != EXPAND_CONST_ADDRESS
3447 && modifier != EXPAND_SUM
3448 && modifier != EXPAND_INITIALIZER)
3449 {
3450 /* DECL_RTL probably contains a constant address.
3451 On RISC machines where a constant address isn't valid,
3452 make some insns to get that address into a register. */
3453 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3454 || (flag_force_addr
3455 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3456 return change_address (DECL_RTL (exp), VOIDmode,
3457 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3458 }
3459
3460 /* If the mode of DECL_RTL does not match that of the decl, it
3461 must be a promoted value. We return a SUBREG of the wanted mode,
3462 but mark it so that we know that it was already extended. */
3463
3464 if (GET_CODE (DECL_RTL (exp)) == REG
3465 && GET_MODE (DECL_RTL (exp)) != mode)
3466 {
3467 enum machine_mode decl_mode = DECL_MODE (exp);
3468
3469 /* Get the signedness used for this variable. Ensure we get the
3470 same mode we got when the variable was declared. */
3471
3472 PROMOTE_MODE (decl_mode, unsignedp, type);
3473
3474 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3475 abort ();
3476
3477 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3478 SUBREG_PROMOTED_VAR_P (temp) = 1;
3479 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3480 return temp;
3481 }
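/* Illustrative note (ours, not from the original source): on a target
   whose PROMOTE_MODE widens QImode to SImode, a `char' variable lives
   in an SImode pseudo.  Asking for it in QImode then yields
   (subreg:QI (reg:SI n) 0) with SUBREG_PROMOTED_VAR_P set, so later
   code knows the upper bits already hold a valid extension.  */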
3482
3483 return DECL_RTL (exp);
3484
3485 case INTEGER_CST:
3486 return immed_double_const (TREE_INT_CST_LOW (exp),
3487 TREE_INT_CST_HIGH (exp),
3488 mode);
3489
3490 case CONST_DECL:
3491 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3492
3493 case REAL_CST:
3494 /* If optimized, generate immediate CONST_DOUBLE
3495 which will be turned into memory by reload if necessary.
3496
3497 We used to force a register so that loop.c could see it. But
3498 this does not allow gen_* patterns to perform optimizations with
3499 the constants. It also produces two insns in cases like "x = 1.0;".
3500 On most machines, floating-point constants are not permitted in
3501 many insns, so we'd end up copying it to a register in any case.
3502
3503 Now, we do the copying in expand_binop, if appropriate. */
3504 return immed_real_const (exp);
3505
3506 case COMPLEX_CST:
3507 case STRING_CST:
3508 if (! TREE_CST_RTL (exp))
3509 output_constant_def (exp);
3510
3511 /* TREE_CST_RTL probably contains a constant address.
3512 On RISC machines where a constant address isn't valid,
3513 make some insns to get that address into a register. */
3514 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3515 && modifier != EXPAND_CONST_ADDRESS
3516 && modifier != EXPAND_INITIALIZER
3517 && modifier != EXPAND_SUM
3518 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3519 return change_address (TREE_CST_RTL (exp), VOIDmode,
3520 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3521 return TREE_CST_RTL (exp);
3522
3523 case SAVE_EXPR:
3524 context = decl_function_context (exp);
3525 /* We treat inline_function_decl as an alias for the current function
3526 because that is the inline function whose vars, types, etc.
3527 are being merged into the current function.
3528 See expand_inline_function. */
3529 if (context == current_function_decl || context == inline_function_decl)
3530 context = 0;
3531
3532 /* If this is non-local, handle it. */
3533 if (context)
3534 {
3535 temp = SAVE_EXPR_RTL (exp);
3536 if (temp && GET_CODE (temp) == REG)
3537 {
3538 put_var_into_stack (exp);
3539 temp = SAVE_EXPR_RTL (exp);
3540 }
3541 if (temp == 0 || GET_CODE (temp) != MEM)
3542 abort ();
3543 return change_address (temp, mode,
3544 fix_lexical_addr (XEXP (temp, 0), exp));
3545 }
3546 if (SAVE_EXPR_RTL (exp) == 0)
3547 {
3548 if (mode == BLKmode)
3549 temp
3550 = assign_stack_temp (mode,
3551 int_size_in_bytes (TREE_TYPE (exp)), 0);
3552 else
3553 {
3554 enum machine_mode var_mode = mode;
3555
3556 if (TREE_CODE (type) == INTEGER_TYPE
3557 || TREE_CODE (type) == ENUMERAL_TYPE
3558 || TREE_CODE (type) == BOOLEAN_TYPE
3559 || TREE_CODE (type) == CHAR_TYPE
3560 || TREE_CODE (type) == REAL_TYPE
3561 || TREE_CODE (type) == POINTER_TYPE
3562 || TREE_CODE (type) == OFFSET_TYPE)
3563 {
3564 PROMOTE_MODE (var_mode, unsignedp, type);
3565 }
3566
3567 temp = gen_reg_rtx (var_mode);
3568 }
3569
bbf6f052 3570 SAVE_EXPR_RTL (exp) = temp;
3571 if (!optimize && GET_CODE (temp) == REG)
3572 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3573 save_expr_regs);
3574
3575 /* If the mode of TEMP does not match that of the expression, it
3576 must be a promoted value. We pass store_expr a SUBREG of the
3577 wanted mode but mark it so that we know that it was already
3578 extended. Note that `unsignedp' was modified above in
3579 this case. */
3580
3581 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3582 {
3583 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3584 SUBREG_PROMOTED_VAR_P (temp) = 1;
3585 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3586 }
3587
3588 store_expr (TREE_OPERAND (exp, 0), temp, 0);
bbf6f052 3589 }
3590
3591 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3592 must be a promoted value. We return a SUBREG of the wanted mode,
3593 but mark it so that we know that it was already extended. Note
3594 that `unsignedp' was modified above in this case. */
3595
3596 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3597 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3598 {
3599 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3600 SUBREG_PROMOTED_VAR_P (temp) = 1;
3601 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3602 return temp;
3603 }
3604
3605 return SAVE_EXPR_RTL (exp);
3606
3607 case EXIT_EXPR:
3608 expand_exit_loop_if_false (NULL_PTR,
3609 invert_truthvalue (TREE_OPERAND (exp, 0)));
3610 return const0_rtx;
3611
3612 case LOOP_EXPR:
3613 expand_start_loop (1);
3614 expand_expr_stmt (TREE_OPERAND (exp, 0));
3615 expand_end_loop ();
3616
3617 return const0_rtx;
3618
3619 case BIND_EXPR:
3620 {
3621 tree vars = TREE_OPERAND (exp, 0);
3622 int vars_need_expansion = 0;
3623
3624 /* Need to open a binding contour here because
3625 if there are any cleanups they must be contained here. */
3626 expand_start_bindings (0);
3627
3628 /* Mark the corresponding BLOCK for output in its proper place. */
3629 if (TREE_OPERAND (exp, 2) != 0
3630 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3631 insert_block (TREE_OPERAND (exp, 2));
3632
3633 /* If VARS have not yet been expanded, expand them now. */
3634 while (vars)
3635 {
3636 if (DECL_RTL (vars) == 0)
3637 {
3638 vars_need_expansion = 1;
3639 expand_decl (vars);
3640 }
3641 expand_decl_init (vars);
3642 vars = TREE_CHAIN (vars);
3643 }
3644
3645 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3646
3647 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3648
3649 return temp;
3650 }
3651
3652 case RTL_EXPR:
3653 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3654 abort ();
3655 emit_insns (RTL_EXPR_SEQUENCE (exp));
3656 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3657 return RTL_EXPR_RTL (exp);
3658
3659 case CONSTRUCTOR:
3660 /* If we don't need the result, just ensure we evaluate any
3661 subexpressions. */
3662 if (ignore)
3663 {
3664 tree elt;
3665 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3666 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3667 return const0_rtx;
3668 }
3669 /* All elts simple constants => refer to a constant in memory. But
3670 if this is a non-BLKmode mode, let it store a field at a time
3671 since that should make a CONST_INT or CONST_DOUBLE when we
3672 fold. If we are making an initializer and all operands are
3673 constant, put it in memory as well. */
3674 else if ((TREE_STATIC (exp)
3675 && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3676 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
3677 {
3678 rtx constructor = output_constant_def (exp);
3679 if (modifier != EXPAND_CONST_ADDRESS
3680 && modifier != EXPAND_INITIALIZER
3681 && modifier != EXPAND_SUM
3682 && !memory_address_p (GET_MODE (constructor),
3683 XEXP (constructor, 0)))
3684 constructor = change_address (constructor, VOIDmode,
3685 XEXP (constructor, 0));
3686 return constructor;
3687 }
3688
3689 else
3690 {
3691 if (target == 0 || ! safe_from_p (target, exp))
3692 {
3693 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3694 target = gen_reg_rtx (mode);
3695 else
3696 {
3697 enum tree_code c = TREE_CODE (type);
3698 target
3699 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3700 if (c == RECORD_TYPE || c == UNION_TYPE
3701 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
3b94d087 3702 MEM_IN_STRUCT_P (target) = 1;
3703 }
3704 }
3705 store_constructor (exp, target);
3706 return target;
3707 }
3708
3709 case INDIRECT_REF:
3710 {
3711 tree exp1 = TREE_OPERAND (exp, 0);
3712 tree exp2;
3713
3714 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
3715 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3716 This code has the same general effect as simply doing
3717 expand_expr on the save expr, except that the expression PTR
3718 is computed for use as a memory address. This means different
3719 code, suitable for indexing, may be generated. */
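/* Illustrative example (ours): for `*p += 1' the address P is wrapped
   in a SAVE_EXPR; expanding it with EXPAND_SUM below lets the address
   come out in a register-plus-offset form that later references to the
   same SAVE_EXPR can reuse.  */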
3720 if (TREE_CODE (exp1) == SAVE_EXPR
3721 && SAVE_EXPR_RTL (exp1) == 0
3722 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3723 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3724 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3725 {
3726 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3727 VOIDmode, EXPAND_SUM);
3728 op0 = memory_address (mode, temp);
3729 op0 = copy_all_regs (op0);
3730 SAVE_EXPR_RTL (exp1) = op0;
3731 }
3732 else
3733 {
906c4e36 3734 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3735 op0 = memory_address (mode, op0);
3736 }
3737
3738 temp = gen_rtx (MEM, mode, op0);
3739 /* If address was computed by addition,
3740 mark this as an element of an aggregate. */
3741 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3742 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3743 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3744 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3745 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3746 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
e7f3c83f 3747 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
3748 || (TREE_CODE (exp1) == ADDR_EXPR
3749 && (exp2 = TREE_OPERAND (exp1, 0))
3750 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3751 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3752 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
3753 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
8c8a8e34 3754 MEM_IN_STRUCT_P (temp) = 1;
f94cc92f 3755 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
89742723 3756#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3757 a location is accessed through a pointer to const does not mean
3758 that the value there can never change. */
8c8a8e34 3759 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
bbf6f052 3760#endif
3761 return temp;
3762 }
3763
3764 case ARRAY_REF:
3765 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
3766 abort ();
bbf6f052 3767
bbf6f052 3768 {
3769 tree array = TREE_OPERAND (exp, 0);
3770 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
3771 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3772 tree index = TREE_OPERAND (exp, 1);
3773 tree index_type = TREE_TYPE (index);
bbf6f052 3774 int i;
bbf6f052 3775
3776 /* Optimize the special-case of a zero lower bound. */
3777 if (! integer_zerop (low_bound))
3778 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3779
3780 if (TREE_CODE (index) != INTEGER_CST
3781 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3782 {
3783 /* Nonconstant array index or nonconstant element size.
3784 Generate the tree for *(&array+index) and expand that,
3785 except do it in a language-independent way
3786 and don't complain about non-lvalue arrays.
3787 `mark_addressable' should already have been called
3788 for any array for which this case will be reached. */
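/* Hedged sketch (ours): for a variable index I, A[I] is rebuilt below
   roughly as
     *(&A + I * sizeof (ELT))
   i.e. an INDIRECT_REF of the array's address plus the scaled index,
   which is then re-expanded.  */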
3789
3790 /* Don't forget the const or volatile flag from the array
3791 element. */
3792 tree variant_type = build_type_variant (type,
3793 TREE_READONLY (exp),
3794 TREE_THIS_VOLATILE (exp));
3795 tree array_adr = build1 (ADDR_EXPR,
3796 build_pointer_type (variant_type), array);
3797 tree elt;
3798
3799 /* Convert the integer argument to a type the same size as a
3800 pointer so the multiply won't overflow spuriously. */
3801 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3802 index = convert (type_for_size (POINTER_SIZE, 0), index);
3803
3804 /* Don't think the address has side effects
3805 just because the array does.
3806 (In some cases the address might have side effects,
3807 and we fail to record that fact here. However, it should not
3808 matter, since expand_expr should not care.) */
3809 TREE_SIDE_EFFECTS (array_adr) = 0;
3810
3811 elt = build1 (INDIRECT_REF, type,
3812 fold (build (PLUS_EXPR,
3813 TYPE_POINTER_TO (variant_type),
3814 array_adr,
3815 fold (build (MULT_EXPR,
3816 TYPE_POINTER_TO (variant_type),
3817 index,
3818 size_in_bytes (type))))));
3819
3820 /* Volatility, etc., of new expression is same as old
3821 expression. */
3822 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3823 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3824 TREE_READONLY (elt) = TREE_READONLY (exp);
3825
3826 return expand_expr (elt, target, tmode, modifier);
3827 }
3828
3829 /* Fold an expression like: "foo"[2].
3830 This is not done in fold so it won't happen inside &. */
3831
3832 if (TREE_CODE (array) == STRING_CST
3833 && TREE_CODE (index) == INTEGER_CST
3834 && !TREE_INT_CST_HIGH (index)
3835 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
bbf6f052 3836 {
742920c7 3837 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
bbf6f052 3838 {
742920c7 3839 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
3840 TREE_TYPE (exp) = integer_type_node;
3841 return expand_expr (exp, target, tmode, modifier);
3842 }
742920c7 3843 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
bbf6f052 3844 {
742920c7 3845 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
bbf6f052 3846 TREE_TYPE (exp) = integer_type_node;
3847 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
3848 exp),
3849 target, tmode, modifier);
3850 }
3851 }
bbf6f052 3852
3853 /* If this is a constant index into a constant array,
3854 just get the value from the array. Handle both the cases when
3855 we have an explicit constructor and when our operand is a variable
3856 that was declared const. */
4af3895e 3857
3858 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
3859 {
3860 if (TREE_CODE (index) == INTEGER_CST
3861 && TREE_INT_CST_HIGH (index) == 0)
3862 {
3863 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3864
3865 i = TREE_INT_CST_LOW (index);
3866 while (elem && i--)
3867 elem = TREE_CHAIN (elem);
3868 if (elem)
3869 return expand_expr (fold (TREE_VALUE (elem)), target,
3870 tmode, modifier);
3871 }
3872 }
4af3895e 3873
3874 else if (optimize >= 1
3875 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
3876 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
3877 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
3878 {
3879 if (TREE_CODE (index) == INTEGER_CST
3880 && TREE_INT_CST_HIGH (index) == 0)
3881 {
3882 tree init = DECL_INITIAL (array);
3883
3884 i = TREE_INT_CST_LOW (index);
3885 if (TREE_CODE (init) == CONSTRUCTOR)
3886 {
3887 tree elem = CONSTRUCTOR_ELTS (init);
3888
3889 while (elem && i--)
3890 elem = TREE_CHAIN (elem);
3891 if (elem)
3892 return expand_expr (fold (TREE_VALUE (elem)), target,
3893 tmode, modifier);
3894 }
3895 else if (TREE_CODE (init) == STRING_CST
3896 && i < TREE_STRING_LENGTH (init))
3897 {
3898 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3899 return convert_to_mode (mode, temp, 0);
3900 }
3901 }
3902 }
3903 }
8c8a8e34 3904
3905 /* Treat array-ref with constant index as a component-ref. */
3906
3907 case COMPONENT_REF:
3908 case BIT_FIELD_REF:
3909 /* If the operand is a CONSTRUCTOR, we can just extract the
3910 appropriate field if it is present. */
3911 if (code != ARRAY_REF
3912 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3913 {
3914 tree elt;
3915
3916 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3917 elt = TREE_CHAIN (elt))
3918 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3919 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3920 }
3921
3922 {
3923 enum machine_mode mode1;
3924 int bitsize;
3925 int bitpos;
7bb0943f 3926 tree offset;
bbf6f052 3927 int volatilep = 0;
7bb0943f 3928 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3929 &mode1, &unsignedp, &volatilep);
3930
3931 /* If we got back the original object, something is wrong. Perhaps
3932 we are evaluating an expression too early. In any event, don't
3933 infinitely recurse. */
3934 if (tem == exp)
3935 abort ();
3936
3937 /* In some cases, we will be offsetting OP0's address by a constant.
3938 So get it as a sum, if possible. If we will be using it
3939 directly in an insn, we validate it. */
906c4e36 3940 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052 3941
8c8a8e34 3942 /* If this is a constant, put it into a register if it is a
8008b228 3943 legitimate constant and memory if it isn't. */
3944 if (CONSTANT_P (op0))
3945 {
3946 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
f2878c6b 3947 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
3948 op0 = force_reg (mode, op0);
3949 else
3950 op0 = validize_mem (force_const_mem (mode, op0));
3951 }
3952
3953 if (offset != 0)
3954 {
906c4e36 3955 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3956
3957 if (GET_CODE (op0) != MEM)
3958 abort ();
3959 op0 = change_address (op0, VOIDmode,
3960 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3961 force_reg (Pmode, offset_rtx)));
3962 }
3963
3964 /* Don't forget about volatility even if this is a bitfield. */
3965 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3966 {
3967 op0 = copy_rtx (op0);
3968 MEM_VOLATILE_P (op0) = 1;
3969 }
3970
3971 /* In cases where an aligned union has an unaligned object
3972 as a field, we might be extracting a BLKmode value from
3973 an integer-mode (e.g., SImode) object. Handle this case
3974 by doing the extract into an object as wide as the field
3975 (which we know to be the width of a basic mode), then
3976 storing into memory, and changing the mode to BLKmode. */
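/* Concrete case (our illustration): an SImode-aligned union may hold a
   BLKmode field occupying one 32-bit word; extract_bit_field pulls it
   into an SImode value, which is spilled to a stack slot, and the slot
   is returned re-marked as a BLKmode MEM.  */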
bbf6f052 3977 if (mode1 == VOIDmode
3978 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3979 && modifier != EXPAND_CONST_ADDRESS
3980 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3981 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
3982 /* If the field isn't aligned enough to fetch as a memref,
3983 fetch it as a bit field. */
3984 || (STRICT_ALIGNMENT
3985 && TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
3986 || (STRICT_ALIGNMENT && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 3987 {
3988 enum machine_mode ext_mode = mode;
3989
3990 if (ext_mode == BLKmode)
3991 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3992
3993 if (ext_mode == BLKmode)
3994 abort ();
3995
3996 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3997 unsignedp, target, ext_mode, ext_mode,
3998 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3999 int_size_in_bytes (TREE_TYPE (tem)));
4000 if (mode == BLKmode)
4001 {
4002 rtx new = assign_stack_temp (ext_mode,
4003 bitsize / BITS_PER_UNIT, 0);
4004
4005 emit_move_insn (new, op0);
4006 op0 = copy_rtx (new);
4007 PUT_MODE (op0, BLKmode);
092dded9 4008 MEM_IN_STRUCT_P (op0) = 1;
4009 }
4010
4011 return op0;
4012 }
4013
4014 /* Get a reference to just this component. */
4015 if (modifier == EXPAND_CONST_ADDRESS
4016 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4017 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4018 (bitpos / BITS_PER_UNIT)));
4019 else
4020 op0 = change_address (op0, mode1,
4021 plus_constant (XEXP (op0, 0),
4022 (bitpos / BITS_PER_UNIT)));
4023 MEM_IN_STRUCT_P (op0) = 1;
4024 MEM_VOLATILE_P (op0) |= volatilep;
4025 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4026 return op0;
4027 if (target == 0)
4028 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4029 convert_move (target, op0, unsignedp);
4030 return target;
4031 }
4032
4033 case OFFSET_REF:
4034 {
da120c2f 4035 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
bbf6f052 4036 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
906c4e36 4037 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4038 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4039 MEM_IN_STRUCT_P (temp) = 1;
f94cc92f 4040 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
89742723 4041#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4042 a location is accessed through a pointer to const does not mean
4043 that the value there can never change. */
4044 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4045#endif
4046 return temp;
4047 }
4048
4049 /* Intended for a reference to a buffer of a file-object in Pascal.
4050 But it's not certain that a special tree code will really be
4051 necessary for these. INDIRECT_REF might work for them. */
4052 case BUFFER_REF:
4053 abort ();
4054
4055 /* IN_EXPR: Inlined Pascal set IN expression.
4056
4057 Algorithm:
4058 rlo = set_low - (set_low%bits_per_word);
4059 the_word = set [ (index - rlo)/bits_per_word ];
4060 bit_index = index % bits_per_word;
4061 bitmask = 1 << bit_index;
4062 return !!(the_word & bitmask); */
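/* Worked example (ours, assuming bits_per_word == 8): for set_low == 10
   and index == 21:
     rlo = 10 - (10 % 8) = 8
     the_word = set [(21 - 8) / 8] = set [1]
     bit_index = 21 % 8 = 5, bitmask = 1 << 5 = 0x20
   so the result is bit 5 of the second byte of the set.  */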
4063 case IN_EXPR:
4064 preexpand_calls (exp);
4065 {
4066 tree set = TREE_OPERAND (exp, 0);
4067 tree index = TREE_OPERAND (exp, 1);
4068 tree set_type = TREE_TYPE (set);
4069
4070 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4071 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4072
4073 rtx index_val;
4074 rtx lo_r;
4075 rtx hi_r;
4076 rtx rlow;
4077 rtx diff, quo, rem, addr, bit, result;
4078 rtx setval, setaddr;
4079 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4080
4081 if (target == 0)
17938e57 4082 target = gen_reg_rtx (mode);
4083
4084 /* If domain is empty, answer is no. */
4085 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4086 return const0_rtx;
4087
4088 index_val = expand_expr (index, 0, VOIDmode, 0);
4089 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4090 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4091 setval = expand_expr (set, 0, VOIDmode, 0);
4092 setaddr = XEXP (setval, 0);
4093
4094 /* Compare index against bounds, if they are constant. */
4095 if (GET_CODE (index_val) == CONST_INT
4096 && GET_CODE (lo_r) == CONST_INT
4097 && INTVAL (index_val) < INTVAL (lo_r))
4098 return const0_rtx;
4099
4100 if (GET_CODE (index_val) == CONST_INT
4101 && GET_CODE (hi_r) == CONST_INT
4102 && INTVAL (hi_r) < INTVAL (index_val))
4103 return const0_rtx;
4104
4105 /* If we get here, we have to generate the code for both cases
4106 (in range and out of range). */
4107
4108 op0 = gen_label_rtx ();
4109 op1 = gen_label_rtx ();
4110
4111 if (! (GET_CODE (index_val) == CONST_INT
4112 && GET_CODE (lo_r) == CONST_INT))
4113 {
4114 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4115 GET_MODE (index_val), 0, 0);
4116 emit_jump_insn (gen_blt (op1));
4117 }
4118
4119 if (! (GET_CODE (index_val) == CONST_INT
4120 && GET_CODE (hi_r) == CONST_INT))
4121 {
4122 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4123 GET_MODE (index_val), 0, 0);
4124 emit_jump_insn (gen_bgt (op1));
4125 }
4126
4127 /* Calculate the element number of bit zero in the first word
4128 of the set. */
4129 if (GET_CODE (lo_r) == CONST_INT)
4130 rlow = GEN_INT (INTVAL (lo_r)
4131 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 4132 else
4133 rlow = expand_binop (index_mode, and_optab, lo_r,
4134 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4135 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4136
4137 diff = expand_binop (index_mode, sub_optab,
17938e57 4138 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4139
4140 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
17938e57 4141 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
7308a047 4142 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
17938e57 4143 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4144 addr = memory_address (byte_mode,
4145 expand_binop (index_mode, add_optab,
4146 diff, setaddr, NULL_RTX, 0,
4147 OPTAB_LIB_WIDEN));
4148 /* Extract the bit we want to examine. */
4149 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4150 gen_rtx (MEM, byte_mode, addr),
4151 make_tree (TREE_TYPE (index), rem),
4152 NULL_RTX, 1);
4153 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4154 GET_MODE (target) == byte_mode ? target : 0,
7308a047 4155 1, OPTAB_LIB_WIDEN);
4156
4157 if (result != target)
4158 convert_move (target, result, 1);
4159
4160 /* Output the code to handle the out-of-range case. */
4161 emit_jump (op0);
4162 emit_label (op1);
4163 emit_move_insn (target, const0_rtx);
4164 emit_label (op0);
4165 return target;
4166 }
4167
4168 case WITH_CLEANUP_EXPR:
4169 if (RTL_EXPR_RTL (exp) == 0)
4170 {
4171 RTL_EXPR_RTL (exp)
4172 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4173 cleanups_this_call
4174 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4175 /* That's it for this cleanup. */
4176 TREE_OPERAND (exp, 2) = 0;
4177 }
4178 return RTL_EXPR_RTL (exp);
4179
4180 case CALL_EXPR:
4181 /* Check for a built-in function. */
4182 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4183 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4184 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4185 return expand_builtin (exp, target, subtarget, tmode, ignore);
4186 /* If this call was expanded already by preexpand_calls,
4187 just return the result we got. */
4188 if (CALL_EXPR_RTL (exp) != 0)
4189 return CALL_EXPR_RTL (exp);
8129842c 4190 return expand_call (exp, target, ignore);
4191
4192 case NON_LVALUE_EXPR:
4193 case NOP_EXPR:
4194 case CONVERT_EXPR:
4195 case REFERENCE_EXPR:
4196 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4197 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4198 if (TREE_CODE (type) == UNION_TYPE)
4199 {
4200 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4201 if (target == 0)
4202 {
4203 if (mode == BLKmode)
4204 {
4205 if (TYPE_SIZE (type) == 0
4206 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4207 abort ();
4208 target = assign_stack_temp (BLKmode,
4209 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4210 + BITS_PER_UNIT - 1)
4211 / BITS_PER_UNIT, 0);
4212 }
4213 else
4214 target = gen_reg_rtx (mode);
4215 }
4216 if (GET_CODE (target) == MEM)
4217 /* Store data into beginning of memory target. */
4218 store_expr (TREE_OPERAND (exp, 0),
4219 change_address (target, TYPE_MODE (valtype), 0), 0);
4220
4221 else if (GET_CODE (target) == REG)
4222 /* Store this field into a union of the proper type. */
4223 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4224 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4225 VOIDmode, 0, 1,
4226 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4227 else
4228 abort ();
4229
4230 /* Return the entire union. */
4231 return target;
4232 }
1499e0a8 4233 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4234 if (GET_MODE (op0) == mode)
4235 return op0;
4236 /* If arg is a constant integer being extended from a narrower mode,
4237 we must really truncate to get the extended bits right. Otherwise
4238 (unsigned long) (unsigned char) ("\377"[0])
4239 would come out as ffffffff. */
4240 if (GET_MODE (op0) == VOIDmode
4241 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4242 < GET_MODE_BITSIZE (mode)))
4243 {
4244 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
4245 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4246
4247 if (width < HOST_BITS_PER_WIDE_INT)
4248 {
4249 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4250 : CONST_DOUBLE_LOW (op0));
4251 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4252 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4253 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4254 else
4255 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4256
4257 op0 = GEN_INT (val);
4258 }
4259 else
4260 {
4261 op0 = (simplify_unary_operation
4262 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4263 ? ZERO_EXTEND : SIGN_EXTEND),
4264 mode, op0,
4265 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4266 if (op0 == 0)
4267 abort ();
4268 }
4269 }
4270 if (GET_MODE (op0) == VOIDmode)
bbf6f052 4271 return op0;
4272 if (modifier == EXPAND_INITIALIZER)
4273 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4274 if (flag_force_mem && GET_CODE (op0) == MEM)
4275 op0 = copy_to_reg (op0);
4276
4277 if (target == 0)
4278 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4279 else
4280 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4281 return target;
4282
4283 case PLUS_EXPR:
4284 /* We come here from MINUS_EXPR when the second operand is a constant. */
4285 plus_expr:
4286 this_optab = add_optab;
4287
4288 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4289 something else, make sure we add the register to the constant and
4290 then to the other thing. This case can occur during strength
4291 reduction and doing it this way will produce better code if the
4292 frame pointer or argument pointer is eliminated.
4293
4294 fold-const.c will ensure that the constant is always in the inner
4295 PLUS_EXPR, so the only case we need to do anything about is if
4296 sp, ap, or fp is our second argument, in which case we must swap
4297 the innermost first argument and our second argument. */
4298
4299 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4300 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4301 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4302 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4303 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4304 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4305 {
4306 tree t = TREE_OPERAND (exp, 1);
4307
4308 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4309 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4310 }
4311
4312 /* If the result is to be Pmode and we are adding an integer to
4313 something, we might be forming a constant. So try to use
4314 plus_constant. If it produces a sum and we can't accept it,
4315 use force_operand. This allows P = &ARR[const] to generate
4316 efficient code on machines where a SYMBOL_REF is not a valid
4317 address.
4318
4319 If this is an EXPAND_SUM call, always return the sum. */
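/* Illustrative case (ours): for P = &ARR[5] with 4-byte elements on a
   machine where (symbol_ref ARR) plus 20 is not a valid address by
   itself, plus_constant forms (plus (symbol_ref ARR) (const_int 20))
   and force_operand loads it into a register only when the context
   really needs a general operand.  */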
4320 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4321 || mode == Pmode)
bbf6f052 4322 {
4323 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4324 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4325 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
4326 {
4327 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4328 EXPAND_SUM);
4329 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4330 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4331 op1 = force_operand (op1, target);
4332 return op1;
4333 }
bbf6f052 4334
4335 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4336 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4337 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
4338 {
4339 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4340 EXPAND_SUM);
4341 if (! CONSTANT_P (op0))
4342 {
4343 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4344 VOIDmode, modifier);
4345 goto both_summands;
4346 }
4347 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4348 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4349 op0 = force_operand (op0, target);
4350 return op0;
4351 }
4352 }
4353
4354 /* No sense saving up arithmetic to be done
4355 if it's all in the wrong mode to form part of an address.
4356 And force_operand won't know whether to sign-extend or
4357 zero-extend. */
4358 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4359 || mode != Pmode)
4360 goto binop;
4361
4362 preexpand_calls (exp);
4363 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4364 subtarget = 0;
4365
4366 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
906c4e36 4367 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
bbf6f052 4368
c980ac49 4369 both_summands:
4370 /* Make sure any term that's a sum with a constant comes last. */
4371 if (GET_CODE (op0) == PLUS
4372 && CONSTANT_P (XEXP (op0, 1)))
4373 {
4374 temp = op0;
4375 op0 = op1;
4376 op1 = temp;
4377 }
4378 /* If adding to a sum including a constant,
4379 associate it to put the constant outside. */
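/* E.g. (illustration only): OP0 = X and OP1 = (plus Y (const_int 4))
   become OP0 = (plus X Y) and OP1 = (const_int 4), so the final sum is
   (plus (plus X Y) (const_int 4)) with the constant outermost.  */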
4380 if (GET_CODE (op1) == PLUS
4381 && CONSTANT_P (XEXP (op1, 1)))
4382 {
4383 rtx constant_term = const0_rtx;
4384
4385 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4386 if (temp != 0)
4387 op0 = temp;
4388 /* Ensure that MULT comes first if there is one. */
4389 else if (GET_CODE (op0) == MULT)
4390 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4391 else
4392 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4393
4394 /* Let's also eliminate constants from op0 if possible. */
4395 op0 = eliminate_constant_term (op0, &constant_term);
4396
4397 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4398 their sum should be a constant. Form it into OP1, since the
4399 result we want will then be OP0 + OP1. */
4400
4401 temp = simplify_binary_operation (PLUS, mode, constant_term,
4402 XEXP (op1, 1));
4403 if (temp != 0)
4404 op1 = temp;
4405 else
4406 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4407 }
4408
4409 /* Put a constant term last and put a multiplication first. */
4410 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4411 temp = op1, op1 = op0, op0 = temp;
4412
4413 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4414 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4415
4416 case MINUS_EXPR:
4417 /* Handle difference of two symbolic constants,
4418 for the sake of an initializer. */
4419 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4420 && really_constant_p (TREE_OPERAND (exp, 0))
4421 && really_constant_p (TREE_OPERAND (exp, 1)))
4422 {
4423 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4424 VOIDmode, modifier);
4425 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4426 VOIDmode, modifier);
4427 return gen_rtx (MINUS, mode, op0, op1);
4428 }
4429 /* Convert A - const to A + (-const). */
4430 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4431 {
4432 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4433 fold (build1 (NEGATE_EXPR, type,
4434 TREE_OPERAND (exp, 1))));
4435 goto plus_expr;
4436 }
4437 this_optab = sub_optab;
4438 goto binop;
4439
4440 case MULT_EXPR:
4441 preexpand_calls (exp);
4442 /* If first operand is constant, swap them.
4443 Thus the following special case checks need only
4444 check the second operand. */
4445 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4446 {
4447 register tree t1 = TREE_OPERAND (exp, 0);
4448 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4449 TREE_OPERAND (exp, 1) = t1;
4450 }
4451
4452 /* Attempt to return something suitable for generating an
4453 indexed address, for machines that support that. */
4454
4455 if (modifier == EXPAND_SUM && mode == Pmode
4456 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 4457 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4458 {
4459 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4460
4461 /* Apply distributive law if OP0 is x+c. */
4462 if (GET_CODE (op0) == PLUS
4463 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4464 return gen_rtx (PLUS, mode,
4465 gen_rtx (MULT, mode, XEXP (op0, 0),
4466 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4467 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4468 * INTVAL (XEXP (op0, 1))));
4469
4470 if (GET_CODE (op0) != REG)
906c4e36 4471 op0 = force_operand (op0, NULL_RTX);
4472 if (GET_CODE (op0) != REG)
4473 op0 = copy_to_mode_reg (mode, op0);
4474
4475 return gen_rtx (MULT, mode, op0,
906c4e36 4476 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4477 }
4478
4479 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4480 subtarget = 0;
4481
4482 /* Check for multiplying things that have been extended
4483 from a narrower type. If this machine supports multiplying
4484 in that narrower type with a result in the desired type,
4485 do it that way, and avoid the explicit type-conversion. */
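/* Hedged example: for `(int) (short) a * (int) (short) b' on a machine
   with a HImode-to-SImode smul_widen_optab pattern, the narrow HImode
   operands are multiplied directly and the SImode product appears in
   one insn, skipping both explicit sign-extensions.  */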
4486 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4487 && TREE_CODE (type) == INTEGER_TYPE
4488 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4489 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4490 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4491 && int_fits_type_p (TREE_OPERAND (exp, 1),
4492 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4493 /* Don't use a widening multiply if a shift will do. */
4494 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 4495 > HOST_BITS_PER_WIDE_INT)
4496 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4497 ||
4498 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4499 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4500 ==
4501 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4502 /* If both operands are extended, they must either both
4503 be zero-extended or both be sign-extended. */
4504 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4505 ==
4506 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4507 {
4508 enum machine_mode innermode
4509 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4510 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4511 ? umul_widen_optab : smul_widen_optab);
4512 if (mode == GET_MODE_WIDER_MODE (innermode)
4513 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4514 {
4515 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
906c4e36 4516 NULL_RTX, VOIDmode, 0);
bbf6f052 4517 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4518 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4519 VOIDmode, 0);
4520 else
4521 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
906c4e36 4522 NULL_RTX, VOIDmode, 0);
4523 goto binop2;
4524 }
4525 }
4526 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 4527 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4528 return expand_mult (mode, op0, op1, target, unsignedp);
4529
4530 case TRUNC_DIV_EXPR:
4531 case FLOOR_DIV_EXPR:
4532 case CEIL_DIV_EXPR:
4533 case ROUND_DIV_EXPR:
4534 case EXACT_DIV_EXPR:
4535 preexpand_calls (exp);
4536 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4537 subtarget = 0;
4538 /* Possible optimization: compute the dividend with EXPAND_SUM
4539 then if the divisor is constant can optimize the case
4540 where some terms of the dividend have coeffs divisible by it. */
4541 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 4542 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4543 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4544
4545 case RDIV_EXPR:
4546 this_optab = flodiv_optab;
4547 goto binop;
4548
4549 case TRUNC_MOD_EXPR:
4550 case FLOOR_MOD_EXPR:
4551 case CEIL_MOD_EXPR:
4552 case ROUND_MOD_EXPR:
4553 preexpand_calls (exp);
4554 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4555 subtarget = 0;
4556 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 4557 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4558 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4559
4560 case FIX_ROUND_EXPR:
4561 case FIX_FLOOR_EXPR:
4562 case FIX_CEIL_EXPR:
4563 abort (); /* Not used for C. */
4564
4565 case FIX_TRUNC_EXPR:
906c4e36 4566 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4567 if (target == 0)
4568 target = gen_reg_rtx (mode);
4569 expand_fix (target, op0, unsignedp);
4570 return target;
4571
4572 case FLOAT_EXPR:
906c4e36 4573 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4574 if (target == 0)
4575 target = gen_reg_rtx (mode);
4576 /* expand_float can't figure out what to do if FROM has VOIDmode.
4577 So give it the correct mode. With -O, cse will optimize this. */
4578 if (GET_MODE (op0) == VOIDmode)
4579 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4580 op0);
4581 expand_float (target, op0,
4582 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4583 return target;
4584
4585 case NEGATE_EXPR:
4586 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4587 temp = expand_unop (mode, neg_optab, op0, target, 0);
4588 if (temp == 0)
4589 abort ();
4590 return temp;
4591
4592 case ABS_EXPR:
4593 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4594
4595 /* Handle complex values specially. */
4596 {
4597 enum machine_mode opmode
4598 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4599
4600 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4601 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4602 return expand_complex_abs (opmode, op0, target, unsignedp);
4603 }
4604
4605 /* Unsigned abs is simply the operand. Testing here means we don't
4606 risk generating incorrect code below. */
4607 if (TREE_UNSIGNED (type))
4608 return op0;
4609
4610 /* First try to do it with a special abs instruction. */
4611 temp = expand_unop (mode, abs_optab, op0, target, 0);
4612 if (temp != 0)
4613 return temp;
4614
4615 /* If this machine has expensive jumps, we can do integer absolute
4616 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4617 where W is the width of MODE. */
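/* Worked example (ours): for W == 32 and x == -5, the shift gives
   s = x >> 31 = -1 (all ones); then (s ^ x) == 4 and 4 - s == 5.
   For x == 5, s == 0 and the identity returns 5 unchanged.  */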
4618
4619 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4620 {
4621 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4622 size_int (GET_MODE_BITSIZE (mode) - 1),
906c4e36 4623 NULL_RTX, 0);
4624
4625 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4626 OPTAB_LIB_WIDEN);
4627 if (temp != 0)
4628 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4629 OPTAB_LIB_WIDEN);
4630
4631 if (temp != 0)
4632 return temp;
4633 }
4634
4635 /* If that does not win, use conditional jump and negate. */
4636 target = original_target;
4637 temp = gen_label_rtx ();
4638 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4639 || (GET_CODE (target) == REG
4640 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4641 target = gen_reg_rtx (mode);
4642 emit_move_insn (target, op0);
4643 emit_cmp_insn (target,
4644 expand_expr (convert (type, integer_zero_node),
4645 NULL_RTX, VOIDmode, 0),
4646 GE, NULL_RTX, mode, 0, 0);
4647 NO_DEFER_POP;
4648 emit_jump_insn (gen_bge (temp));
4649 op0 = expand_unop (mode, neg_optab, target, target, 0);
4650 if (op0 != target)
4651 emit_move_insn (target, op0);
4652 emit_label (temp);
4653 OK_DEFER_POP;
4654 return target;
4655
4656 case MAX_EXPR:
4657 case MIN_EXPR:
4658 target = original_target;
4659 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4660 || (GET_CODE (target) == REG
4661 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4662 target = gen_reg_rtx (mode);
906c4e36 4663 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4664 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4665
4666 /* First try to do it with a special MIN or MAX instruction.
4667 If that does not win, use a conditional jump to select the proper
4668 value. */
4669 this_optab = (TREE_UNSIGNED (type)
4670 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4671 : (code == MIN_EXPR ? smin_optab : smax_optab));
4672
4673 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4674 OPTAB_WIDEN);
4675 if (temp != 0)
4676 return temp;
4677
4678 if (target != op0)
4679 emit_move_insn (target, op0);
4680 op0 = gen_label_rtx ();
4681 /* If this mode is an integer too wide to compare properly,
4682 compare word by word. Rely on cse to optimize constant cases. */
4683 if (GET_MODE_CLASS (mode) == MODE_INT
4684 && !can_compare_p (mode))
bbf6f052 4685 {
4686 if (code == MAX_EXPR)
4687 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
bbf6f052 4688 else
f81497d9 4689 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
4690 emit_move_insn (target, op1);
4691 }
4692 else
4693 {
4694 if (code == MAX_EXPR)
4695 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4696 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4697 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4698 else
4699 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4700 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4701 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4702 if (temp == const0_rtx)
4703 emit_move_insn (target, op1);
4704 else if (temp != const_true_rtx)
4705 {
4706 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4707 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4708 else
4709 abort ();
4710 emit_move_insn (target, op1);
4711 }
4712 }
4713 emit_label (op0);
4714 return target;
4715
4716/* ??? Can optimize when the operand of this is a bitwise operation,
4717 by using a different bitwise operation. */
4718 case BIT_NOT_EXPR:
4719 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4720 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4721 if (temp == 0)
4722 abort ();
4723 return temp;
4724
4725 case FFS_EXPR:
4726 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4727 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4728 if (temp == 0)
4729 abort ();
4730 return temp;
4731
4732/* ??? Can optimize bitwise operations with one arg constant.
4733 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4734 and (a bitwise1 b) bitwise2 b (etc)
4735 but that is probably not worthwhile. */
4736
4737/* BIT_AND_EXPR is for bitwise anding.
4738 TRUTH_AND_EXPR is for anding two boolean values
4739 when we want in all cases to compute both of them.
4740 In general it is fastest to do TRUTH_AND_EXPR by
4741 computing both operands as actual zero-or-1 values
4742 and then bitwise anding. In cases where there cannot
4743 be any side effects, better code would be made by
4744 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4745 but the question is how to recognize those cases. */
4746
4747 case TRUTH_AND_EXPR:
4748 case BIT_AND_EXPR:
4749 this_optab = and_optab;
4750 goto binop;
4751
4752/* See comment above about TRUTH_AND_EXPR; it applies here too. */
4753 case TRUTH_OR_EXPR:
4754 case BIT_IOR_EXPR:
4755 this_optab = ior_optab;
4756 goto binop;
4757
874726a8 4758 case TRUTH_XOR_EXPR:
4759 case BIT_XOR_EXPR:
4760 this_optab = xor_optab;
4761 goto binop;
4762
4763 case LSHIFT_EXPR:
4764 case RSHIFT_EXPR:
4765 case LROTATE_EXPR:
4766 case RROTATE_EXPR:
4767 preexpand_calls (exp);
4768 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4769 subtarget = 0;
4770 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4771 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4772 unsignedp);
4773
4774/* Could determine the answer when only additive constants differ.
4775 Also, the addition of one can be handled by changing the condition. */
4776 case LT_EXPR:
4777 case LE_EXPR:
4778 case GT_EXPR:
4779 case GE_EXPR:
4780 case EQ_EXPR:
4781 case NE_EXPR:
4782 preexpand_calls (exp);
4783 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4784 if (temp != 0)
4785 return temp;
4786 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4787 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4788 && original_target
4789 && GET_CODE (original_target) == REG
4790 && (GET_MODE (original_target)
4791 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4792 {
4793 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4794 if (temp != original_target)
4795 temp = copy_to_reg (temp);
4796 op1 = gen_label_rtx ();
906c4e36 4797 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4798 GET_MODE (temp), unsignedp, 0);
4799 emit_jump_insn (gen_beq (op1));
4800 emit_move_insn (temp, const1_rtx);
4801 emit_label (op1);
4802 return temp;
4803 }
4804 /* If no set-flag instruction, must generate a conditional
4805 store into a temporary variable. Drop through
4806 and handle this like && and ||. */
4807
4808 case TRUTH_ANDIF_EXPR:
4809 case TRUTH_ORIF_EXPR:
4810 if (! ignore
4811 && (target == 0 || ! safe_from_p (target, exp)
4812 /* Make sure we don't have a hard reg (such as function's return
4813 value) live across basic blocks, if not optimizing. */
4814 || (!optimize && GET_CODE (target) == REG
4815 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 4816 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4817
4818 if (target)
4819 emit_clr_insn (target);
4820
4821 op1 = gen_label_rtx ();
4822 jumpifnot (exp, op1);
4823
4824 if (target)
4825 emit_0_to_1_insn (target);
4826
bbf6f052 4827 emit_label (op1);
e44842fe 4828 return ignore ? const0_rtx : target;
4829
4830 case TRUTH_NOT_EXPR:
4831 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4832 /* The parser is careful to generate TRUTH_NOT_EXPR
4833 only with operands that are always zero or one. */
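/* So logical negation reduces to `x ^ 1' (our note): 0 ^ 1 == 1 and
   1 ^ 1 == 0, which is why xor_optab with const1_rtx suffices here.  */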
906c4e36 4834 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4835 target, 1, OPTAB_LIB_WIDEN);
4836 if (temp == 0)
4837 abort ();
4838 return temp;
4839
4840 case COMPOUND_EXPR:
4841 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4842 emit_queue ();
4843 return expand_expr (TREE_OPERAND (exp, 1),
4844 (ignore ? const0_rtx : target),
4845 VOIDmode, 0);
4846
4847 case COND_EXPR:
4848 {
4849 /* Note that COND_EXPRs whose type is a structure or union
4850 are required to be constructed to contain assignments of
4851 a temporary variable, so that we can evaluate them here
4852 for side effect only. If type is void, we must do likewise. */
4853
4854 /* If an arm of the branch requires a cleanup,
4855 only that cleanup is performed. */
4856
4857 tree singleton = 0;
4858 tree binary_op = 0, unary_op = 0;
4859 tree old_cleanups = cleanups_this_call;
4860 cleanups_this_call = 0;
4861
4862 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4863 convert it to our mode, if necessary. */
4864 if (integer_onep (TREE_OPERAND (exp, 1))
4865 && integer_zerop (TREE_OPERAND (exp, 2))
4866 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4867 {
4868 if (ignore)
4869 {
4870 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4871 modifier);
4872 return const0_rtx;
4873 }
4874
4875 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4876 if (GET_MODE (op0) == mode)
4877 return op0;
4878 if (target == 0)
4879 target = gen_reg_rtx (mode);
4880 convert_move (target, op0, unsignedp);
4881 return target;
4882 }
4883
4884 /* If we are not to produce a result, we have no target. Otherwise,
4885 if a target was specified use it; it will not be used as an
4886 intermediate target unless it is safe. If no target, use a
4887 temporary. */
4888
dd27116b 4889 if (ignore)
4890 temp = 0;
4891 else if (original_target
4892 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4893 temp = original_target;
4894 else if (mode == BLKmode)
4895 {
4896 if (TYPE_SIZE (type) == 0
4897 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4898 abort ();
4899 temp = assign_stack_temp (BLKmode,
4900 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4901 + BITS_PER_UNIT - 1)
4902 / BITS_PER_UNIT, 0);
4903 }
4904 else
4905 temp = gen_reg_rtx (mode);
4906
4907 /* Check for X ? A + B : A. If we have this, we can copy
4908 A to the output and conditionally add B. Similarly for unary
4909 operations. Don't do this if X has side-effects because
4910 those side effects might affect A or B and the "?" operation is
4911 a sequence point in ANSI. (We test for side effects later.) */
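/* Sketch of the idea (ours): for `x ? a + b : a' we may store A into the
   target once and then add B only on the true arm, instead of computing
   A on both arms; SINGLETON below records the shared operand A.  */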
4912
4913 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4914 && operand_equal_p (TREE_OPERAND (exp, 2),
4915 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4916 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4917 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4918 && operand_equal_p (TREE_OPERAND (exp, 1),
4919 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4920 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4921 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4922 && operand_equal_p (TREE_OPERAND (exp, 2),
4923 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4924 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4925 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4926 && operand_equal_p (TREE_OPERAND (exp, 1),
4927 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4928 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4929
4930 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4931 operation, do this as A + (X != 0). Similarly for other simple
4932 binary operators. */
dd27116b 4933 if (temp && singleton && binary_op
4934 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4935 && (TREE_CODE (binary_op) == PLUS_EXPR
4936 || TREE_CODE (binary_op) == MINUS_EXPR
4937 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4938 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4939 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4940 && integer_onep (TREE_OPERAND (binary_op, 1))
4941 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4942 {
4943 rtx result;
4944 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4945 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4946 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4947 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4948 : and_optab);
4949
4950 /* If we had X ? A : A + 1, do this as A + (X == 0).
4951
4952 We have to invert the truth value here and then put it
4953 back later if do_store_flag fails. We cannot simply copy
4954 TREE_OPERAND (exp, 0) to another variable and modify that
4955 because invert_truthvalue can modify the tree pointed to
4956 by its argument. */
4957 if (singleton == TREE_OPERAND (exp, 1))
4958 TREE_OPERAND (exp, 0)
4959 = invert_truthvalue (TREE_OPERAND (exp, 0));
4960
4961 result = do_store_flag (TREE_OPERAND (exp, 0),
4962 (safe_from_p (temp, singleton)
4963 ? temp : NULL_RTX),
4964 mode, BRANCH_COST <= 1);
4965
4966 if (result)
4967 {
906c4e36 4968 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4969 return expand_binop (mode, boptab, op1, result, temp,
4970 unsignedp, OPTAB_LIB_WIDEN);
4971 }
4972 else if (singleton == TREE_OPERAND (exp, 1))
4973 TREE_OPERAND (exp, 0)
4974 = invert_truthvalue (TREE_OPERAND (exp, 0));
4975 }
4976
4977 NO_DEFER_POP;
4978 op0 = gen_label_rtx ();
4979
4980 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4981 {
4982 if (temp != 0)
4983 {
4984 /* If the target conflicts with the other operand of the
4985 binary op, we can't use it. Also, we can't use the target
4986 if it is a hard register, because evaluating the condition
4987 might clobber it. */
4988 if ((binary_op
4989 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4990 || (GET_CODE (temp) == REG
4991 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4992 temp = gen_reg_rtx (mode);
4993 store_expr (singleton, temp, 0);
4994 }
4995 else
906c4e36 4996 expand_expr (singleton,
2937cf87 4997 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4998 if (cleanups_this_call)
4999 {
5000 sorry ("aggregate value in COND_EXPR");
5001 cleanups_this_call = 0;
5002 }
5003 if (singleton == TREE_OPERAND (exp, 1))
5004 jumpif (TREE_OPERAND (exp, 0), op0);
5005 else
5006 jumpifnot (TREE_OPERAND (exp, 0), op0);
5007
5008 if (binary_op && temp == 0)
5009 /* Just touch the other operand. */
5010 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 5011 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5012 else if (binary_op)
5013 store_expr (build (TREE_CODE (binary_op), type,
5014 make_tree (type, temp),
5015 TREE_OPERAND (binary_op, 1)),
5016 temp, 0);
5017 else
5018 store_expr (build1 (TREE_CODE (unary_op), type,
5019 make_tree (type, temp)),
5020 temp, 0);
5021 op1 = op0;
5022 }
5023#if 0
5024 /* This is now done in jump.c and is better done there because it
5025 produces shorter register lifetimes. */
5026
5027 /* Check for both possibilities, either constants or variables
5028 in registers (but not the same as the target!). If so, we can
5029 save branches by assigning one, branching, and assigning the
5030 other. */
5031 else if (temp && GET_MODE (temp) != BLKmode
5032 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5033 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5034 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5035 && DECL_RTL (TREE_OPERAND (exp, 1))
5036 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5037 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5038 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5039 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5040 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5041 && DECL_RTL (TREE_OPERAND (exp, 2))
5042 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5043 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5044 {
5045 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5046 temp = gen_reg_rtx (mode);
5047 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5048 jumpifnot (TREE_OPERAND (exp, 0), op0);
5049 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5050 op1 = op0;
5051 }
5052#endif
5053 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5054 comparison operator. If we have one of these cases, set the
5055 output to A, branch on A (cse will merge these two references),
5056 then set the output to FOO. */
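#if 0
      /* Illustrative sketch, not part of the original source:
         for "p != 0 ? p : q" we store P, branch on the same
         comparison of P (cse merges the two references), and
         store Q on the fall-through path.  */
      int f (int p, int q)
      {
        return p != 0 ? p : q;  /* the A op 0 ? A : FOO shape */
      }
#endif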
5057 else if (temp
5058 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5059 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5060 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5061 TREE_OPERAND (exp, 1), 0)
5062 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5063 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5064 {
5065 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5066 temp = gen_reg_rtx (mode);
5067 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5068 jumpif (TREE_OPERAND (exp, 0), op0);
5069 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5070 op1 = op0;
5071 }
5072 else if (temp
5073 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5074 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5075 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5076 TREE_OPERAND (exp, 2), 0)
5077 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5078 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5079 {
5080 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5081 temp = gen_reg_rtx (mode);
5082 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5083 jumpifnot (TREE_OPERAND (exp, 0), op0);
5084 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5085 op1 = op0;
5086 }
5087 else
5088 {
5089 op1 = gen_label_rtx ();
5090 jumpifnot (TREE_OPERAND (exp, 0), op0);
5091 if (temp != 0)
5092 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5093 else
906c4e36
RK
5094 expand_expr (TREE_OPERAND (exp, 1),
5095 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5096 if (cleanups_this_call)
5097 {
5098 sorry ("aggregate value in COND_EXPR");
5099 cleanups_this_call = 0;
5100 }
5101
5102 emit_queue ();
5103 emit_jump_insn (gen_jump (op1));
5104 emit_barrier ();
5105 emit_label (op0);
5106 if (temp != 0)
5107 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5108 else
906c4e36
RK
5109 expand_expr (TREE_OPERAND (exp, 2),
5110 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5111 }
5112
5113 if (cleanups_this_call)
5114 {
5115 sorry ("aggregate value in COND_EXPR");
5116 cleanups_this_call = 0;
5117 }
5118
5119 emit_queue ();
5120 emit_label (op1);
5121 OK_DEFER_POP;
5122 cleanups_this_call = old_cleanups;
5123 return temp;
5124 }
5125
5126 case TARGET_EXPR:
5127 {
5128 /* Something needs to be initialized, but we didn't know
5129 where that thing was when building the tree. For example,
5130 it could be the return value of a function, or a parameter
5131 to a function which is laid out on the stack, or a temporary
5132 variable which must be passed by reference.
5133
5134 We guarantee that the expression will either be constructed
5135 or copied into our original target. */
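 /* A TARGET_EXPR arises, for example (illustrative), when a C++
    temporary such as the argument in "f (T ())" must be built
    directly in its final location, the "slot" below. */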
5136
5137 tree slot = TREE_OPERAND (exp, 0);
5c062816 5138 tree exp1;
bbf6f052
RK
5139
5140 if (TREE_CODE (slot) != VAR_DECL)
5141 abort ();
5142
5143 if (target == 0)
5144 {
5145 if (DECL_RTL (slot) != 0)
ac993f4f
MS
5146 {
5147 target = DECL_RTL (slot);
5c062816 5148 /* We have already expanded the slot, so don't do
ac993f4f 5149 it again. (mrs) */
5c062816
MS
5150 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5151 return target;
ac993f4f 5152 }
bbf6f052
RK
5153 else
5154 {
5155 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5156 /* All temp slots at this level must not conflict. */
5157 preserve_temp_slots (target);
5158 DECL_RTL (slot) = target;
5159 }
5160
5161#if 0
ac993f4f
MS
5162 /* I bet this needs to be done, and I bet that it needs to
5163 be above, inside the else clause. The reason is
5164 simple: how else is it going to get cleaned up? (mrs)
5165
5166 The reason it probably did not work before, and was
5167 commented out, is that it was re-expanding already
5168 expanded target_exprs (target == 0 and DECL_RTL (slot)
5169 != 0), also cleaning them up many times as well. :-( */
5170
bbf6f052
RK
5171 /* Since SLOT is not known to the called function
5172 to belong to its stack frame, we must build an explicit
5173 cleanup. This case occurs when we must build up a reference
5174 to pass the reference as an argument. In this case,
5175 it is very likely that such a reference need not be
5176 built here. */
5177
5178 if (TREE_OPERAND (exp, 2) == 0)
5179 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5180 if (TREE_OPERAND (exp, 2))
906c4e36
RK
5181 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5182 cleanups_this_call);
bbf6f052
RK
5183#endif
5184 }
5185 else
5186 {
5187 /* This case does occur, when expanding a parameter which
5188 needs to be constructed on the stack. The target
5189 is the actual stack address that we want to initialize.
5190 The function we call will perform the cleanup in this case. */
5191
8c042b47
RS
5192 /* If we have already assigned it space, use that space,
5193 not the target that we were passed in, as our target
5194 parameter is only a hint. */
5195 if (DECL_RTL (slot) != 0)
5196 {
5197 target = DECL_RTL (slot);
5198 /* We have already expanded the slot, so don't do
5199 it again. (mrs) */
5200 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5201 return target;
5202 }
5203
bbf6f052
RK
5204 DECL_RTL (slot) = target;
5205 }
5206
5c062816
MS
5207 exp1 = TREE_OPERAND (exp, 1);
5208 /* Mark it as expanded. */
5209 TREE_OPERAND (exp, 1) = NULL_TREE;
5210
5211 return expand_expr (exp1, target, tmode, modifier);
bbf6f052
RK
5212 }
5213
5214 case INIT_EXPR:
5215 {
5216 tree lhs = TREE_OPERAND (exp, 0);
5217 tree rhs = TREE_OPERAND (exp, 1);
5218 tree noncopied_parts = 0;
5219 tree lhs_type = TREE_TYPE (lhs);
5220
5221 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5222 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5223 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5224 TYPE_NONCOPIED_PARTS (lhs_type));
5225 while (noncopied_parts != 0)
5226 {
5227 expand_assignment (TREE_VALUE (noncopied_parts),
5228 TREE_PURPOSE (noncopied_parts), 0, 0);
5229 noncopied_parts = TREE_CHAIN (noncopied_parts);
5230 }
5231 return temp;
5232 }
5233
5234 case MODIFY_EXPR:
5235 {
5236 /* If lhs is complex, expand calls in rhs before computing it.
5237 That's so we don't compute a pointer and save it over a call.
5238 If lhs is simple, compute it first so we can give it as a
5239 target if the rhs is just a call. This avoids an extra temp and copy
5240 and prevents a partial subsumption which makes bad code.
5241 Actually we could treat component_ref's of vars like vars. */
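 /* E.g. (illustrative): for "v = f ()" we can compute V first and
    hand it to the call as the target; for "a[g ()] = f ()" we
    pre-expand the calls so no address is held live across them. */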
5242
5243 tree lhs = TREE_OPERAND (exp, 0);
5244 tree rhs = TREE_OPERAND (exp, 1);
5245 tree noncopied_parts = 0;
5246 tree lhs_type = TREE_TYPE (lhs);
5247
5248 temp = 0;
5249
5250 if (TREE_CODE (lhs) != VAR_DECL
5251 && TREE_CODE (lhs) != RESULT_DECL
5252 && TREE_CODE (lhs) != PARM_DECL)
5253 preexpand_calls (exp);
5254
5255 /* Check for |= or &= of a bitfield of size one into another bitfield
5256 of size 1. In this case, (unless we need the result of the
5257 assignment) we can do this more efficiently with a
5258 test followed by an assignment, if necessary.
5259
5260 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5261 things change so we do, this code should be enhanced to
5262 support it. */
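#if 0
      /* Illustrative sketch, not part of the original source.
         With the declarations below, the statement "x->b |= y->b;"
         whose value is ignored can be compiled as a test followed
         by a conditional store of 1, instead of a read-modify-write
         of the bitfield:  */
      struct s { unsigned int b : 1; };
      void f (struct s *x, struct s *y)
      {
        if (y->b)
          x->b = 1;             /* same effect as x->b |= y->b; */
      }
#endif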
5263 if (ignore
5264 && TREE_CODE (lhs) == COMPONENT_REF
5265 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5266 || TREE_CODE (rhs) == BIT_AND_EXPR)
5267 && TREE_OPERAND (rhs, 0) == lhs
5268 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5269 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5270 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5271 {
5272 rtx label = gen_label_rtx ();
5273
5274 do_jump (TREE_OPERAND (rhs, 1),
5275 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5276 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5277 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5278 (TREE_CODE (rhs) == BIT_IOR_EXPR
5279 ? integer_one_node
5280 : integer_zero_node)),
5281 0, 0);
e7c33f54 5282 do_pending_stack_adjust ();
bbf6f052
RK
5283 emit_label (label);
5284 return const0_rtx;
5285 }
5286
5287 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5288 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5289 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5290 TYPE_NONCOPIED_PARTS (lhs_type));
5291
5292 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5293 while (noncopied_parts != 0)
5294 {
5295 expand_assignment (TREE_PURPOSE (noncopied_parts),
5296 TREE_VALUE (noncopied_parts), 0, 0);
5297 noncopied_parts = TREE_CHAIN (noncopied_parts);
5298 }
5299 return temp;
5300 }
5301
5302 case PREINCREMENT_EXPR:
5303 case PREDECREMENT_EXPR:
5304 return expand_increment (exp, 0);
5305
5306 case POSTINCREMENT_EXPR:
5307 case POSTDECREMENT_EXPR:
5308 /* Faster to treat as pre-increment if result is not used. */
5309 return expand_increment (exp, ! ignore);
5310
5311 case ADDR_EXPR:
5312 /* Are we taking the address of a nested function? */
5313 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5314 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5315 {
5316 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5317 op0 = force_operand (op0, target);
5318 }
5319 else
5320 {
906c4e36 5321 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
bbf6f052
RK
5322 (modifier == EXPAND_INITIALIZER
5323 ? modifier : EXPAND_CONST_ADDRESS));
896102d0
RK
5324
5325 /* We would like the object in memory. If it is a constant,
5326 we can have it be statically allocated into memory. For
5327 a non-constant (REG or SUBREG), we need to allocate some
5328 memory and store the value into it. */
5329
5330 if (CONSTANT_P (op0))
5331 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5332 op0);
5333
5334 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
5335 {
5336 /* If this object is in a register, it must not
5337 be BLKmode. */
5338 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5339 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5340 rtx memloc
5341 = assign_stack_temp (inner_mode,
5342 int_size_in_bytes (inner_type), 1);
5343
5344 emit_move_insn (memloc, op0);
5345 op0 = memloc;
5346 }
5347
bbf6f052
RK
5348 if (GET_CODE (op0) != MEM)
5349 abort ();
5350
5351 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5352 return XEXP (op0, 0);
5353 op0 = force_operand (XEXP (op0, 0), target);
5354 }
5355 if (flag_force_addr && GET_CODE (op0) != REG)
5356 return force_reg (Pmode, op0);
5357 return op0;
5358
5359 case ENTRY_VALUE_EXPR:
5360 abort ();
5361
7308a047
RS
5362 /* COMPLEX type for Extended Pascal & Fortran */
5363 case COMPLEX_EXPR:
5364 {
5365 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5366
5367 rtx prev;
5368
5369 /* Get the rtx code of the operands. */
5370 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5371 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5372
5373 if (! target)
5374 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5375
5376 prev = get_last_insn ();
5377
5378 /* Tell flow that the whole of the destination is being set. */
5379 if (GET_CODE (target) == REG)
5380 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5381
5382 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
5383 emit_move_insn (gen_realpart (mode, target), op0);
5384 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047
RS
5385
5386 /* Complex construction should appear as a single unit. */
5387 group_insns (prev);
5388
5389 return target;
5390 }
5391
5392 case REALPART_EXPR:
2d7050fd
RS
5393 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5394 return gen_realpart (mode, op0);
7308a047
RS
5395
5396 case IMAGPART_EXPR:
2d7050fd
RS
5397 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5398 return gen_imagpart (mode, op0);
7308a047
RS
5399
5400 case CONJ_EXPR:
5401 {
5402 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5403 rtx imag_t;
5404 rtx prev;
5405
5406 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5407
5408 if (! target)
5409 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5410
5411 prev = get_last_insn ();
5412
5413 /* Tell flow that the whole of the destination is being set. */
5414 if (GET_CODE (target) == REG)
5415 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5416
5417 /* Store the realpart and the negated imagpart to target. */
2d7050fd 5418 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
7308a047 5419
2d7050fd 5420 imag_t = gen_imagpart (mode, target);
7308a047 5421 temp = expand_unop (mode, neg_optab,
2d7050fd 5422 gen_imagpart (mode, op0), imag_t, 0);
7308a047
RS
5423 if (temp != imag_t)
5424 emit_move_insn (imag_t, temp);
5425
5426 /* Conjugate should appear as a single unit. */
5427 group_insns (prev);
5428
5429 return target;
5430 }
5431
bbf6f052 5432 case ERROR_MARK:
66538193
RS
5433 op0 = CONST0_RTX (tmode);
5434 if (op0 != 0)
5435 return op0;
bbf6f052
RK
5436 return const0_rtx;
5437
5438 default:
5439 return (*lang_expand_expr) (exp, target, tmode, modifier);
5440 }
5441
5442 /* Here to do an ordinary binary operator, generating an instruction
5443 from the optab already placed in `this_optab'. */
5444 binop:
5445 preexpand_calls (exp);
5446 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5447 subtarget = 0;
5448 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 5449 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5450 binop2:
5451 temp = expand_binop (mode, this_optab, op0, op1, target,
5452 unsignedp, OPTAB_LIB_WIDEN);
5453 if (temp == 0)
5454 abort ();
5455 return temp;
5456}
5457\f
e87b4f3f
RS
5458/* Return the alignment in bits of EXP, a pointer valued expression.
5459 But don't return more than MAX_ALIGN no matter what.
bbf6f052
RK
5460 The alignment returned is, by default, the alignment of the thing that
5461 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5462
5463 Otherwise, look at the expression to see if we can do better, i.e., if the
5464 expression is actually pointing at an object whose alignment is tighter. */
5465
5466static int
5467get_pointer_alignment (exp, max_align)
5468 tree exp;
5469 unsigned max_align;
5470{
5471 unsigned align, inner;
5472
5473 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5474 return 0;
5475
5476 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5477 align = MIN (align, max_align);
5478
5479 while (1)
5480 {
5481 switch (TREE_CODE (exp))
5482 {
5483 case NOP_EXPR:
5484 case CONVERT_EXPR:
5485 case NON_LVALUE_EXPR:
5486 exp = TREE_OPERAND (exp, 0);
5487 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5488 return align;
5489 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5490 inner = MIN (inner, max_align);
5491 align = MAX (align, inner);
5492 break;
5493
5494 case PLUS_EXPR:
5495 /* If sum of pointer + int, restrict our maximum alignment to that
5496 imposed by the integer. If not, we can't do any better than
5497 ALIGN. */
5498 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5499 return align;
5500
e87b4f3f
RS
5501 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5502 & (max_align - 1))
5503 != 0)
bbf6f052
RK
5504 max_align >>= 1;
5505
5506 exp = TREE_OPERAND (exp, 0);
5507 break;
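 /* Worked example (illustrative): for "p + 2" with max_align of
    64 bits, the 2-byte offset contributes 16 bits, so max_align
    is halved until (16 & (max_align - 1)) == 0, leaving 16. */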
5508
5509 case ADDR_EXPR:
5510 /* See what we are pointing at and look at its alignment. */
5511 exp = TREE_OPERAND (exp, 0);
e7c33f54
RK
5512 if (TREE_CODE (exp) == FUNCTION_DECL)
5513 align = MAX (align, FUNCTION_BOUNDARY);
5514 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
bbf6f052
RK
5515 align = MAX (align, DECL_ALIGN (exp));
5516#ifdef CONSTANT_ALIGNMENT
5517 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5518 align = CONSTANT_ALIGNMENT (exp, align);
5519#endif
5520 return MIN (align, max_align);
5521
5522 default:
5523 return align;
5524 }
5525 }
5526}
5527\f
5528/* Return the tree node and offset if a given argument corresponds to
5529 a string constant. */
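 /* For example (illustrative): for an argument of the form
    "foo" + N this returns the STRING_CST "foo" with *PTR_OFFSET
    set to N; for a plain &"foo" the offset returned is zero. */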
5530
5531static tree
5532string_constant (arg, ptr_offset)
5533 tree arg;
5534 tree *ptr_offset;
5535{
5536 STRIP_NOPS (arg);
5537
5538 if (TREE_CODE (arg) == ADDR_EXPR
5539 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5540 {
5541 *ptr_offset = integer_zero_node;
5542 return TREE_OPERAND (arg, 0);
5543 }
5544 else if (TREE_CODE (arg) == PLUS_EXPR)
5545 {
5546 tree arg0 = TREE_OPERAND (arg, 0);
5547 tree arg1 = TREE_OPERAND (arg, 1);
5548
5549 STRIP_NOPS (arg0);
5550 STRIP_NOPS (arg1);
5551
5552 if (TREE_CODE (arg0) == ADDR_EXPR
5553 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5554 {
5555 *ptr_offset = arg1;
5556 return TREE_OPERAND (arg0, 0);
5557 }
5558 else if (TREE_CODE (arg1) == ADDR_EXPR
5559 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5560 {
5561 *ptr_offset = arg0;
5562 return TREE_OPERAND (arg1, 0);
5563 }
5564 }
5565
5566 return 0;
5567}
5568
5569/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5570 way, because the string could contain a zero byte in the middle.
5571 TREE_STRING_LENGTH is the size of the character array, not the string.
5572
5573 Unfortunately, string_constant can't access the values of const char
5574 arrays with initializers, so neither can we do so here. */
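 /* E.g. (illustrative): for the initializer "foo\0bar" the array
    has TREE_STRING_LENGTH 8 (including the trailing null) but the
    C string length that matters here is 3. */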
5575
5576static tree
5577c_strlen (src)
5578 tree src;
5579{
5580 tree offset_node;
5581 int offset, max;
5582 char *ptr;
5583
5584 src = string_constant (src, &offset_node);
5585 if (src == 0)
5586 return 0;
5587 max = TREE_STRING_LENGTH (src);
5588 ptr = TREE_STRING_POINTER (src);
5589 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5590 {
5591 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5592 compute the offset to the following null if we don't know where to
5593 start searching for it. */
5594 int i;
5595 for (i = 0; i < max; i++)
5596 if (ptr[i] == 0)
5597 return 0;
5598 /* We don't know the starting offset, but we do know that the string
5599 has no internal zero bytes. We can assume that the offset falls
5600 within the bounds of the string; otherwise, the programmer deserves
5601 what he gets. Subtract the offset from the length of the string,
5602 and return that. */
5603 /* This would perhaps not be valid if we were dealing with named
5604 arrays in addition to literal string constants. */
5605 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5606 }
5607
5608 /* We have a known offset into the string. Start searching there for
5609 a null character. */
5610 if (offset_node == 0)
5611 offset = 0;
5612 else
5613 {
5614 /* Did we get a long long offset? If so, punt. */
5615 if (TREE_INT_CST_HIGH (offset_node) != 0)
5616 return 0;
5617 offset = TREE_INT_CST_LOW (offset_node);
5618 }
5619 /* If the offset is known to be out of bounds, warn, and call strlen at
5620 runtime. */
5621 if (offset < 0 || offset > max)
5622 {
5623 warning ("offset outside bounds of constant string");
5624 return 0;
5625 }
5626 /* Use strlen to search for the first zero byte. Since any strings
5627 constructed with build_string will have nulls appended, we win even
5628 if we get handed something like (char[4])"abcd".
5629
5630 Since OFFSET is our starting index into the string, no further
5631 calculation is needed. */
5632 return size_int (strlen (ptr + offset));
5633}
5634\f
5635/* Expand an expression EXP that calls a built-in function,
5636 with result going to TARGET if that's convenient
5637 (and in mode MODE if that's convenient).
5638 SUBTARGET may be used as the target for computing one of EXP's operands.
5639 IGNORE is nonzero if the value is to be ignored. */
5640
5641static rtx
5642expand_builtin (exp, target, subtarget, mode, ignore)
5643 tree exp;
5644 rtx target;
5645 rtx subtarget;
5646 enum machine_mode mode;
5647 int ignore;
5648{
5649 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5650 tree arglist = TREE_OPERAND (exp, 1);
5651 rtx op0;
60bac6ea 5652 rtx lab1, insns;
bbf6f052 5653 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1bbddf11 5654 optab builtin_optab;
bbf6f052
RK
5655
5656 switch (DECL_FUNCTION_CODE (fndecl))
5657 {
5658 case BUILT_IN_ABS:
5659 case BUILT_IN_LABS:
5660 case BUILT_IN_FABS:
5661 /* build_function_call changes these into ABS_EXPR. */
5662 abort ();
5663
1bbddf11
JVA
5664 case BUILT_IN_SIN:
5665 case BUILT_IN_COS:
e87b4f3f
RS
5666 case BUILT_IN_FSQRT:
5667 /* If not optimizing, call the library function. */
8c8a8e34 5668 if (! optimize)
e87b4f3f
RS
5669 break;
5670
5671 if (arglist == 0
19deaec9 5672 /* Arg could be wrong type if user redeclared this fcn wrong. */
e87b4f3f 5673 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
19deaec9 5674 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
e87b4f3f 5675
db0e6d01
RS
5676 /* Stabilize and compute the argument. */
5677 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5678 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5679 {
5680 exp = copy_node (exp);
5681 arglist = copy_node (arglist);
5682 TREE_OPERAND (exp, 1) = arglist;
5683 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5684 }
e87b4f3f 5685 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
e7c33f54
RK
5686
5687 /* Make a suitable register to place result in. */
5688 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5689
c1f7c223 5690 emit_queue ();
8c8a8e34 5691 start_sequence ();
e7c33f54 5692
1bbddf11
JVA
5693 switch (DECL_FUNCTION_CODE (fndecl))
5694 {
5695 case BUILT_IN_SIN:
5696 builtin_optab = sin_optab; break;
5697 case BUILT_IN_COS:
5698 builtin_optab = cos_optab; break;
5699 case BUILT_IN_FSQRT:
5700 builtin_optab = sqrt_optab; break;
5701 default:
5702 abort ();
5703 }
5704
5705 /* Compute into TARGET.
e87b4f3f
RS
5706 Set TARGET to wherever the result comes back. */
5707 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
1bbddf11 5708 builtin_optab, op0, target, 0);
e7c33f54
RK
5709
5710 /* If we were unable to expand via the builtin, stop the
5711 sequence (without outputting the insns) and break, causing
5712 a call to the library function. */
e87b4f3f 5713 if (target == 0)
e7c33f54 5714 {
8c8a8e34 5715 end_sequence ();
e7c33f54
RK
5716 break;
5717 }
e87b4f3f 5718
60bac6ea
RS
5719 /* Check the results by default. But if flag_fast_math is turned on,
5720 then assume sqrt will always be called with valid arguments. */
5721
5722 if (! flag_fast_math)
5723 {
1bbddf11 5724 /* Don't define the builtin FP instructions
60bac6ea
RS
5725 if your machine is not IEEE. */
5726 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5727 abort ();
5728
5729 lab1 = gen_label_rtx ();
5730
5731 /* Test the result; if it is NaN, set errno=EDOM because
5732 the argument was not in the domain. */
5733 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5734 emit_jump_insn (gen_beq (lab1));
5735
5736#if TARGET_EDOM
5737 {
5738#ifdef GEN_ERRNO_RTX
5739 rtx errno_rtx = GEN_ERRNO_RTX;
5740#else
5741 rtx errno_rtx
5742 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5743#endif
5744
5745 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5746 }
5747#else
5748 /* We can't set errno=EDOM directly; let the library call do it.
5749 Pop the arguments right away in case the call gets deleted. */
5750 NO_DEFER_POP;
5751 expand_call (exp, target, 0);
5752 OK_DEFER_POP;
5753#endif
5754
5755 emit_label (lab1);
5756 }
e87b4f3f 5757
e7c33f54 5758 /* Output the entire sequence. */
8c8a8e34
JW
5759 insns = get_insns ();
5760 end_sequence ();
5761 emit_insns (insns);
e7c33f54
RK
5762
5763 return target;
5764
0006469d
TW
5765 /* __builtin_apply_args returns block of memory allocated on
5766 the stack into which is stored the arg pointer, structure
5767 value address, static chain, and all the registers that might
5768 possibly be used in performing a function call. The code is
5769 moved to the start of the function so the incoming values are
5770 saved. */
5771 case BUILT_IN_APPLY_ARGS:
5772 /* Don't do __builtin_apply_args more than once in a function.
5773 Save the result of the first call and reuse it. */
5774 if (apply_args_value != 0)
5775 return apply_args_value;
5776 {
5777 /* When this function is called, it means that registers must be
5778 saved on entry to this function. So we migrate the
5779 call to the first insn of this function. */
5780 rtx temp;
5781 rtx seq;
5782
5783 start_sequence ();
5784 temp = expand_builtin_apply_args ();
5785 seq = get_insns ();
5786 end_sequence ();
5787
5788 apply_args_value = temp;
5789
5790 /* Put the sequence after the NOTE that starts the function.
5791 If this is inside a SEQUENCE, make the outer-level insn
5792 chain current, so the code is placed at the start of the
5793 function. */
5794 push_topmost_sequence ();
5795 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5796 pop_topmost_sequence ();
5797 return temp;
5798 }
5799
5800 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5801 FUNCTION with a copy of the parameters described by
5802 ARGUMENTS, and ARGSIZE. It returns a block of memory
5803 allocated on the stack into which is stored all the registers
5804 that might possibly be used for returning the result of a
5805 function. ARGUMENTS is the value returned by
5806 __builtin_apply_args. ARGSIZE is the number of bytes of
5807 arguments that must be copied. ??? How should this value be
5808 computed? We'll also need a safe worst case value for varargs
5809 functions. */
5810 case BUILT_IN_APPLY:
5811 if (arglist == 0
5812 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5813 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5814 || TREE_CHAIN (arglist) == 0
5815 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5816 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5817 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5818 return const0_rtx;
5819 else
5820 {
5821 int i;
5822 tree t;
5823 rtx ops[3];
5824
5825 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
5826 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
5827
5828 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5829 }
5830
5831 /* __builtin_return (RESULT) causes the function to return the
5832 value described by RESULT. RESULT is address of the block of
5833 memory returned by __builtin_apply. */
5834 case BUILT_IN_RETURN:
5835 if (arglist
5836 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5837 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
5838 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
5839 NULL_RTX, VOIDmode, 0));
5840 return const0_rtx;
5841
bbf6f052
RK
5842 case BUILT_IN_SAVEREGS:
5843 /* Don't do __builtin_saveregs more than once in a function.
5844 Save the result of the first call and reuse it. */
5845 if (saveregs_value != 0)
5846 return saveregs_value;
5847 {
5848 /* When this function is called, it means that registers must be
5849 saved on entry to this function. So we migrate the
5850 call to the first insn of this function. */
5851 rtx temp;
5852 rtx seq;
5853 rtx valreg, saved_valreg;
5854
5855 /* Now really call the function. `expand_call' does not call
5856 expand_builtin, so there is no danger of infinite recursion here. */
5857 start_sequence ();
5858
5859#ifdef EXPAND_BUILTIN_SAVEREGS
5860 /* Do whatever the machine needs done in this case. */
5861 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5862#else
5863 /* The register where the function returns its value
5864 is likely to have something else in it, such as an argument.
5865 So preserve that register around the call. */
5866 if (value_mode != VOIDmode)
5867 {
5868 valreg = hard_libcall_value (value_mode);
5869 saved_valreg = gen_reg_rtx (value_mode);
5870 emit_move_insn (saved_valreg, valreg);
5871 }
5872
5873 /* Generate the call, putting the value in a pseudo. */
5874 temp = expand_call (exp, target, ignore);
5875
5876 if (value_mode != VOIDmode)
5877 emit_move_insn (valreg, saved_valreg);
5878#endif
5879
5880 seq = get_insns ();
5881 end_sequence ();
5882
5883 saveregs_value = temp;
5884
0006469d
TW
5885 /* Put the sequence after the NOTE that starts the function.
5886 If this is inside a SEQUENCE, make the outer-level insn
5887 chain current, so the code is placed at the start of the
5888 function. */
5889 push_topmost_sequence ();
bbf6f052 5890 emit_insns_before (seq, NEXT_INSN (get_insns ()));
0006469d 5891 pop_topmost_sequence ();
bbf6f052
RK
5892 return temp;
5893 }
5894
5895 /* __builtin_args_info (N) returns word N of the arg space info
5896 for the current function. The number and meanings of words
5897 are controlled by the definition of CUMULATIVE_ARGS. */
5898 case BUILT_IN_ARGS_INFO:
5899 {
5900 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5901 int i;
5902 int *word_ptr = (int *) &current_function_args_info;
5903 tree type, elts, result;
5904
5905 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5906 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5907 __FILE__, __LINE__);
5908
5909 if (arglist != 0)
5910 {
5911 tree arg = TREE_VALUE (arglist);
5912 if (TREE_CODE (arg) != INTEGER_CST)
42b85a55 5913 error ("argument of `__builtin_args_info' must be constant");
bbf6f052
RK
5914 else
5915 {
5916 int wordnum = TREE_INT_CST_LOW (arg);
5917
42b85a55
RS
5918 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
5919 error ("argument of `__builtin_args_info' out of range");
bbf6f052 5920 else
906c4e36 5921 return GEN_INT (word_ptr[wordnum]);
bbf6f052
RK
5922 }
5923 }
5924 else
42b85a55 5925 error ("missing argument in `__builtin_args_info'");
bbf6f052
RK
5926
5927 return const0_rtx;
5928
5929#if 0
5930 for (i = 0; i < nwords; i++)
5931 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5932
5933 type = build_array_type (integer_type_node,
5934 build_index_type (build_int_2 (nwords, 0)));
5935 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5936 TREE_CONSTANT (result) = 1;
5937 TREE_STATIC (result) = 1;
5938 result = build (INDIRECT_REF, build_pointer_type (type), result);
5939 TREE_CONSTANT (result) = 1;
906c4e36 5940 return expand_expr (result, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5941#endif
5942 }
5943
5944 /* Return the address of the first anonymous stack arg. */
5945 case BUILT_IN_NEXT_ARG:
5946 {
5947 tree fntype = TREE_TYPE (current_function_decl);
5948 if (!(TYPE_ARG_TYPES (fntype) != 0
5949 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5950 != void_type_node)))
5951 {
5952 error ("`va_start' used in function with fixed args");
5953 return const0_rtx;
5954 }
5955 }
5956
5957 return expand_binop (Pmode, add_optab,
5958 current_function_internal_arg_pointer,
5959 current_function_arg_offset_rtx,
906c4e36 5960 NULL_RTX, 0, OPTAB_LIB_WIDEN);
bbf6f052
RK
5961
5962 case BUILT_IN_CLASSIFY_TYPE:
5963 if (arglist != 0)
5964 {
5965 tree type = TREE_TYPE (TREE_VALUE (arglist));
5966 enum tree_code code = TREE_CODE (type);
5967 if (code == VOID_TYPE)
906c4e36 5968 return GEN_INT (void_type_class);
bbf6f052 5969 if (code == INTEGER_TYPE)
906c4e36 5970 return GEN_INT (integer_type_class);
bbf6f052 5971 if (code == CHAR_TYPE)
906c4e36 5972 return GEN_INT (char_type_class);
bbf6f052 5973 if (code == ENUMERAL_TYPE)
906c4e36 5974 return GEN_INT (enumeral_type_class);
bbf6f052 5975 if (code == BOOLEAN_TYPE)
906c4e36 5976 return GEN_INT (boolean_type_class);
bbf6f052 5977 if (code == POINTER_TYPE)
906c4e36 5978 return GEN_INT (pointer_type_class);
bbf6f052 5979 if (code == REFERENCE_TYPE)
906c4e36 5980 return GEN_INT (reference_type_class);
bbf6f052 5981 if (code == OFFSET_TYPE)
906c4e36 5982 return GEN_INT (offset_type_class);
bbf6f052 5983 if (code == REAL_TYPE)
906c4e36 5984 return GEN_INT (real_type_class);
bbf6f052 5985 if (code == COMPLEX_TYPE)
906c4e36 5986 return GEN_INT (complex_type_class);
bbf6f052 5987 if (code == FUNCTION_TYPE)
906c4e36 5988 return GEN_INT (function_type_class);
bbf6f052 5989 if (code == METHOD_TYPE)
906c4e36 5990 return GEN_INT (method_type_class);
bbf6f052 5991 if (code == RECORD_TYPE)
906c4e36 5992 return GEN_INT (record_type_class);
e7f3c83f 5993 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
906c4e36 5994 return GEN_INT (union_type_class);
bbf6f052 5995 if (code == ARRAY_TYPE)
906c4e36 5996 return GEN_INT (array_type_class);
bbf6f052 5997 if (code == STRING_TYPE)
906c4e36 5998 return GEN_INT (string_type_class);
bbf6f052 5999 if (code == SET_TYPE)
906c4e36 6000 return GEN_INT (set_type_class);
bbf6f052 6001 if (code == FILE_TYPE)
906c4e36 6002 return GEN_INT (file_type_class);
bbf6f052 6003 if (code == LANG_TYPE)
906c4e36 6004 return GEN_INT (lang_type_class);
bbf6f052 6005 }
906c4e36 6006 return GEN_INT (no_type_class);
bbf6f052
RK
6007
6008 case BUILT_IN_CONSTANT_P:
6009 if (arglist == 0)
6010 return const0_rtx;
6011 else
cda0ec81 6012 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
bbf6f052
RK
6013 ? const1_rtx : const0_rtx);
6014
6015 case BUILT_IN_FRAME_ADDRESS:
6016 /* The argument must be a nonnegative integer constant.
6017 It counts the number of frames to scan up the stack.
6018 The value is the address of that frame. */
6019 case BUILT_IN_RETURN_ADDRESS:
6020 /* The argument must be a nonnegative integer constant.
6021 It counts the number of frames to scan up the stack.
6022 The value is the return address saved in that frame. */
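 /* E.g. (illustrative): __builtin_return_address (0) yields the
    return address of the current frame, and a count of 1 scans
    up one frame before reading the saved address. */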
6023 if (arglist == 0)
6024 /* Warning about missing arg was already issued. */
6025 return const0_rtx;
6026 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
6027 {
42b85a55 6028 error ("invalid arg to `__builtin_return_address'");
bbf6f052
RK
6029 return const0_rtx;
6030 }
6031 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
6032 {
42b85a55 6033 error ("invalid arg to `__builtin_return_address'");
bbf6f052
RK
6034 return const0_rtx;
6035 }
6036 else
6037 {
6038 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
6039 rtx tem = frame_pointer_rtx;
6040 int i;
6041
46b68a37
JW
6042 /* Some machines need special handling before we can access arbitrary
6043 frames. For example, on the sparc, we must first flush all
6044 register windows to the stack. */
6045#ifdef SETUP_FRAME_ADDRESSES
6046 SETUP_FRAME_ADDRESSES ();
6047#endif
6048
6049 /* On the sparc, the return address is not in the frame, it is
6050 in a register. There is no way to access it off of the current
6051 frame pointer, but it can be accessed off the previous frame
6052 pointer by reading the value from the register window save
6053 area. */
6054#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
6055 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
6056 count--;
6057#endif
6058
bbf6f052
RK
6059 /* Scan back COUNT frames to the specified frame. */
6060 for (i = 0; i < count; i++)
6061 {
6062 /* Assume the dynamic chain pointer is in the word that
6063 the frame address points to, unless otherwise specified. */
6064#ifdef DYNAMIC_CHAIN_ADDRESS
6065 tem = DYNAMIC_CHAIN_ADDRESS (tem);
6066#endif
6067 tem = memory_address (Pmode, tem);
6068 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
6069 }
6070
6071 /* For __builtin_frame_address, return what we've got. */
6072 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6073 return tem;
6074
6075 /* For __builtin_return_address,
6076 get the return address from that frame. */
6077#ifdef RETURN_ADDR_RTX
6078 return RETURN_ADDR_RTX (count, tem);
6079#else
6080 tem = memory_address (Pmode,
6081 plus_constant (tem, GET_MODE_SIZE (Pmode)));
6082 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
6083#endif
6084 }
6085
6086 case BUILT_IN_ALLOCA:
6087 if (arglist == 0
6088 /* Arg could be non-integer if user redeclared this fcn wrong. */
6089 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6090 return const0_rtx;
6091 current_function_calls_alloca = 1;
6092 /* Compute the argument. */
906c4e36 6093 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6094
6095 /* Allocate the desired space. */
8c8a8e34 6096 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
bbf6f052
RK
6097
6098 /* Record the new stack level for nonlocal gotos. */
6dc42e49 6099 if (nonlocal_goto_handler_slot != 0)
906c4e36 6100 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
bbf6f052
RK
6101 return target;
6102
6103 case BUILT_IN_FFS:
6104 /* If not optimizing, call the library function. */
6105 if (!optimize)
6106 break;
6107
6108 if (arglist == 0
6109 /* Arg could be non-integer if user redeclared this fcn wrong. */
6110 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6111 return const0_rtx;
6112
6113 /* Compute the argument. */
6114 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6115 /* Compute ffs, into TARGET if possible.
6116 Set TARGET to wherever the result comes back. */
6117 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6118 ffs_optab, op0, target, 1);
6119 if (target == 0)
6120 abort ();
6121 return target;
6122
6123 case BUILT_IN_STRLEN:
6124 /* If not optimizing, call the library function. */
6125 if (!optimize)
6126 break;
6127
6128 if (arglist == 0
6129 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6130 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6131 return const0_rtx;
6132 else
6133 {
e7c33f54
RK
6134 tree src = TREE_VALUE (arglist);
6135 tree len = c_strlen (src);
bbf6f052 6136
e7c33f54
RK
6137 int align
6138 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6139
6140 rtx result, src_rtx, char_rtx;
6141 enum machine_mode insn_mode = value_mode, char_mode;
6142 enum insn_code icode;
6143
6144 /* If the length is known, just return it. */
6145 if (len != 0)
6146 return expand_expr (len, target, mode, 0);
6147
6148 /* If SRC is not a pointer type, don't do this operation inline. */
6149 if (align == 0)
6150 break;
6151
6152 /* Call a function if we can't compute strlen in the right mode. */
6153
6154 while (insn_mode != VOIDmode)
6155 {
6156 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6157 if (icode != CODE_FOR_nothing)
6158 break;
6159
6160 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6161 }
6162 if (insn_mode == VOIDmode)
bbf6f052 6163 break;
e7c33f54
RK
6164
6165 /* Make a place to write the result of the instruction. */
6166 result = target;
6167 if (! (result != 0
6168 && GET_CODE (result) == REG
6169 && GET_MODE (result) == insn_mode
6170 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6171 result = gen_reg_rtx (insn_mode);
6172
4d613828 6173 /* Make sure the operands are acceptable to the predicates. */
e7c33f54 6174
4d613828 6175 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
e7c33f54
RK
6176 result = gen_reg_rtx (insn_mode);
6177
6178 src_rtx = memory_address (BLKmode,
906c4e36 6179 expand_expr (src, NULL_RTX, Pmode,
e7c33f54 6180 EXPAND_NORMAL));
4d613828 6181 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
e7c33f54
RK
6182 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6183
6184 char_rtx = const0_rtx;
4d613828
RS
6185 char_mode = insn_operand_mode[(int)icode][2];
6186 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
e7c33f54
RK
6187 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6188
6189 emit_insn (GEN_FCN (icode) (result,
6190 gen_rtx (MEM, BLKmode, src_rtx),
906c4e36 6191 char_rtx, GEN_INT (align)));
e7c33f54
RK
6192
6193 /* Return the value in the proper mode for this function. */
6194 if (GET_MODE (result) == value_mode)
6195 return result;
6196 else if (target != 0)
6197 {
6198 convert_move (target, result, 0);
6199 return target;
6200 }
6201 else
6202 return convert_to_mode (value_mode, result, 0);
bbf6f052
RK
6203 }
6204
6205 case BUILT_IN_STRCPY:
6206 /* If not optimizing, call the library function. */
6207 if (!optimize)
6208 break;
6209
6210 if (arglist == 0
6211 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6212 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6213 || TREE_CHAIN (arglist) == 0
6214 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6215 return const0_rtx;
6216 else
6217 {
6218 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6219
6220 if (len == 0)
6221 break;
6222
6223 len = size_binop (PLUS_EXPR, len, integer_one_node);
6224
906c4e36 6225 chainon (arglist, build_tree_list (NULL_TREE, len));
bbf6f052
RK
6226 }
6227
6228 /* Falls through. */
6229 case BUILT_IN_MEMCPY:
6230 /* If not optimizing, call the library function. */
6231 if (!optimize)
6232 break;
6233
6234 if (arglist == 0
6235 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6236 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6237 || TREE_CHAIN (arglist) == 0
6238 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6239 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6240 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6241 return const0_rtx;
6242 else
6243 {
6244 tree dest = TREE_VALUE (arglist);
6245 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6246 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6247
6248 int src_align
6249 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6250 int dest_align
6251 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9937da1a 6252 rtx dest_rtx, dest_mem, src_mem;
bbf6f052
RK
6253
6254 /* If either SRC or DEST is not a pointer type, don't do
6255 this operation in-line. */
6256 if (src_align == 0 || dest_align == 0)
6257 {
6258 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6259 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6260 break;
6261 }
6262
906c4e36 6263 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
9937da1a
RS
6264 dest_mem = gen_rtx (MEM, BLKmode,
6265 memory_address (BLKmode, dest_rtx));
6266 src_mem = gen_rtx (MEM, BLKmode,
6267 memory_address (BLKmode,
6268 expand_expr (src, NULL_RTX,
6269 Pmode,
6270 EXPAND_NORMAL)));
bbf6f052
RK
6271
6272 /* Copy word part most expediently. */
9937da1a 6273 emit_block_move (dest_mem, src_mem,
906c4e36 6274 expand_expr (len, NULL_RTX, VOIDmode, 0),
bbf6f052
RK
6275 MIN (src_align, dest_align));
6276 return dest_rtx;
6277 }
6278
6279/* These comparison functions need an instruction that returns an actual
6280 index. An ordinary compare that just sets the condition codes
6281 is not enough. */
6282#ifdef HAVE_cmpstrsi
6283 case BUILT_IN_STRCMP:
6284 /* If not optimizing, call the library function. */
6285 if (!optimize)
6286 break;
6287
6288 if (arglist == 0
6289 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6290 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6291 || TREE_CHAIN (arglist) == 0
6292 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6293 return const0_rtx;
6294 else if (!HAVE_cmpstrsi)
6295 break;
6296 {
6297 tree arg1 = TREE_VALUE (arglist);
6298 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6299 tree offset;
6300 tree len, len2;
6301
6302 len = c_strlen (arg1);
6303 if (len)
6304 len = size_binop (PLUS_EXPR, integer_one_node, len);
6305 len2 = c_strlen (arg2);
6306 if (len2)
6307 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6308
6309 /* If we don't have a constant length for the first, use the length
6310 of the second, if we know it. We don't require a constant for
6311 this case; some cost analysis could be done if both are available
6312 but neither is constant. For now, assume they're equally cheap.
6313
6314 If both strings have constant lengths, use the smaller. This
6316 could arise if optimization results in strcmp being called with
6316 two fixed strings, or if the code was machine-generated. We should
6317 add some code to the `memcmp' handler below to deal with such
6318 situations, someday. */
6319 if (!len || TREE_CODE (len) != INTEGER_CST)
6320 {
6321 if (len2)
6322 len = len2;
6323 else if (len == 0)
6324 break;
6325 }
6326 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6327 {
6328 if (tree_int_cst_lt (len2, len))
6329 len = len2;
6330 }
6331
906c4e36 6332 chainon (arglist, build_tree_list (NULL_TREE, len));
bbf6f052
RK
6333 }
6334
6335 /* Falls through. */
6336 case BUILT_IN_MEMCMP:
6337 /* If not optimizing, call the library function. */
6338 if (!optimize)
6339 break;
6340
6341 if (arglist == 0
6342 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6343 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6344 || TREE_CHAIN (arglist) == 0
6345 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6346 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6347 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6348 return const0_rtx;
6349 else if (!HAVE_cmpstrsi)
6350 break;
6351 {
6352 tree arg1 = TREE_VALUE (arglist);
6353 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6354 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6355 rtx result;
6356
6357 int arg1_align
6358 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6359 int arg2_align
6360 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6361 enum machine_mode insn_mode
6362 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6363
6364 /* If we don't have POINTER_TYPE, call the function. */
6365 if (arg1_align == 0 || arg2_align == 0)
6366 {
6367 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6368 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6369 break;
6370 }
6371
6372 /* Make a place to write the result of the instruction. */
6373 result = target;
6374 if (! (result != 0
6375 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6376 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6377 result = gen_reg_rtx (insn_mode);
6378
6379 emit_insn (gen_cmpstrsi (result,
6380 gen_rtx (MEM, BLKmode,
906c4e36
RK
6381 expand_expr (arg1, NULL_RTX, Pmode,
6382 EXPAND_NORMAL)),
bbf6f052 6383 gen_rtx (MEM, BLKmode,
906c4e36
RK
6384 expand_expr (arg2, NULL_RTX, Pmode,
6385 EXPAND_NORMAL)),
6386 expand_expr (len, NULL_RTX, VOIDmode, 0),
6387 GEN_INT (MIN (arg1_align, arg2_align))));
bbf6f052
RK
6388
6389 /* Return the value in the proper mode for this function. */
6390 mode = TYPE_MODE (TREE_TYPE (exp));
6391 if (GET_MODE (result) == mode)
6392 return result;
6393 else if (target != 0)
6394 {
6395 convert_move (target, result, 0);
6396 return target;
6397 }
6398 else
6399 return convert_to_mode (mode, result, 0);
6400 }
6401#else
6402 case BUILT_IN_STRCMP:
6403 case BUILT_IN_MEMCMP:
6404 break;
6405#endif
6406
6407 default: /* just do library call, if unknown builtin */
42b85a55 6408 error ("built-in function `%s' not currently supported",
bbf6f052
RK
6409 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6410 }
6411
6412 /* The switch statement above can drop through to cause the function
6413 to be called normally. */
6414
6415 return expand_call (exp, target, ignore);
6416}
6417\f
0006469d
TW
6418/* Built-in functions to perform an untyped call and return. */
6419
6420/* For each register that may be used for calling a function, this
6421 gives a mode used to copy the register's value. VOIDmode indicates
6422 the register is not used for calling a function. If the machine
6423 has register windows, this gives only the outbound registers.
6424 INCOMING_REGNO gives the corresponding inbound register. */
6425static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
6426
6427/* For each register that may be used for returning values, this gives
6428 a mode used to copy the register's value. VOIDmode indicates the
6429 register is not used for returning values. If the machine has
6430 register windows, this gives only the outbound registers.
6431 INCOMING_REGNO gives the corresponding inbound register. */
6432static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
6433
6434/* Return the size required for the block returned by __builtin_apply_args,
6435 and initialize apply_args_mode. */
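 /* The block layout rounds each slot up to its mode's alignment:
    e.g. (illustrative) a running size of 5 with a 4-byte-aligned
    4-byte mode is rounded to 8, then becomes 12 after the slot. */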
6436static int
6437apply_args_size ()
6438{
6439 static int size = -1;
6440 int align, regno;
6441 enum machine_mode mode;
6442
6443 /* The values computed by this function never change. */
6444 if (size < 0)
6445 {
6446 /* The first value is the incoming arg-pointer. */
6447 size = GET_MODE_SIZE (Pmode);
6448
6449 /* The second value is the structure value address unless this is
6450 passed as an "invisible" first argument. */
6451 if (struct_value_rtx)
6452 size += GET_MODE_SIZE (Pmode);
6453
6454 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6455 if (FUNCTION_ARG_REGNO_P (regno))
6456 {
6457 /* Search for the proper mode for copying this register's
6458 value. I'm not sure this is right, but it works so far. */
6459 enum machine_mode best_mode = VOIDmode;
6460
6461 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6462 mode != VOIDmode;
6463 mode = GET_MODE_WIDER_MODE (mode))
6464 if (HARD_REGNO_MODE_OK (regno, mode)
6465 && HARD_REGNO_NREGS (regno, mode) == 1)
6466 best_mode = mode;
6467
6468 if (best_mode == VOIDmode)
6469 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6470 mode != VOIDmode;
6471 mode = GET_MODE_WIDER_MODE (mode))
6472 if (HARD_REGNO_MODE_OK (regno, mode)
6473 && (mov_optab->handlers[(int) mode].insn_code
6474 != CODE_FOR_nothing))
6475 best_mode = mode;
6476
6477 mode = best_mode;
6478 if (mode == VOIDmode)
6479 abort ();
6480
6481 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6482 if (size % align != 0)
6483 size = CEIL (size, align) * align;
6484 size += GET_MODE_SIZE (mode);
6485 apply_args_mode[regno] = mode;
6486 }
6487 else
6488 apply_args_mode[regno] = VOIDmode;
6489 }
6490 return size;
6491}
6492
6493/* Return the size required for the block returned by __builtin_apply,
6494 and initialize apply_result_mode. */
6495static int
6496apply_result_size ()
6497{
6498 static int size = -1;
6499 int align, regno;
6500 enum machine_mode mode;
6501
6502 /* The values computed by this function never change. */
6503 if (size < 0)
6504 {
6505 size = 0;
6506
6507 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6508 if (FUNCTION_VALUE_REGNO_P (regno))
6509 {
6510 /* Search for the proper mode for copying this register's
6511 value. I'm not sure this is right, but it works so far. */
6512 enum machine_mode best_mode = VOIDmode;
6513
6514 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6515 mode != TImode;
6516 mode = GET_MODE_WIDER_MODE (mode))
6517 if (HARD_REGNO_MODE_OK (regno, mode))
6518 best_mode = mode;
6519
6520 if (best_mode == VOIDmode)
6521 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6522 mode != VOIDmode;
6523 mode = GET_MODE_WIDER_MODE (mode))
6524 if (HARD_REGNO_MODE_OK (regno, mode)
6525 && (mov_optab->handlers[(int) mode].insn_code
6526 != CODE_FOR_nothing))
6527 best_mode = mode;
6528
6529 mode = best_mode;
6530 if (mode == VOIDmode)
6531 abort ();
6532
6533 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6534 if (size % align != 0)
6535 size = CEIL (size, align) * align;
6536 size += GET_MODE_SIZE (mode);
6537 apply_result_mode[regno] = mode;
6538 }
6539 else
6540 apply_result_mode[regno] = VOIDmode;
6541
6542 /* Allow targets that use untyped_call and untyped_return to override
6543 the size so that machine-specific information can be stored here. */
6544#ifdef APPLY_RESULT_SIZE
6545 size = APPLY_RESULT_SIZE;
6546#endif
6547 }
6548 return size;
6549}
6550
6551#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
6552/* Create a vector describing the result block RESULT. If SAVEP is true,
6553 the result block is used to save the values; otherwise it is used to
6554 restore the values. */
6555static rtx
6556result_vector (savep, result)
6557 int savep;
6558 rtx result;
6559{
6560 int regno, size, align, nelts;
6561 enum machine_mode mode;
6562 rtx reg, mem;
6563 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
6564
6565 size = nelts = 0;
6566 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6567 if ((mode = apply_result_mode[regno]) != VOIDmode)
6568 {
6569 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6570 if (size % align != 0)
6571 size = CEIL (size, align) * align;
6572 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
6573 mem = change_address (result, mode,
6574 plus_constant (XEXP (result, 0), size));
6575 savevec[nelts++] = (savep
6576 ? gen_rtx (SET, VOIDmode, mem, reg)
6577 : gen_rtx (SET, VOIDmode, reg, mem));
6578 size += GET_MODE_SIZE (mode);
6579 }
6580 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
6581}
6582#endif /* HAVE_untyped_call or HAVE_untyped_return */
6583
6584
6585/* Save the state required to perform an untyped call with the same
6586 arguments as were passed to the current function. */
6587static rtx
6588expand_builtin_apply_args ()
6589{
6590 rtx registers;
6591 int size, align, regno;
6592 enum machine_mode mode;
6593
6594 /* Create a block where the arg-pointer, structure value address,
6595 and argument registers can be saved. */
6596 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
6597
6598 /* Walk past the arg-pointer and structure value address. */
6599 size = GET_MODE_SIZE (Pmode);
6600 if (struct_value_rtx)
6601 size += GET_MODE_SIZE (Pmode);
6602
6603 /* Save each register used in calling a function to the block. */
6604 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6605 if ((mode = apply_args_mode[regno]) != VOIDmode)
6606 {
6607 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6608 if (size % align != 0)
6609 size = CEIL (size, align) * align;
6610 emit_move_insn (change_address (registers, mode,
6611 plus_constant (XEXP (registers, 0),
6612 size)),
6613 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
6614 size += GET_MODE_SIZE (mode);
6615 }
6616
6617 /* Save the arg pointer to the block. */
6618 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
6619 copy_to_reg (virtual_incoming_args_rtx));
6620 size = GET_MODE_SIZE (Pmode);
6621
6622 /* Save the structure value address unless this is passed as an
6623 "invisible" first argument. */
6624 if (struct_value_incoming_rtx)
6625 {
6626 emit_move_insn (change_address (registers, Pmode,
6627 plus_constant (XEXP (registers, 0),
6628 size)),
6629 copy_to_reg (struct_value_incoming_rtx));
6630 size += GET_MODE_SIZE (Pmode);
6631 }
6632
6633 /* Return the address of the block. */
6634 return copy_addr_to_reg (XEXP (registers, 0));
6635}
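
/* Illustration (not part of the original file): these expanders implement
   the GNU C builtins used for argument forwarding, roughly

	void *args = __builtin_apply_args ();
	void *result = __builtin_apply (fn, args, size);
	__builtin_return (result);

   where FN and SIZE are placeholders for a caller-supplied function and
   argument-block size, not names taken from this file.  */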

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx use_insns = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
		  gen_rtx (MEM, Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_binop (Pmode, add_optab, incoming_args, argsize,
				incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();
  emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros affect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = copy_addr_to_reg (push_block (argsize, 0, 0));
  emit_block_move (gen_rtx (MEM, BLKmode, dest),
		   gen_rtx (MEM, BLKmode, incoming_args),
		   argsize,
		   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx (MEM, BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx (REG, mode, regno);
	emit_move_insn (reg,
			change_address (arguments, mode,
					plus_constant (XEXP (arguments, 0),
						       size)));

	push_to_sequence (use_insns);
	emit_insn (gen_rtx (USE, VOIDmode, reg));
	use_insns = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
		      change_address (arguments, Pmode,
				      plus_constant (XEXP (arguments, 0),
						     size)));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
	{
	  push_to_sequence (use_insns);
	  emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
	  use_insns = get_insns ();
	  end_sequence ();
	}
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &use_insns);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    if (valreg)
	      abort (); /* HAVE_untyped_call required.  */
	    valreg = gen_rtx (REG, mode, regno);
	  }

      emit_call_insn (gen_call_value (valreg,
				      gen_rtx (MEM, FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (change_address (result, GET_MODE (valreg),
				      XEXP (result, 0)),
		      valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted and write the USE insns before it.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the USE insns before the CALL.  */
  emit_insns_before (use_insns, call_insn);

  /* Restore the stack.  */
  emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
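
/* Illustration (not part of the original file): the insn stream built
   above is, in outline,

	save stack pointer
	push a fresh argument block; block-move the saved arguments into it
	moves reloading each argument register from the block
	(use (reg ...)) for each reloaded register, placed before the call
	the call insn (untyped_call saves the return registers itself;
	the call_value path saves the single return register explicitly)
	restore stack pointer

   The USE insns mark the argument registers as live at the call so the
   reloading moves are not deleted as dead stores.  */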

/* Perform an untyped return.  */

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx use_insns = 0;

  apply_result_size ();
  result = gen_rtx (MEM, BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
	emit_move_insn (reg,
			change_address (result, mode,
					plus_constant (XEXP (result, 0),
						       size)));

	push_to_sequence (use_insns);
	emit_insn (gen_rtx (USE, VOIDmode, reg));
	use_insns = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (use_insns);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
\f
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (exp, post)
     register tree exp;
     int post;
{
  register rtx op0, op1;
  register rtx temp, value;
  register tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
		 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (- INTVAL (op1));
      this_optab = add_optab;
    }

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_operand_predicate[icode][0]) (op0, mode)
	  && (*insn_operand_predicate[icode][1]) (op0, mode)
	  && (*insn_operand_predicate[icode][2]) (op1, mode))
	single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.  */
  if (op0_is_copy || (!post && !single_insn))
    {
      /* This is the easiest way to increment the value wherever it is.
	 Problems with multiple evaluation of INCREMENTED are prevented
	 because either (1) it is a component_ref or preincrement,
	 in which case it was stabilized above, or (2) it is an array_ref
	 with constant index in an array in a register, which is
	 safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
			    ? MINUS_EXPR : PLUS_EXPR),
			   TREE_TYPE (exp),
			   incremented,
			   TREE_OPERAND (exp, 1));
      temp = expand_assignment (incremented, newexp, ! post, 0);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
	 If there is an insn to add or subtract in this mode, queue it.  */

#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_operand_predicate[icode][0]) (op0, mode)
	  && (*insn_operand_predicate[icode][1]) (op0, mode))
	{
	  if (! (*insn_operand_predicate[icode][2]) (op1, mode))
	    op1 = force_reg (mode, op1);

	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
	}
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1, op0,
		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
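
/* Illustration (not part of the original file): for a postincrement such
   as `x++' where X is a true lvalue and the target has a matching add
   insn, the add is queued with enqueue_insn rather than emitted at once;
   the rtx returned stands for X's value before the increment, and a later
   emit_queue call emits the deferred add.  */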
\f
/* Expand all function calls contained within EXP, innermost ones first.
   But don't look within expressions that have sequence points.
   For each CALL_EXPR, record the rtx for its value
   in the CALL_EXPR_RTL field.  */

static void
preexpand_calls (exp)
     tree exp;
{
  register int nops, i;
  int type = TREE_CODE_CLASS (TREE_CODE (exp));

  if (! do_preexpand_calls)
    return;

  /* Only expressions and references can contain calls.  */

  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
    return;

  switch (TREE_CODE (exp))
    {
    case CALL_EXPR:
      /* Do nothing if already expanded.  */
      if (CALL_EXPR_RTL (exp) != 0)
	return;

      /* Do nothing to built-in functions.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
	  || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
	  || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
      return;

    case COMPOUND_EXPR:
    case COND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* If we find one of these, then we can be sure
	 the adjust will be done for it (since it makes jumps).
	 Do it now, so that if this is inside an argument
	 of a function, we don't get the stack adjustment
	 after some other args have already been pushed.  */
      do_pending_stack_adjust ();
      return;

    case BLOCK:
    case RTL_EXPR:
    case WITH_CLEANUP_EXPR:
      return;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
	return;
    }

  nops = tree_code_length[(int) TREE_CODE (exp)];
  for (i = 0; i < nops; i++)
    if (TREE_OPERAND (exp, i) != 0)
      {
	type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
	if (type == 'e' || type == '<' || type == '1' || type == '2'
	    || type == 'r')
	  preexpand_calls (TREE_OPERAND (exp, i));
      }
}
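
/* Illustration (not part of the original file): given an expression such
   as `f () + g ()', this walk expands both calls first and records their
   values in CALL_EXPR_RTL, so expanding the addition afterwards only reads
   the two recorded rtx values.  The walk deliberately does not descend
   into codes like COMPOUND_EXPR, whose sequence points would make early
   expansion of the later operand incorrect.  */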
\f
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from a function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
	adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
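
/* Illustration (not part of the original file): PENDING_STACK_ADJUST
   accumulates the argument bytes of calls whose pops were deferred, so two
   back-to-back calls each pushing, say, 8 bytes can often be cleaned up by
   a single adjust_stack (GEN_INT (16)) here instead of two separate
   8-byte pops.  */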

/* Expand all cleanups up to OLD_CLEANUPS.
   Needed here, and also for language-dependent calls.  */

void
expand_cleanups_to (old_cleanups)
     tree old_cleanups;
{
  while (cleanups_this_call != old_cleanups)
    {
      expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
      cleanups_this_call = TREE_CHAIN (cleanups_this_call);
    }
}
\f
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  register enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  rtx comparison = 0;
  int i;
  tree type;

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
	emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
	emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
	goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
	 narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->non-zero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

#if 0
      /* This never takes fewer insns than evaluating the PLUS_EXPR followed
	 by a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
		   TREE_OPERAND (exp, 0),
		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
				 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
      comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
				   TREE_OPERAND (exp, 0),
				   TREE_OPERAND (exp, 1)),
			    NE, NE);
      break;

    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
	 smallest type that fits.  If the machine doesn't have comparisons
	 that small, it will be converted back to the wider comparison.
	 This helps if we are testing the sign bit of a narrower object.
	 combine can't do this for us because it can't know whether a
	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
	  && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
	  && (type = type_for_size (i + 1, 1)) != 0
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
	      != CODE_FOR_nothing))
	{
	  do_jump (convert (type, exp), if_false_label, if_true_label);
	  break;
	}
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
	if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
	if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      free_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
      {
	int bitsize, bitpos, unsignedp;
	enum machine_mode mode;
	tree type;
	tree offset;
	int volatilep = 0;

	/* Get description of this reference.  We don't actually care
	   about the underlying object here.  */
	get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep);

	type = type_for_size (bitsize, unsignedp);
	if (! SLOW_BYTE_ACCESS
	    && type != 0 && bitsize >= 0
	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
		!= CODE_FOR_nothing))
	  {
	    do_jump (convert (type, exp), if_false_label, if_true_label);
	    break;
	  }
	goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
	  && integer_zerop (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
	       && integer_onep (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
	{
	  register rtx label1 = gen_label_rtx ();
	  drop_through_label = gen_label_rtx ();
	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
	  /* Now the THEN-expression.  */
	  do_jump (TREE_OPERAND (exp, 1),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  /* In case the do_jump just above never jumps.  */
	  do_pending_stack_adjust ();
	  emit_label (label1);
	  /* Now the ELSE-expression.  */
	  do_jump (TREE_OPERAND (exp, 2),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	}
      break;

    case EQ_EXPR:
      if (integer_zerop (TREE_OPERAND (exp, 1)))
	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
		== MODE_INT)
	       &&
	       !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_equality (exp, if_false_label, if_true_label);
      else
	comparison = compare (exp, EQ, EQ);
      break;

    case NE_EXPR:
      if (integer_zerop (TREE_OPERAND (exp, 1)))
	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
		== MODE_INT)
	       &&
	       !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_equality (exp, if_true_label, if_false_label);
      else
	comparison = compare (exp, NE, NE);
      break;

    case LT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
	comparison = compare (exp, LT, LTU);
      break;

    case LE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
	comparison = compare (exp, LE, LEU);
      break;

    case GT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
	comparison = compare (exp, GT, GTU);
      break;

    case GE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
	comparison = compare (exp, GE, GEU);
      break;

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
	comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
	comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	       && !can_compare_p (GET_MODE (temp)))
	/* Note swapping the labels gives us not-equal.  */
	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
				       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				       GET_MODE (temp), NULL_RTX, 0);
      else
	abort ();
    }

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
	emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  free_temp_slots ();

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
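
/* Illustration (not part of the original file): for `if (a && b)' the
   TRUTH_ANDIF_EXPR case above emits, in outline,

	evaluate a; jump to the false label if zero
	evaluate b; jump to the false label if zero
	... code for the then-clause ...

   so the second operand is never evaluated when the first is false, and
   no boolean value is ever materialized in a register.  */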
\f
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
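
/* Illustration (not part of the original file): for a signed DImode test
   A > B on a 32-bit target this loop emits, in outline,

	if (A.high >  B.high) goto if_true;    (signed GT)
	if (A.high != B.high) goto if_false;
	if (A.low  >  B.low)  goto if_true;    (unsigned GTU)
	goto if_false;

   Only the high-order word carries the sign; equality of all words falls
   through to the false label.  */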

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

static void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
				   operand_subword_force (op1, i, mode),
				   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
				   word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
							  GET_MODE (op0)),
				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
	abort ();

      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = PREV_INSN (get_last_insn ());
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
	 what is generated.  If more than one insn is a branch, or if the
	 branch is not the last insn written, abort.  If we can't invert
	 the branch, make a true label, redirect this jump to that,
	 emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
	abort ();

      /* Here we get the insn before what was just emitted.
	 On some machines, emitting the branch can discard
	 the previous compare insn and emit a replacement.  */
      if (prev == 0)
	/* If there's only one preceding insn...  */
	insn = get_insns ();
      else
	insn = NEXT_INSN (prev);

      for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    if (branch)
	      abort ();
	    branch = insn;
	  }

      if (branch != get_last_insn ())
	abort ();

      if (! invert_jump (branch, if_false_label))
	{
	  if_true_label = gen_label_rtx ();
	  redirect_jump (branch, if_true_label);
	  emit_jump (if_false_label);
	  emit_label (if_true_label);
	}
    }
}
\f
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}

/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
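
/* Illustration (not part of the original file): the operand swap above
   canonicalizes constants into the second position, so a source comparison
   like `5 < x' reaches emit_cmp_insn as `x > 5'; swap_condition adjusts
   the rtx code (LT becomes GT) so the meaning of the test is unchanged.  */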
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

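  /* Illustration (not part of the original file): under this
     transformation a test like `(x & 8) != 0' becomes `(x >> 3) & 1', and
     the EQ form `(x & 8) == 0' becomes `((x >> 3) ^ 1) & 1'; the AND is
     emitted last so it can combine with surrounding code.  */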
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
						    NULL_RTX, VOIDmode, 0)));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
	      < TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef BYTE_LOADS_SIGN_EXTEND
		       : 0
#else
		       : 1
#endif
		       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
			    size_int (bitnum), target, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, target);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (target == 0 || GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
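
/* Illustration (not part of the original file): for `flag = (x < y);' a
   successful emit_store_flag call above yields a single scc-style insn
   that deposits 0 or 1 directly in FLAG's register, whereas the
   set/compare/jump/set fallback emits, in outline,

	flag = 1;  compare x, y;  branch-if-less over next insn;  flag = 0;

   which is why the fallback is used only when no store-flag insn works.  */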
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

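  /* Illustration (not part of the original file): with a case range of,
     say, 3..10, the lower bound 3 has already been subtracted, so RANGE
     is 7 and the single unsigned test `index - 3 > 7' rejects both
     index < 3 (where the subtraction wrapped around to a huge unsigned
     value) and index > 10.  */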
  emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bltu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = memory_address_noforce
    (CASE_VECTOR_MODE,
     gen_rtx (PLUS, Pmode,
	      gen_rtx (MULT, Pmode, index,
		       GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
	      gen_rtx (LABEL_REF, Pmode, table_label)));
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */