/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */

#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void group_insns PROTO((rtx));
static void store_constructor PROTO((tree, rtx));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}
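
/* Illustrative sketch (editor's addition, not part of the original
   source; wrapped in #if 0 so it can never affect compilation).  It
   shows how the tables built above are meant to be consulted: before
   accessing a field of mode MODE directly in memory, callers check
   direct_load or direct_store for that mode.  */
#if 0
static int
example_can_access_directly (mode, loading)
     enum machine_mode mode;
     int loading;
{
  /* Nonzero when init_expr_once found a hard register that some
     recognized insn can load (or store) in MODE.  */
  return loading ? direct_load[(int) mode] : direct_store[(int) mode];
}
#endif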

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
	 to facilitate use of autoincrement.
	 Make a copy of the contents of the memory location
	 rather than a copy of the address, but not
	 if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  XEXP (x, 0) = QUEUED_VAR (y);
	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (x));
	      emit_insn_before (gen_move_insn (temp, x),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return x;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
	{
	  XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
	  XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
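
/* Illustrative sketch (editor's addition, not in the original file;
   under #if 0 so it is never compiled).  The intended calling
   pattern: run every operand through protect_from_queue immediately
   before building an insn, with MODIFY nonzero only for operands
   that will be stored into.  */
#if 0
static void
example_emit_add (dest, src)
     rtx dest, src;
{
  dest = protect_from_queue (dest, 1);	/* will be written */
  src = protect_from_queue (src, 0);	/* only read */
  emit_insn (gen_rtx (SET, VOIDmode, dest,
		      gen_rtx (PLUS, GET_MODE (dest), dest, src)));
}
#endif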

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi
	  if (HAVE_extendpsisi)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if ((can_extend_p (to_mode, intermediate, unsignedp)
		 != CODE_FOR_nothing)
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
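
/* Illustrative sketch (editor's addition, not part of the original
   source; under #if 0).  A typical call: widen an HImode value into a
   fresh SImode pseudo, zero-extending because the source type is
   unsigned (UNSIGNEDP == 1 selects ZERO_EXTEND above).  */
#if 0
static rtx
example_zero_extend_hi (hi_val)
     rtx hi_val;
{
  rtx si_reg = gen_reg_rtx (SImode);
  convert_move (si_reg, hi_val, 1);
  return si_reg;
}
#endif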

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If X is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
		      && direct_load[(int) mode]
		      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
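
/* Illustrative sketch (editor's addition, absent from the original
   file; under #if 0).  Unlike convert_move, convert_to_mode returns
   its result and may reuse X in place when a lowpart reference
   suffices, so the value returned need not be a fresh pseudo.  */
#if 0
static rtx
example_narrow_to_qi (si_val)
     rtx si_val;
{
  return convert_to_mode (QImode, si_val, 0);
}
#endif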
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
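
/* Worked example (editor's addition, not part of the original file):
   assuming MOVE_MAX == 4, ALIGN == 4 and mov patterns for SImode,
   HImode and QImode, move_by_pieces_ninsns (11, 4) counts
   2 SImode moves (8 bytes) + 1 HImode move (2 bytes) + 1 QImode move
   (1 byte) = 4 insns; emit_block_move compares that figure against
   MOVE_RATIO before choosing the move-by-pieces path.  */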
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && (unsigned HOST_WIDE_INT) INTVAL (size) <= GET_MODE_MASK (mode)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif
    }
}
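
/* Illustrative sketch (editor's addition, not in the original source;
   under #if 0).  Copying a 16-byte word-aligned aggregate: with a
   constant SIZE whose move_by_pieces cost is below MOVE_RATIO this
   expands to scalar moves, otherwise to a movstr pattern or, failing
   that, a memcpy/bcopy library call.  */
#if 0
static void
example_copy_block (dest_mem, src_mem)
     rtx dest_mem, src_mem;
{
  emit_block_move (dest_mem, src_mem, GEN_INT (16), UNITS_PER_WORD);
}
#endif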
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			   GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
			    GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}

/* Mark the instructions since PREV as a libcall block.
   Add a REG_LIBCALL note to the first insn after PREV and
   a REG_RETVAL note to the most recent insn.  */

static void
group_insns (prev)
     rtx prev;
{
  rtx insn_first;
  rtx insn_last;

  /* Find the instructions to mark.  */
  if (prev)
    insn_first = NEXT_INSN (prev);
  else
    insn_first = get_insns ();

  insn_last = get_last_insn ();

  REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
				   REG_NOTES (insn_last));

  REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
				    REG_NOTES (insn_first));
}
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}

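/* Illustrative sketch (editor's addition, not in the original file;
   under #if 0).  Zeroing a 32-byte BLKmode object takes the
   memset/bzero library path; a scalar object is cleared with a single
   move of const0_rtx instead.  */
#if 0
static void
example_zero_object (blk_mem)
     rtx blk_mem;
{
  clear_storage (blk_mem, 32);
}
#endif
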
/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
    submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
			     (class == MODE_COMPLEX_INT
			      ? MODE_INT : MODE_FLOAT),
			     0);

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && submode != BLKmode
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx prev = get_last_insn ();

      /* Tell flow that the whole of the destination is being set.  */
      if (GET_CODE (x) == REG)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 ((stack ? change_address (x, submode, (rtx) 0)
		   : gen_highpart (submode, x)),
		  gen_highpart (submode, y)));
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 ((stack ? change_address (x, submode, (rtx) 0)
		   : gen_lowpart (submode, x)),
		  gen_lowpart (submode, y)));

      group_insns (prev);

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx prev_insn = get_last_insn ();

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}
      /* Mark these insns as a libcall block.  */
      group_insns (prev_insn);

      return last_insn;
    }
  else
    abort ();
}
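
/* Illustrative sketch (editor's addition, not part of the original
   file; under #if 0).  emit_move_insn picks the strategy itself: a
   native mov pattern when one exists, a split into real and imaginary
   parts for complex modes, or word-by-word moves for wide modes.  */
#if 0
static void
example_store_constant (si_mem)
     rtx si_mem;
{
  emit_move_insn (si_mem, GEN_INT (42));
}
#endif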
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

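/* Illustrative sketch (editor's addition, not in the original source;
   under #if 0).  Reserving 64 bytes of outgoing-argument space and
   obtaining an address for the start of the block.  */
#if 0
static rtx
example_reserve_arg_block ()
{
  return push_block (GEN_INT (64), 0, 0);
}
#endif
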
rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}

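/* Illustrative sketch (editor's addition, not in the original file;
   under #if 0).  Pushing one word: wrap gen_push_operand in a MEM and
   store into it; STACK_PUSH_CODE supplies PRE_DEC or PRE_INC to match
   the target's stack direction.  */
#if 0
static void
example_push_word (word_val)
     rtx word_val;
{
  emit_move_insn (gen_rtx (MEM, word_mode, gen_push_operand ()), word_val);
}
#endif
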
/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */

1788void
1789emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1790 args_addr, args_so_far)
1791 register rtx x;
1792 enum machine_mode mode;
1793 tree type;
1794 rtx size;
1795 int align;
1796 int partial;
1797 rtx reg;
1798 int extra;
1799 rtx args_addr;
1800 rtx args_so_far;
1801{
1802 rtx xinner;
1803 enum direction stack_direction
1804#ifdef STACK_GROWS_DOWNWARD
1805 = downward;
1806#else
1807 = upward;
1808#endif
1809
1810 /* Decide where to pad the argument: `downward' for below,
1811 `upward' for above, or `none' for don't pad it.
1812 Default is below for small data on big-endian machines; else above. */
1813 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1814
1815 /* Invert direction if stack is post-update. */
1816 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1817 if (where_pad != none)
1818 where_pad = (where_pad == downward ? upward : downward);
1819
1820 xinner = x = protect_from_queue (x, 0);
1821
1822 if (mode == BLKmode)
1823 {
1824 /* Copy a block into the stack, entirely or partially. */
1825
1826 register rtx temp;
1827 int used = partial * UNITS_PER_WORD;
1828 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1829 int skip;
1830
1831 if (size == 0)
1832 abort ();
1833
1834 used -= offset;
1835
1836 /* USED is now the # of bytes we need not copy to the stack
1837 because registers will take care of them. */
1838
1839 if (partial != 0)
1840 xinner = change_address (xinner, BLKmode,
1841 plus_constant (XEXP (xinner, 0), used));
1842
1843 /* If the partial register-part of the arg counts in its stack size,
1844 skip the part of stack space corresponding to the registers.
1845 Otherwise, start copying to the beginning of the stack space,
1846 by setting SKIP to 0. */
1847#ifndef REG_PARM_STACK_SPACE
1848 skip = 0;
1849#else
1850 skip = used;
1851#endif
1852
1853#ifdef PUSH_ROUNDING
1854 /* Do it with several push insns if that doesn't take lots of insns
1855 and if there is no difficulty with push insns that skip bytes
1856 on the stack for alignment purposes. */
1857 if (args_addr == 0
1858 && GET_CODE (size) == CONST_INT
1859 && skip == 0
1860 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1861 < MOVE_RATIO)
1862 /* Here we avoid the case of a structure whose weak alignment
1863 forces many pushes of a small amount of data,
1864 and such small pushes do rounding that causes trouble. */
1865 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1866 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 1867 || PUSH_ROUNDING (align) == align)
1868 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1869 {
1870 /* Push padding now if padding above and stack grows down,
1871 or if padding below and stack grows up.
1872 But if space already allocated, this has already been done. */
1873 if (extra && args_addr == 0
1874 && where_pad != none && where_pad != stack_direction)
906c4e36 1875 anti_adjust_stack (GEN_INT (extra));
1876
1877 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1878 INTVAL (size) - used, align);
1879 }
1880 else
1881#endif /* PUSH_ROUNDING */
1882 {
1883 /* Otherwise make space on the stack and copy the data
1884 to the address of that space. */
1885
1886 /* Deduct words put into registers from the size we must copy. */
1887 if (partial != 0)
1888 {
1889 if (GET_CODE (size) == CONST_INT)
906c4e36 1890 size = GEN_INT (INTVAL (size) - used);
1891 else
1892 size = expand_binop (GET_MODE (size), sub_optab, size,
1893 GEN_INT (used), NULL_RTX, 0,
1894 OPTAB_LIB_WIDEN);
1895 }
1896
1897 /* Get the address of the stack space.
1898 In this case, we do not deal with EXTRA separately.
1899 A single stack adjust will do. */
1900 if (! args_addr)
1901 {
1902 temp = push_block (size, extra, where_pad == downward);
1903 extra = 0;
1904 }
1905 else if (GET_CODE (args_so_far) == CONST_INT)
1906 temp = memory_address (BLKmode,
1907 plus_constant (args_addr,
1908 skip + INTVAL (args_so_far)));
1909 else
1910 temp = memory_address (BLKmode,
1911 plus_constant (gen_rtx (PLUS, Pmode,
1912 args_addr, args_so_far),
1913 skip));
1914
1915 /* TEMP is the address of the block. Copy the data there. */
1916 if (GET_CODE (size) == CONST_INT
1917 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1918 < MOVE_RATIO))
1919 {
1920 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1921 INTVAL (size), align);
1922 goto ret;
1923 }
1924 /* Try the most limited insn first, because there's no point
1925 including more than one in the machine description unless
1926 the more limited one has some advantage. */
1927#ifdef HAVE_movstrqi
1928 if (HAVE_movstrqi
1929 && GET_CODE (size) == CONST_INT
1930 && ((unsigned) INTVAL (size)
1931 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1932 {
1933 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1934 xinner, size, GEN_INT (align));
1935 if (pat != 0)
1936 {
1937 emit_insn (pat);
1938 goto ret;
1939 }
1940 }
1941#endif
1942#ifdef HAVE_movstrhi
1943 if (HAVE_movstrhi
1944 && GET_CODE (size) == CONST_INT
1945 && ((unsigned) INTVAL (size)
1946 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1947 {
1948 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1949 xinner, size, GEN_INT (align));
1950 if (pat != 0)
1951 {
1952 emit_insn (pat);
1953 goto ret;
1954 }
1955 }
1956#endif
1957#ifdef HAVE_movstrsi
1958 if (HAVE_movstrsi)
1959 {
1960 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1961 xinner, size, GEN_INT (align));
1962 if (pat != 0)
1963 {
1964 emit_insn (pat);
1965 goto ret;
1966 }
1967 }
1968#endif
1969#ifdef HAVE_movstrdi
1970 if (HAVE_movstrdi)
1971 {
1972 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1973 xinner, size, GEN_INT (align));
1974 if (pat != 0)
1975 {
1976 emit_insn (pat);
1977 goto ret;
1978 }
1979 }
1980#endif
1981
1982#ifndef ACCUMULATE_OUTGOING_ARGS
1983 /* If the source is referenced relative to the stack pointer,
1984 copy it to another register to stabilize it. We do not need
1985 to do this if we know that we won't be changing sp. */
1986
1987 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1988 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1989 temp = copy_to_reg (temp);
1990#endif
1991
1992 /* Make inhibit_defer_pop nonzero around the library call
1993 to force it to pop the bcopy-arguments right away. */
1994 NO_DEFER_POP;
1995#ifdef TARGET_MEM_FUNCTIONS
d562e42e 1996 emit_library_call (memcpy_libfunc, 0,
bbf6f052 1997 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1998 convert_to_mode (TYPE_MODE (sizetype),
1999 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2000 TYPE_MODE (sizetype));
bbf6f052 2001#else
d562e42e 2002 emit_library_call (bcopy_libfunc, 0,
bbf6f052 2003 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2004 convert_to_mode (TYPE_MODE (sizetype),
2005 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2006 TYPE_MODE (sizetype));
2007#endif
2008 OK_DEFER_POP;
2009 }
2010 }
2011 else if (partial > 0)
2012 {
2013 /* Scalar partly in registers. */
2014
2015 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2016 int i;
2017 int not_stack;
2018 /* # words of start of argument
2019 that we must make space for but need not store. */
2020 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2021 int args_offset = INTVAL (args_so_far);
2022 int skip;
2023
2024 /* Push padding now if padding above and stack grows down,
2025 or if padding below and stack grows up.
2026 But if space already allocated, this has already been done. */
2027 if (extra && args_addr == 0
2028 && where_pad != none && where_pad != stack_direction)
906c4e36 2029 anti_adjust_stack (GEN_INT (extra));
2030
2031 /* If we make space by pushing it, we might as well push
2032 the real data. Otherwise, we can leave OFFSET nonzero
2033 and leave the space uninitialized. */
2034 if (args_addr == 0)
2035 offset = 0;
2036
2037 /* Now NOT_STACK gets the number of words that we don't need to
2038 allocate on the stack. */
2039 not_stack = partial - offset;
2040
2041 /* If the partial register-part of the arg counts in its stack size,
2042 skip the part of stack space corresponding to the registers.
2043 Otherwise, start copying to the beginning of the stack space,
2044 by setting SKIP to 0. */
2045#ifndef REG_PARM_STACK_SPACE
2046 skip = 0;
2047#else
2048 skip = not_stack;
2049#endif
2050
2051 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2052 x = validize_mem (force_const_mem (mode, x));
2053
2054 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2055 SUBREGs of such registers are not allowed. */
2056 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2057 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2058 x = copy_to_reg (x);
2059
2060 /* Loop over all the words allocated on the stack for this arg. */
2061 /* We can do it by words, because any scalar bigger than a word
2062 has a size that is a multiple of a word. */
2063#ifndef PUSH_ARGS_REVERSED
2064 for (i = not_stack; i < size; i++)
2065#else
2066 for (i = size - 1; i >= not_stack; i--)
2067#endif
2068 if (i >= not_stack + offset)
2069 emit_push_insn (operand_subword_force (x, i, mode),
2070 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2071 0, args_addr,
2072 GEN_INT (args_offset + ((i - not_stack + skip)
2073 * UNITS_PER_WORD)));
2074 }
2075 else
2076 {
2077 rtx addr;
2078
2079 /* Push padding now if padding above and stack grows down,
2080 or if padding below and stack grows up.
2081 But if space already allocated, this has already been done. */
2082 if (extra && args_addr == 0
2083 && where_pad != none && where_pad != stack_direction)
906c4e36 2084 anti_adjust_stack (GEN_INT (extra));
2085
2086#ifdef PUSH_ROUNDING
2087 if (args_addr == 0)
2088 addr = gen_push_operand ();
2089 else
2090#endif
2091 if (GET_CODE (args_so_far) == CONST_INT)
2092 addr
2093 = memory_address (mode,
2094 plus_constant (args_addr, INTVAL (args_so_far)));
2095 else
2096 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2097 args_so_far));
2098
2099 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2100 }
2101
2102 ret:
2103 /* If part should go in registers, copy that part
2104 into the appropriate registers. Do this now, at the end,
2105 since mem-to-mem copies above may do function calls. */
cd048831 2106 if (partial > 0 && reg != 0)
2107 move_block_to_reg (REGNO (reg), x, partial, mode);
2108
2109 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 2110 anti_adjust_stack (GEN_INT (extra));
2111}
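
/* [Editor's sketch -- not part of expr.c.  It isolates the PARTIAL
   bookkeeping documented above: the stack space is decreased by
   PARTIAL words rounded *down* to a multiple of PARM_BOUNDARY.
   WORD_BYTES and BOUNDARY_BYTES are hypothetical stand-ins for
   UNITS_PER_WORD and PARM_BOUNDARY / BITS_PER_UNIT.] */

#define WORD_BYTES 4
#define BOUNDARY_BYTES 8

static int
reg_part_bytes_sketch (int partial)
{
  int used = partial * WORD_BYTES;
  /* Same computation as the BLKmode case above: USED -= OFFSET,
     i.e. round down to the parameter boundary.  */
  return used - used % BOUNDARY_BYTES;
}
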
2112\f
2113/* Expand an assignment that stores the value of FROM into TO.
2114 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2115 (This may contain a QUEUED rtx.)
2116 Otherwise, the returned value is not meaningful.
2117
2118 SUGGEST_REG is no longer actually used.
2119 It used to mean: copy the value through a register
2120 and return that register, if that is possible.
2121 But now we do this if WANT_VALUE.
2122
2123 If the value stored is a constant, we return the constant. */
2124
2125rtx
2126expand_assignment (to, from, want_value, suggest_reg)
2127 tree to, from;
2128 int want_value;
2129 int suggest_reg;
2130{
2131 register rtx to_rtx = 0;
2132 rtx result;
2133
2134 /* Don't crash if the lhs of the assignment was erroneous. */
2135
2136 if (TREE_CODE (to) == ERROR_MARK)
906c4e36 2137 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2138
2139 /* Assignment of a structure component needs special treatment
2140 if the structure component's rtx is not simply a MEM.
2141 Assignment of an array element at a constant index
2142 has the same problem. */
2143
2144 if (TREE_CODE (to) == COMPONENT_REF
2145 || TREE_CODE (to) == BIT_FIELD_REF
2146 || (TREE_CODE (to) == ARRAY_REF
2147 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2148 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2149 {
2150 enum machine_mode mode1;
2151 int bitsize;
2152 int bitpos;
7bb0943f 2153 tree offset;
2154 int unsignedp;
2155 int volatilep = 0;
7bb0943f 2156 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2157 &mode1, &unsignedp, &volatilep);
2158
2159 /* If we are going to use store_bit_field and extract_bit_field,
2160 make sure to_rtx will be safe for multiple use. */
2161
2162 if (mode1 == VOIDmode && want_value)
2163 tem = stabilize_reference (tem);
2164
906c4e36 2165 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2166 if (offset != 0)
2167 {
906c4e36 2168 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2169
2170 if (GET_CODE (to_rtx) != MEM)
2171 abort ();
2172 to_rtx = change_address (to_rtx, VOIDmode,
2173 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2174 force_reg (Pmode, offset_rtx)));
2175 }
2176 if (volatilep)
2177 {
2178 if (GET_CODE (to_rtx) == MEM)
2179 MEM_VOLATILE_P (to_rtx) = 1;
2180#if 0 /* This was turned off because, when a field is volatile
2181 in an object which is not volatile, the object may be in a register,
2182 and then we would abort over here. */
2183 else
2184 abort ();
2185#endif
2186 }
2187
2188 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2189 (want_value
2190 /* Spurious cast makes HPUX compiler happy. */
2191 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2192 : VOIDmode),
2193 unsignedp,
2194 /* Required alignment of containing datum. */
2195 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2196 int_size_in_bytes (TREE_TYPE (tem)));
2197 preserve_temp_slots (result);
2198 free_temp_slots ();
2199
2200 /* If we aren't returning a result, just pass on what expand_expr
2201 returned; it was probably const0_rtx. Otherwise, convert RESULT
2202 to the proper mode. */
2203 return (want_value ? convert_to_mode (TYPE_MODE (TREE_TYPE (to)), result,
2204 TREE_UNSIGNED (TREE_TYPE (to)))
2205 : result);
2206 }
2207
2208 /* If the rhs is a function call and its value is not an aggregate,
2209 call the function before we start to compute the lhs.
2210 This is needed for correct code for cases such as
2211 val = setjmp (buf) on machines where reference to val
2212 requires loading up part of an address in a separate insn. */
2213 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from))
2214 {
2215 rtx value = expand_expr (from, NULL_RTX, VOIDmode, 0);
2216 if (to_rtx == 0)
2217 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2218 emit_move_insn (to_rtx, value);
2219 preserve_temp_slots (to_rtx);
2220 free_temp_slots ();
2221 return to_rtx;
2222 }
2223
2224 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2225 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2226
2227 if (to_rtx == 0)
906c4e36 2228 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
bbf6f052 2229
2230 /* Don't move directly into a return register. */
2231 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2232 {
66538193 2233 rtx temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
2234 emit_move_insn (to_rtx, temp);
2235 preserve_temp_slots (to_rtx);
2236 free_temp_slots ();
2237 return to_rtx;
2238 }
2239
2240 /* In case we are returning the contents of an object which overlaps
2241 the place the value is being stored, use a safe function when copying
2242 a value through a pointer into a structure value return block. */
2243 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2244 && current_function_returns_struct
2245 && !current_function_returns_pcc_struct)
2246 {
906c4e36 2247 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2248 rtx size = expr_size (from);
2249
2250#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2251 emit_library_call (memcpy_libfunc, 0,
2252 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2253 XEXP (from_rtx, 0), Pmode,
2254 convert_to_mode (TYPE_MODE (sizetype),
2255 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2256 TYPE_MODE (sizetype));
bbf6f052 2257#else
d562e42e 2258 emit_library_call (bcopy_libfunc, 0,
2259 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2260 XEXP (to_rtx, 0), Pmode,
2261 convert_to_mode (TYPE_MODE (sizetype),
2262 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2263 TYPE_MODE (sizetype));
2264#endif
2265
2266 preserve_temp_slots (to_rtx);
2267 free_temp_slots ();
2268 return to_rtx;
2269 }
2270
2271 /* Compute FROM and store the value in the rtx we got. */
2272
2273 result = store_expr (from, to_rtx, want_value);
2274 preserve_temp_slots (result);
2275 free_temp_slots ();
2276 return result;
2277}
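
/* [Editor's illustration -- not part of expr.c.  A source-level case
   of the CALL_EXPR rule above: the call is expanded before the lhs,
   because on some machines the address of `val' is built in two
   insns, and a longjmp back through setjmp must not find it
   half-built.] */

#include <setjmp.h>

static jmp_buf buf;
static int val;

static void
setjmp_assignment_example (void)
{
  val = setjmp (buf);
}
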
2278
2279/* Generate code for computing expression EXP,
2280 and storing the value into TARGET.
2281 Returns TARGET or an equivalent value.
2282 TARGET may contain a QUEUED rtx.
2283
2284 If SUGGEST_REG is nonzero, copy the value through a register
2285 and return that register, if that is possible.
2286
2287 If the value stored is a constant, we return the constant. */
2288
2289rtx
2290store_expr (exp, target, suggest_reg)
2291 register tree exp;
2292 register rtx target;
2293 int suggest_reg;
2294{
2295 register rtx temp;
2296 int dont_return_target = 0;
2297
2298 if (TREE_CODE (exp) == COMPOUND_EXPR)
2299 {
2300 /* Perform first part of compound expression, then assign from second
2301 part. */
2302 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2303 emit_queue ();
2304 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2305 }
2306 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2307 {
2308 /* For a conditional expression, get a safe form of the target. Then
2309 test the condition, doing the appropriate assignment on either
2310 side. This avoids the creation of unnecessary temporaries.
2311 For non-BLKmode, it is more efficient not to do this. */
2312
2313 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2314
2315 emit_queue ();
2316 target = protect_from_queue (target, 1);
2317
2318 NO_DEFER_POP;
2319 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2320 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2321 emit_queue ();
2322 emit_jump_insn (gen_jump (lab2));
2323 emit_barrier ();
2324 emit_label (lab1);
2325 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2326 emit_queue ();
2327 emit_label (lab2);
2328 OK_DEFER_POP;
2329 return target;
2330 }
c2e6aff6 2331 else if (suggest_reg && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
2332 && GET_MODE (target) != BLKmode)
2333 /* If target is in memory and caller wants value in a register instead,
2334 arrange that. Pass TARGET as target for expand_expr so that,
2335 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2336 We know expand_expr will not use the target in that case.
2337 Don't do this if TARGET is volatile because we are supposed
2338 to write it and then read it. */
bbf6f052 2339 {
906c4e36 2340 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2341 GET_MODE (target), 0);
2342 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2343 temp = copy_to_reg (temp);
2344 dont_return_target = 1;
2345 }
2346 else if (queued_subexp_p (target))
2347 /* If target contains a postincrement, it is not safe
2348 to use as the returned value. It would access the wrong
2349 place by the time the queued increment gets output.
2350 So copy the value through a temporary and use that temp
2351 as the result. */
2352 {
2353 /* ??? There may be a bug here in the case of a target
2354 that is volatile, but I'm too sleepy today to write anything
2355 to handle it. */
2356 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2357 {
2358 /* Expand EXP into a new pseudo. */
2359 temp = gen_reg_rtx (GET_MODE (target));
2360 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2361 }
2362 else
906c4e36 2363 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2364 dont_return_target = 1;
2365 }
2366 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2367 /* If this is a scalar in a register that is stored in a wider mode
2368 than the declared mode, compute the result into its declared mode
2369 and then convert to the wider mode. Our value is the computed
2370 expression. */
2371 {
2372 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2373 convert_move (SUBREG_REG (target), temp,
2374 SUBREG_PROMOTED_UNSIGNED_P (target));
2375 return temp;
2376 }
2377 else
2378 {
2379 temp = expand_expr (exp, target, GET_MODE (target), 0);
2380 /* DO return TARGET if it's a specified hardware register.
2381 expand_return relies on this.
2382 DO return TARGET if it's a volatile mem ref; ANSI requires this. */
bbf6f052
RK
2383 if (!(target && GET_CODE (target) == REG
2384 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2385 && CONSTANT_P (temp)
2386 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2387 dont_return_target = 1;
2388 }
2389
2390 /* If value was not generated in the target, store it there.
2391 Convert the value to TARGET's type first if necessary. */
2392
2393 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2394 {
2395 target = protect_from_queue (target, 1);
2396 if (GET_MODE (temp) != GET_MODE (target)
2397 && GET_MODE (temp) != VOIDmode)
2398 {
2399 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2400 if (dont_return_target)
2401 {
2402 /* In this case, we will return TEMP,
2403 so make sure it has the proper mode.
2404 But don't forget to store the value into TARGET. */
2405 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2406 emit_move_insn (target, temp);
2407 }
2408 else
2409 convert_move (target, temp, unsignedp);
2410 }
2411
2412 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2413 {
2414 /* Handle copying a string constant into an array.
2415 The string constant may be shorter than the array.
2416 So copy just the string's actual length, and clear the rest. */
2417 rtx size;
2418
2419 /* Get the size of the data type of the string,
2420 which is actually the size of the target. */
2421 size = expr_size (exp);
2422 if (GET_CODE (size) == CONST_INT
2423 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2424 emit_block_move (target, temp, size,
2425 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2426 else
bbf6f052 2427 {
e87b4f3f
RS
2428 /* Compute the size of the data to copy from the string. */
2429 tree copy_size
2430 = size_binop (MIN_EXPR,
2431 size_binop (CEIL_DIV_EXPR,
2432 TYPE_SIZE (TREE_TYPE (exp)),
2433 size_int (BITS_PER_UNIT)),
2434 convert (sizetype,
2435 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
2436 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2437 VOIDmode, 0);
2438 rtx label = 0;
2439
2440 /* Copy that much. */
2441 emit_block_move (target, temp, copy_size_rtx,
2442 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2443
2444 /* Figure out how much is left in TARGET
2445 that we have to clear. */
2446 if (GET_CODE (copy_size_rtx) == CONST_INT)
2447 {
2448 temp = plus_constant (XEXP (target, 0),
2449 TREE_STRING_LENGTH (exp));
2450 size = plus_constant (size,
2451 - TREE_STRING_LENGTH (exp));
2452 }
2453 else
2454 {
2455 enum machine_mode size_mode = Pmode;
2456
2457 temp = force_reg (Pmode, XEXP (target, 0));
2458 temp = expand_binop (size_mode, add_optab, temp,
2459 copy_size_rtx, NULL_RTX, 0,
2460 OPTAB_LIB_WIDEN);
2461
2462 size = expand_binop (size_mode, sub_optab, size,
2463 copy_size_rtx, NULL_RTX, 0,
2464 OPTAB_LIB_WIDEN);
e87b4f3f 2465
906c4e36 2466 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2467 GET_MODE (size), 0, 0);
2468 label = gen_label_rtx ();
2469 emit_jump_insn (gen_blt (label));
2470 }
2471
2472 if (size != const0_rtx)
2473 {
bbf6f052 2474#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2475 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
e87b4f3f 2476 temp, Pmode, const0_rtx, Pmode, size, Pmode);
bbf6f052 2477#else
d562e42e 2478 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
e87b4f3f 2479 temp, Pmode, size, Pmode);
bbf6f052 2480#endif
2481 }
2482 if (label)
2483 emit_label (label);
2484 }
2485 }
2486 else if (GET_MODE (temp) == BLKmode)
2487 emit_block_move (target, temp, expr_size (exp),
2488 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2489 else
2490 emit_move_insn (target, temp);
2491 }
2492 if (dont_return_target)
2493 return temp;
2494 return target;
2495}
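
/* [Editor's sketch -- not part of expr.c.  The STRING_CST case above
   has the net effect below: copy only the string's actual length into
   the target array and clear whatever remains, which is what the
   emit_block_move and memset/bzero calls implement.] */

#include <string.h>

static void
store_string_sketch (char *target, size_t target_size,
                     const char *str, size_t str_len)
{
  size_t n = str_len < target_size ? str_len : target_size;

  memcpy (target, str, n);                    /* copy that much */
  if (n < target_size)
    memset (target + n, 0, target_size - n);  /* clear the rest */
}
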
2496\f
2497/* Store the value of constructor EXP into the rtx TARGET.
2498 TARGET is either a REG or a MEM. */
2499
2500static void
2501store_constructor (exp, target)
2502 tree exp;
2503 rtx target;
2504{
2505 tree type = TREE_TYPE (exp);
2506
2507 /* We know our target cannot conflict, since safe_from_p has been called. */
2508#if 0
2509 /* Don't try copying piece by piece into a hard register
2510 since that is vulnerable to being clobbered by EXP.
2511 Instead, construct in a pseudo register and then copy it all. */
2512 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2513 {
2514 rtx temp = gen_reg_rtx (GET_MODE (target));
2515 store_constructor (exp, temp);
2516 emit_move_insn (target, temp);
2517 return;
2518 }
2519#endif
2520
2521 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
2522 || TREE_CODE (type) == QUAL_UNION_TYPE)
2523 {
2524 register tree elt;
2525
4af3895e 2526 /* Inform later passes that the whole union value is dead. */
2527 if (TREE_CODE (type) == UNION_TYPE
2528 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 2529 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2530
2531 /* If we are building a static constructor into a register,
2532 set the initial value as zero so we can fold the value into
2533 a constant. */
2534 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2535 emit_move_insn (target, const0_rtx);
2536
2537 /* If the constructor has fewer fields than the structure,
2538 clear the whole structure first. */
2539 else if (list_length (CONSTRUCTOR_ELTS (exp))
2540 != list_length (TYPE_FIELDS (type)))
2541 clear_storage (target, int_size_in_bytes (type));
2542 else
2543 /* Inform later passes that the old value is dead. */
2544 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2545
2546 /* Store each element of the constructor into
2547 the corresponding field of TARGET. */
2548
2549 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2550 {
2551 register tree field = TREE_PURPOSE (elt);
2552 register enum machine_mode mode;
2553 int bitsize;
2554 int bitpos;
2555 int unsignedp;
2556
2557 /* Just ignore missing fields.
2558 We cleared the whole structure, above,
2559 if any fields are missing. */
2560 if (field == 0)
2561 continue;
2562
2563 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2564 unsignedp = TREE_UNSIGNED (field);
2565 mode = DECL_MODE (field);
2566 if (DECL_BIT_FIELD (field))
2567 mode = VOIDmode;
2568
2569 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2570 /* ??? This case remains to be written. */
2571 abort ();
2572
2573 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2574
2575 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2576 /* The alignment of TARGET is
2577 at least what its type requires. */
2578 VOIDmode, 0,
2579 TYPE_ALIGN (type) / BITS_PER_UNIT,
2580 int_size_in_bytes (type));
2581 }
2582 }
4af3895e 2583 else if (TREE_CODE (type) == ARRAY_TYPE)
2584 {
2585 register tree elt;
2586 register int i;
4af3895e 2587 tree domain = TYPE_DOMAIN (type);
2588 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2589 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 2590 tree elttype = TREE_TYPE (type);
2591
2592 /* If the constructor has fewer fields than the structure,
2593 clear the whole structure first. Similarly if this is a
2594 static constructor of a non-BLKmode object. */
bbf6f052 2595
2596 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2597 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
597bb7f1 2598 clear_storage (target, int_size_in_bytes (type));
2599 else
2600 /* Inform later passes that the old value is dead. */
2601 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2602
2603 /* Store each element of the constructor into
2604 the corresponding element of TARGET, determined
2605 by counting the elements. */
2606 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2607 elt;
2608 elt = TREE_CHAIN (elt), i++)
2609 {
2610 register enum machine_mode mode;
2611 int bitsize;
2612 int bitpos;
2613 int unsignedp;
2614
2615 mode = TYPE_MODE (elttype);
2616 bitsize = GET_MODE_BITSIZE (mode);
2617 unsignedp = TREE_UNSIGNED (elttype);
2618
2619 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2620
2621 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2622 /* The alignment of TARGET is
2623 at least what its type requires. */
2624 VOIDmode, 0,
2625 TYPE_ALIGN (type) / BITS_PER_UNIT,
2626 int_size_in_bytes (type));
2627 }
2628 }
2629
2630 else
2631 abort ();
2632}
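
/* [Editor's note -- not part of expr.c.  The clear-before-store rule
   above is what yields C's initializer semantics: members or elements
   without explicit initializers read as zero.  A source-level
   example:] */

struct point_example { int x, y, z; };
static struct point_example p_example = { 1 };  /* y and z are cleared,
                                                   since the constructor
                                                   has fewer fields than
                                                   the structure */
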
2633
2634/* Store the value of EXP (an expression tree)
2635 into a subfield of TARGET which has mode MODE and occupies
2636 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2637 If MODE is VOIDmode, it means that we are storing into a bit-field.
2638
2639 If VALUE_MODE is VOIDmode, return nothing in particular.
2640 UNSIGNEDP is not used in this case.
2641
2642 Otherwise, return an rtx for the value stored. This rtx
2643 has mode VALUE_MODE if that is convenient to do.
2644 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2645
2646 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2647 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
2648
2649static rtx
2650store_field (target, bitsize, bitpos, mode, exp, value_mode,
2651 unsignedp, align, total_size)
2652 rtx target;
2653 int bitsize, bitpos;
2654 enum machine_mode mode;
2655 tree exp;
2656 enum machine_mode value_mode;
2657 int unsignedp;
2658 int align;
2659 int total_size;
2660{
906c4e36 2661 HOST_WIDE_INT width_mask = 0;
bbf6f052 2662
2663 if (bitsize < HOST_BITS_PER_WIDE_INT)
2664 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2665
2666 /* If we are storing into an unaligned field of an aligned union that is
2667 in a register, we may have the mode of TARGET being an integer mode but
2668 MODE == BLKmode. In that case, get an aligned object whose size and
2669 alignment are the same as TARGET and store TARGET into it (we can avoid
2670 the store if the field being stored is the entire width of TARGET). Then
2671 call ourselves recursively to store the field into a BLKmode version of
2672 that object. Finally, load from the object into TARGET. This is not
2673 very efficient in general, but should only be slightly more expensive
2674 than the otherwise-required unaligned accesses. Perhaps this can be
2675 cleaned up later. */
2676
2677 if (mode == BLKmode
2678 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2679 {
2680 rtx object = assign_stack_temp (GET_MODE (target),
2681 GET_MODE_SIZE (GET_MODE (target)), 0);
2682 rtx blk_object = copy_rtx (object);
2683
2684 PUT_MODE (blk_object, BLKmode);
2685
2686 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2687 emit_move_insn (object, target);
2688
2689 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2690 align, total_size);
2691
2692 emit_move_insn (target, object);
2693
2694 return target;
2695 }
2696
2697 /* If the structure is in a register or if the component
2698 is a bit field, we cannot use addressing to access it.
2699 Use bit-field techniques or SUBREG to store in it. */
2700
2701 if (mode == VOIDmode
2702 || (mode != BLKmode && ! direct_store[(int) mode])
2703 || GET_CODE (target) == REG
2704 || GET_CODE (target) == SUBREG)
2705 {
906c4e36 2706 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2707 /* Store the value in the bitfield. */
2708 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2709 if (value_mode != VOIDmode)
2710 {
2711 /* The caller wants an rtx for the value. */
2712 /* If possible, avoid refetching from the bitfield itself. */
2713 if (width_mask != 0
2714 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 2715 {
9074de27 2716 tree count;
5c4d7cfb 2717 enum machine_mode tmode;
86a2c12a 2718
2719 if (unsignedp)
2720 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2721 tmode = GET_MODE (temp);
2722 if (tmode == VOIDmode)
2723 tmode = value_mode;
2724 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2725 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2726 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2727 }
bbf6f052 2728 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2729 NULL_RTX, value_mode, 0, align,
2730 total_size);
2731 }
2732 return const0_rtx;
2733 }
2734 else
2735 {
2736 rtx addr = XEXP (target, 0);
2737 rtx to_rtx;
2738
2739 /* If a value is wanted, it must be the lhs;
2740 so make the address stable for multiple use. */
2741
2742 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2743 && ! CONSTANT_ADDRESS_P (addr)
2744 /* A frame-pointer reference is already stable. */
2745 && ! (GET_CODE (addr) == PLUS
2746 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2747 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2748 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2749 addr = copy_to_reg (addr);
2750
2751 /* Now build a reference to just the desired component. */
2752
2753 to_rtx = change_address (target, mode,
2754 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2755 MEM_IN_STRUCT_P (to_rtx) = 1;
2756
2757 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2758 }
2759}
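
/* [Editor's sketch -- not part of expr.c.  It shows the
   refetch-avoidance trick above in plain C: recover the field's
   value from TEMP instead of re-reading the bitfield.  Assumes
   BITSIZE is smaller than the width of int and that >> on a signed
   int is an arithmetic shift, as the LSHIFT_EXPR/RSHIFT_EXPR pair
   assumes for the target.] */

static int
field_value_sketch (int temp, int bitsize, int unsignedp)
{
  if (unsignedp)
    return temp & ((1 << bitsize) - 1);   /* the WIDTH_MASK case */
  else
    {
      int count = (int) sizeof (int) * 8 - bitsize;
      return (temp << count) >> count;    /* shift up, then back down */
    }
}
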
2760\f
2761/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2762 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 2763 ARRAY_REFs and find the ultimate containing object, which we return.
2764
2765 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2766 bit position, and *PUNSIGNEDP to the signedness of the field.
2767 If the position of the field is variable, we store a tree
2768 giving the variable offset (in units) in *POFFSET.
2769 This offset is in addition to the bit position.
2770 If the position is not variable, we store 0 in *POFFSET.
2771
2772 If any of the extraction expressions is volatile,
2773 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2774
2775 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2776 is a mode that can be used to access the field. In that case, *PBITSIZE
2777 is redundant.
2778
2779 If the field describes a variable-sized object, *PMODE is set to
2780 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2781 this case, but the address of the object can be found. */
2782
2783tree
2784get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
2785 punsignedp, pvolatilep)
2786 tree exp;
2787 int *pbitsize;
2788 int *pbitpos;
7bb0943f 2789 tree *poffset;
2790 enum machine_mode *pmode;
2791 int *punsignedp;
2792 int *pvolatilep;
2793{
2794 tree size_tree = 0;
2795 enum machine_mode mode = VOIDmode;
742920c7 2796 tree offset = integer_zero_node;
2797
2798 if (TREE_CODE (exp) == COMPONENT_REF)
2799 {
2800 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2801 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2802 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2803 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2804 }
2805 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2806 {
2807 size_tree = TREE_OPERAND (exp, 1);
2808 *punsignedp = TREE_UNSIGNED (exp);
2809 }
2810 else
2811 {
2812 mode = TYPE_MODE (TREE_TYPE (exp));
2813 *pbitsize = GET_MODE_BITSIZE (mode);
2814 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2815 }
2816
2817 if (size_tree)
2818 {
2819 if (TREE_CODE (size_tree) != INTEGER_CST)
2820 mode = BLKmode, *pbitsize = -1;
2821 else
2822 *pbitsize = TREE_INT_CST_LOW (size_tree);
2823 }
2824
2825 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2826 and find the ultimate containing object. */
2827
2828 *pbitpos = 0;
2829
2830 while (1)
2831 {
7bb0943f 2832 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 2833 {
2834 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2835 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2836 : TREE_OPERAND (exp, 2));
bbf6f052 2837
2838 /* If this field hasn't been filled in yet, don't go
2839 past it. This should only happen when folding expressions
2840 made during type construction. */
2841 if (pos == 0)
2842 break;
2843
2844 if (TREE_CODE (pos) == PLUS_EXPR)
2845 {
2846 tree constant, var;
2847 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2848 {
2849 constant = TREE_OPERAND (pos, 0);
2850 var = TREE_OPERAND (pos, 1);
2851 }
2852 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2853 {
2854 constant = TREE_OPERAND (pos, 1);
2855 var = TREE_OPERAND (pos, 0);
2856 }
2857 else
2858 abort ();
742920c7 2859
7bb0943f 2860 *pbitpos += TREE_INT_CST_LOW (constant);
2861 offset = size_binop (PLUS_EXPR, offset,
2862 size_binop (FLOOR_DIV_EXPR, var,
2863 size_int (BITS_PER_UNIT)));
2864 }
2865 else if (TREE_CODE (pos) == INTEGER_CST)
2866 *pbitpos += TREE_INT_CST_LOW (pos);
2867 else
2868 {
2869 /* Assume here that the offset is a multiple of a unit.
2870 If not, there should be an explicitly added constant. */
2871 offset = size_binop (PLUS_EXPR, offset,
2872 size_binop (FLOOR_DIV_EXPR, pos,
2873 size_int (BITS_PER_UNIT)));
7bb0943f 2874 }
bbf6f052 2875 }
bbf6f052 2876
742920c7 2877 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 2878 {
2879 /* This code is based on the code in case ARRAY_REF in expand_expr
2880 below. We assume here that the size of an array element is
2881 always an integral multiple of BITS_PER_UNIT. */
2882
2883 tree index = TREE_OPERAND (exp, 1);
2884 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
2885 tree low_bound
2886 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
2887 tree index_type = TREE_TYPE (index);
2888
2889 if (! integer_zerop (low_bound))
2890 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
2891
2892 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
2893 {
2894 index = convert (type_for_size (POINTER_SIZE, 0), index);
2895 index_type = TREE_TYPE (index);
2896 }
2897
2898 index = fold (build (MULT_EXPR, index_type, index,
2899 TYPE_SIZE (TREE_TYPE (exp))));
2900
2901 if (TREE_CODE (index) == INTEGER_CST
2902 && TREE_INT_CST_HIGH (index) == 0)
2903 *pbitpos += TREE_INT_CST_LOW (index);
2904 else
2905 offset = size_binop (PLUS_EXPR, offset,
2906 size_binop (FLOOR_DIV_EXPR, index,
2907 size_int (BITS_PER_UNIT)));
2908 }
2909 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
2910 && ! ((TREE_CODE (exp) == NOP_EXPR
2911 || TREE_CODE (exp) == CONVERT_EXPR)
2912 && (TYPE_MODE (TREE_TYPE (exp))
2913 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
2914 break;
2915
2916 /* If any reference in the chain is volatile, the effect is volatile. */
2917 if (TREE_THIS_VOLATILE (exp))
2918 *pvolatilep = 1;
2919 exp = TREE_OPERAND (exp, 0);
2920 }
2921
2922 /* If this was a bit-field, see if there is a mode that allows direct
2923 access in case EXP is in memory. */
e7f3c83f 2924 if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
2925 {
2926 mode = mode_for_size (*pbitsize, MODE_INT, 0);
2927 if (mode == BLKmode)
2928 mode = VOIDmode;
2929 }
2930
2931 if (integer_zerop (offset))
2932 offset = 0;
2933
bbf6f052 2934 *pmode = mode;
2935 *poffset = offset;
2936#if 0
2937 /* We aren't finished fixing the callers to really handle nonzero offset. */
2938 if (offset != 0)
2939 abort ();
2940#endif
2941
2942 return exp;
2943}
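
/* [Editor's sketch -- not part of expr.c.  The loop above maintains
   the invariant: position of the field = *PBITPOS bits plus
   BITS_PER_UNIT * *POFFSET.  The constant part of each
   component-ref or array-ref step accumulates in *PBITPOS, and the
   variable part is floor-divided into byte units.  Flattened to
   plain longs, one step does:] */

static void
accumulate_position_sketch (long const_bits, long var_bits,
                            long *pbitpos, long *poffset_units)
{
  *pbitpos += const_bits;          /* constant part, in bits */
  *poffset_units += var_bits / 8;  /* variable part, FLOOR_DIV_EXPR
                                      by BITS_PER_UNIT */
}
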
2944\f
2945/* Given an rtx VALUE that may contain additions and multiplications,
2946 return an equivalent value that just refers to a register or memory.
2947 This is done by generating instructions to perform the arithmetic
2948 and returning a pseudo-register containing the value.
2949
2950 The returned value may be a REG, SUBREG, MEM or constant. */
2951
2952rtx
2953force_operand (value, target)
2954 rtx value, target;
2955{
2956 register optab binoptab = 0;
2957 /* Use a temporary to force order of execution of calls to
2958 `force_operand'. */
2959 rtx tmp;
2960 register rtx op2;
2961 /* Use subtarget as the target for operand 0 of a binary operation. */
2962 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
2963
2964 if (GET_CODE (value) == PLUS)
2965 binoptab = add_optab;
2966 else if (GET_CODE (value) == MINUS)
2967 binoptab = sub_optab;
2968 else if (GET_CODE (value) == MULT)
2969 {
2970 op2 = XEXP (value, 1);
2971 if (!CONSTANT_P (op2)
2972 && !(GET_CODE (op2) == REG && op2 != subtarget))
2973 subtarget = 0;
2974 tmp = force_operand (XEXP (value, 0), subtarget);
2975 return expand_mult (GET_MODE (value), tmp,
906c4e36 2976 force_operand (op2, NULL_RTX),
bbf6f052
RK
2977 target, 0);
2978 }
2979
2980 if (binoptab)
2981 {
2982 op2 = XEXP (value, 1);
2983 if (!CONSTANT_P (op2)
2984 && !(GET_CODE (op2) == REG && op2 != subtarget))
2985 subtarget = 0;
2986 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
2987 {
2988 binoptab = add_optab;
2989 op2 = negate_rtx (GET_MODE (value), op2);
2990 }
2991
2992 /* Check for an addition with OP2 a constant integer and our first
2993 operand a PLUS of a virtual register and something else. In that
2994 case, we want to emit the sum of the virtual register and the
2995 constant first and then add the other value. This allows virtual
2996 register instantiation to simply modify the constant rather than
2997 creating another one around this addition. */
2998 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
2999 && GET_CODE (XEXP (value, 0)) == PLUS
3000 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3001 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3002 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3003 {
3004 rtx temp = expand_binop (GET_MODE (value), binoptab,
3005 XEXP (XEXP (value, 0), 0), op2,
3006 subtarget, 0, OPTAB_LIB_WIDEN);
3007 return expand_binop (GET_MODE (value), binoptab, temp,
3008 force_operand (XEXP (XEXP (value, 0), 1), 0),
3009 target, 0, OPTAB_LIB_WIDEN);
3010 }
3011
3012 tmp = force_operand (XEXP (value, 0), subtarget);
3013 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 3014 force_operand (op2, NULL_RTX),
bbf6f052 3015 target, 0, OPTAB_LIB_WIDEN);
8008b228 3016 /* We give UNSIGNEDP = 0 to expand_binop
3017 because the only operations we are expanding here are signed ones. */
3018 }
3019 return value;
3020}
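
/* [Editor's illustration -- not part of expr.c.  The virtual-register
   special case above reassociates (v + x) + c into (v + c) + x, so
   that instantiating the virtual register V merely folds C into its
   elimination offset instead of materializing a second constant.] */

static long
reassoc_sketch (long v, long x, long c)
{
  long t = v + c;   /* sum of virtual register and constant first */
  return t + x;     /* then add the other value */
}
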
3021\f
3022/* Subroutine of expand_expr:
3023 save the non-copied parts (LIST) of an expr (LHS), and return a list
3024 which can restore these values to their previous values,
3025 should something modify their storage. */
3026
3027static tree
3028save_noncopied_parts (lhs, list)
3029 tree lhs;
3030 tree list;
3031{
3032 tree tail;
3033 tree parts = 0;
3034
3035 for (tail = list; tail; tail = TREE_CHAIN (tail))
3036 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3037 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3038 else
3039 {
3040 tree part = TREE_VALUE (tail);
3041 tree part_type = TREE_TYPE (part);
906c4e36 3042 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3043 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3044 int_size_in_bytes (part_type), 0);
3045 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 3046 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 3047 parts = tree_cons (to_be_saved,
3048 build (RTL_EXPR, part_type, NULL_TREE,
3049 (tree) target),
3050 parts);
3051 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3052 }
3053 return parts;
3054}
3055
3056/* Subroutine of expand_expr:
3057 record the non-copied parts (LIST) of an expr (LHS), and return a list
3058 which specifies the initial values of these parts. */
3059
3060static tree
3061init_noncopied_parts (lhs, list)
3062 tree lhs;
3063 tree list;
3064{
3065 tree tail;
3066 tree parts = 0;
3067
3068 for (tail = list; tail; tail = TREE_CHAIN (tail))
3069 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3070 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3071 else
3072 {
3073 tree part = TREE_VALUE (tail);
3074 tree part_type = TREE_TYPE (part);
906c4e36 3075 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3076 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3077 }
3078 return parts;
3079}
3080
3081/* Subroutine of expand_expr: return nonzero iff there is no way that
3082 EXP can reference X, which is being modified. */
3083
3084static int
3085safe_from_p (x, exp)
3086 rtx x;
3087 tree exp;
3088{
3089 rtx exp_rtl = 0;
3090 int i, nops;
3091
3092 if (x == 0)
3093 return 1;
3094
3095 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
3096 find the underlying pseudo. */
3097 if (GET_CODE (x) == SUBREG)
3098 {
3099 x = SUBREG_REG (x);
3100 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3101 return 0;
3102 }
3103
3104 /* If X is a location in the outgoing argument area, it is always safe. */
3105 if (GET_CODE (x) == MEM
3106 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3107 || (GET_CODE (XEXP (x, 0)) == PLUS
3108 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3109 return 1;
3110
3111 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3112 {
3113 case 'd':
3114 exp_rtl = DECL_RTL (exp);
3115 break;
3116
3117 case 'c':
3118 return 1;
3119
3120 case 'x':
3121 if (TREE_CODE (exp) == TREE_LIST)
3122 return ((TREE_VALUE (exp) == 0
3123 || safe_from_p (x, TREE_VALUE (exp)))
3124 && (TREE_CHAIN (exp) == 0
3125 || safe_from_p (x, TREE_CHAIN (exp))));
3126 else
3127 return 0;
3128
3129 case '1':
3130 return safe_from_p (x, TREE_OPERAND (exp, 0));
3131
3132 case '2':
3133 case '<':
3134 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3135 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3136
3137 case 'e':
3138 case 'r':
3139 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3140 the expression. If it is set, we conflict iff we are that rtx or
3141 both are in memory. Otherwise, we check all operands of the
3142 expression recursively. */
3143
3144 switch (TREE_CODE (exp))
3145 {
3146 case ADDR_EXPR:
3147 return (staticp (TREE_OPERAND (exp, 0))
3148 || safe_from_p (x, TREE_OPERAND (exp, 0)));
3149
3150 case INDIRECT_REF:
3151 if (GET_CODE (x) == MEM)
3152 return 0;
3153 break;
3154
3155 case CALL_EXPR:
3156 exp_rtl = CALL_EXPR_RTL (exp);
3157 if (exp_rtl == 0)
3158 {
3159 /* Assume that the call will clobber all hard registers and
3160 all of memory. */
3161 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3162 || GET_CODE (x) == MEM)
3163 return 0;
3164 }
3165
3166 break;
3167
3168 case RTL_EXPR:
3169 exp_rtl = RTL_EXPR_RTL (exp);
3170 if (exp_rtl == 0)
3171 /* We don't know what this can modify. */
3172 return 0;
3173
3174 break;
3175
3176 case WITH_CLEANUP_EXPR:
3177 exp_rtl = RTL_EXPR_RTL (exp);
3178 break;
3179
3180 case SAVE_EXPR:
3181 exp_rtl = SAVE_EXPR_RTL (exp);
3182 break;
3183
3184 case BIND_EXPR:
3185 /* The only operand we look at is operand 1. The rest aren't
3186 part of the expression. */
3187 return safe_from_p (x, TREE_OPERAND (exp, 1));
3188
3189 case METHOD_CALL_EXPR:
3190 /* This takes an rtx argument, but shouldn't appear here. */
3191 abort ();
3192 }
3193
3194 /* If we have an rtx, we do not need to scan our operands. */
3195 if (exp_rtl)
3196 break;
3197
3198 nops = tree_code_length[(int) TREE_CODE (exp)];
3199 for (i = 0; i < nops; i++)
3200 if (TREE_OPERAND (exp, i) != 0
3201 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3202 return 0;
3203 }
3204
3205 /* If we have an rtl, find any enclosed object. Then see if we conflict
3206 with it. */
3207 if (exp_rtl)
3208 {
3209 if (GET_CODE (exp_rtl) == SUBREG)
3210 {
3211 exp_rtl = SUBREG_REG (exp_rtl);
3212 if (GET_CODE (exp_rtl) == REG
3213 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3214 return 0;
3215 }
3216
3217 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3218 are memory and EXP is not readonly. */
3219 return ! (rtx_equal_p (x, exp_rtl)
3220 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3221 && ! TREE_READONLY (exp)));
3222 }
3223
3224 /* If we reach here, it is safe. */
3225 return 1;
3226}
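
/* [Editor's sketch -- not part of expr.c.  The final test above,
   with the rtx properties flattened to flags: X conflicts with EXP
   when it is EXP's own rtl, or when both live in memory and EXP is
   not readonly.] */

static int
conflicts_sketch (int same_rtl, int x_in_mem, int exp_in_mem,
                  int exp_readonly)
{
  return same_rtl || (x_in_mem && exp_in_mem && ! exp_readonly);
}
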
3227
3228/* Subroutine of expand_expr: return nonzero iff EXP is an
3229 expression whose type is statically determinable. */
3230
3231static int
3232fixed_type_p (exp)
3233 tree exp;
3234{
3235 if (TREE_CODE (exp) == PARM_DECL
3236 || TREE_CODE (exp) == VAR_DECL
3237 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3238 || TREE_CODE (exp) == COMPONENT_REF
3239 || TREE_CODE (exp) == ARRAY_REF)
3240 return 1;
3241 return 0;
3242}
3243\f
3244/* expand_expr: generate code for computing expression EXP.
3245 An rtx for the computed value is returned. The value is never null.
3246 In the case of a void EXP, const0_rtx is returned.
3247
3248 The value may be stored in TARGET if TARGET is nonzero.
3249 TARGET is just a suggestion; callers must assume that
3250 the rtx returned may not be the same as TARGET.
3251
3252 If TARGET is CONST0_RTX, it means that the value will be ignored.
3253
3254 If TMODE is not VOIDmode, it suggests generating the
3255 result in mode TMODE. But this is done only when convenient.
3256 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3257 TMODE is just a suggestion; callers must assume that
3258 the rtx returned may not have mode TMODE.
3259
3260 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3261 with a constant address even if that address is not normally legitimate.
3262 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3263
3264 If MODIFIER is EXPAND_SUM then when EXP is an addition
3265 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3266 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3267 products as above, or REG or MEM, or constant.
3268 Ordinarily in such cases we would output mul or add instructions
3269 and then return a pseudo reg containing the sum.
3270
3271 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3272 it also marks a label as absolutely required (it can't be dead).
26fcb35a 3273 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6dc42e49 3274 This is used for outputting expressions used in initializers. */
3275
3276rtx
3277expand_expr (exp, target, tmode, modifier)
3278 register tree exp;
3279 rtx target;
3280 enum machine_mode tmode;
3281 enum expand_modifier modifier;
3282{
3283 register rtx op0, op1, temp;
3284 tree type = TREE_TYPE (exp);
3285 int unsignedp = TREE_UNSIGNED (type);
3286 register enum machine_mode mode = TYPE_MODE (type);
3287 register enum tree_code code = TREE_CODE (exp);
3288 optab this_optab;
3289 /* Use subtarget as the target for operand 0 of a binary operation. */
3290 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3291 rtx original_target = target;
3292 int ignore = (target == const0_rtx
3293 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
3294 || code == CONVERT_EXPR || code == REFERENCE_EXPR)
3295 && TREE_CODE (type) == VOID_TYPE));
3296 tree context;
3297
3298 /* Don't use hard regs as subtargets, because the combiner
3299 can only handle pseudo regs. */
3300 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3301 subtarget = 0;
3302 /* Avoid subtargets inside loops,
3303 since they hide some invariant expressions. */
3304 if (preserve_subexpressions_p ())
3305 subtarget = 0;
3306
3307 /* If we are going to ignore this result, we need only do something
3308 if there is a side-effect somewhere in the expression. If there
3309 is, short-circuit the most common cases here. */
bbf6f052 3310
3311 if (ignore)
3312 {
3313 if (! TREE_SIDE_EFFECTS (exp))
3314 return const0_rtx;
3315
3316 /* Ensure we reference a volatile object even if value is ignored. */
3317 if (TREE_THIS_VOLATILE (exp)
3318 && TREE_CODE (exp) != FUNCTION_DECL
3319 && mode != VOIDmode && mode != BLKmode)
3320 {
3321 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
3322 if (GET_CODE (temp) == MEM)
3323 temp = copy_to_reg (temp);
3324 return const0_rtx;
3325 }
3326
3327 if (TREE_CODE_CLASS (code) == '1')
3328 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3329 VOIDmode, modifier);
3330 else if (TREE_CODE_CLASS (code) == '2'
3331 || TREE_CODE_CLASS (code) == '<')
3332 {
3333 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
3334 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
3335 return const0_rtx;
3336 }
3337 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
3338 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
3339 /* If the second operand has no side effects, just evaluate
3340 the first. */
3341 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
3342 VOIDmode, modifier);
3343
3344 target = 0, original_target = 0;
3345 }
bbf6f052 3346
3347 /* If we will do cse, generate all results into pseudo registers
3348 since 1) that allows cse to find more things
3349 and 2) otherwise cse could produce an insn the machine
3350 cannot support. */
3351
3352 if (! cse_not_expected && mode != BLKmode && target
3353 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3354 target = subtarget;
3355
3356 switch (code)
3357 {
3358 case LABEL_DECL:
3359 {
3360 tree function = decl_function_context (exp);
3361 /* Handle using a label in a containing function. */
3362 if (function != current_function_decl && function != 0)
3363 {
3364 struct function *p = find_function_data (function);
3365 /* Allocate in the memory associated with the function
3366 that the label is in. */
3367 push_obstacks (p->function_obstack,
3368 p->function_maybepermanent_obstack);
3369
3370 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3371 label_rtx (exp), p->forced_labels);
3372 pop_obstacks ();
3373 }
3374 else if (modifier == EXPAND_INITIALIZER)
3375 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3376 label_rtx (exp), forced_labels);
26fcb35a 3377 temp = gen_rtx (MEM, FUNCTION_MODE,
b552441b 3378 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3379 if (function != current_function_decl && function != 0)
3380 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3381 return temp;
b552441b 3382 }
3383
3384 case PARM_DECL:
3385 if (DECL_RTL (exp) == 0)
3386 {
3387 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 3388 return CONST0_RTX (mode);
3389 }
3390
3391 case FUNCTION_DECL:
3392 case VAR_DECL:
3393 case RESULT_DECL:
3394 if (DECL_RTL (exp) == 0)
3395 abort ();
3396 /* Ensure variable marked as used even if it doesn't go through
3397 a parser. If it hasn't been used yet, write out an external
3398 definition. */
3399 if (! TREE_USED (exp))
3400 {
3401 assemble_external (exp);
3402 TREE_USED (exp) = 1;
3403 }
3404
3405 /* Handle variables inherited from containing functions. */
3406 context = decl_function_context (exp);
3407
3408 /* We treat inline_function_decl as an alias for the current function
3409 because that is the inline function whose vars, types, etc.
3410 are being merged into the current function.
3411 See expand_inline_function. */
3412 if (context != 0 && context != current_function_decl
3413 && context != inline_function_decl
3414 /* If var is static, we don't need a static chain to access it. */
3415 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3416 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3417 {
3418 rtx addr;
3419
3420 /* Mark as non-local and addressable. */
81feeecb 3421 DECL_NONLOCAL (exp) = 1;
bbf6f052
RK
3422 mark_addressable (exp);
3423 if (GET_CODE (DECL_RTL (exp)) != MEM)
3424 abort ();
3425 addr = XEXP (DECL_RTL (exp), 0);
3426 if (GET_CODE (addr) == MEM)
3427 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3428 else
3429 addr = fix_lexical_addr (addr, exp);
3430 return change_address (DECL_RTL (exp), mode, addr);
3431 }
4af3895e 3432
bbf6f052
RK
3433 /* This is the case of an array whose size is to be determined
3434 from its initializer, while the initializer is still being parsed.
3435 See expand_decl. */
3436 if (GET_CODE (DECL_RTL (exp)) == MEM
3437 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3438 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3439 XEXP (DECL_RTL (exp), 0));
3440 if (GET_CODE (DECL_RTL (exp)) == MEM
3441 && modifier != EXPAND_CONST_ADDRESS
3442 && modifier != EXPAND_SUM
3443 && modifier != EXPAND_INITIALIZER)
3444 {
3445 /* DECL_RTL probably contains a constant address.
3446 On RISC machines where a constant address isn't valid,
3447 make some insns to get that address into a register. */
3448 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3449 || (flag_force_addr
3450 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3451 return change_address (DECL_RTL (exp), VOIDmode,
3452 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3453 }
3454
3455 /* If the mode of DECL_RTL does not match that of the decl, it
3456 must be a promoted value. We return a SUBREG of the wanted mode,
3457 but mark it so that we know that it was already extended. */
3458
3459 if (GET_CODE (DECL_RTL (exp)) == REG
3460 && GET_MODE (DECL_RTL (exp)) != mode)
3461 {
3462 enum machine_mode decl_mode = DECL_MODE (exp);
3463
3464 /* Get the signedness used for this variable. Ensure we get the
3465 same mode we got when the variable was declared. */
3466
3467 PROMOTE_MODE (decl_mode, unsignedp, type);
3468
3469 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3470 abort ();
3471
3472 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3473 SUBREG_PROMOTED_VAR_P (temp) = 1;
3474 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3475 return temp;
3476 }
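 /* Illustrative sketch (machine-dependent; assumes a target whose
 PROMOTE_MODE widens QImode to SImode): a `signed char' variable is
 then kept in an SImode pseudo, already sign-extended, and asking for
 it in QImode yields (subreg:QI (reg:SI N) 0) with
 SUBREG_PROMOTED_VAR_P set, so later code can reuse the extension
 instead of redoing it. */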
3477
3478 return DECL_RTL (exp);
3479
3480 case INTEGER_CST:
3481 return immed_double_const (TREE_INT_CST_LOW (exp),
3482 TREE_INT_CST_HIGH (exp),
3483 mode);
3484
3485 case CONST_DECL:
3486 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3487
3488 case REAL_CST:
3489 /* If optimized, generate immediate CONST_DOUBLE
3490 which will be turned into memory by reload if necessary.
3491
3492 We used to force a register so that loop.c could see it. But
3493 this does not allow gen_* patterns to perform optimizations with
3494 the constants. It also produces two insns in cases like "x = 1.0;".
3495 On most machines, floating-point constants are not permitted in
3496 many insns, so we'd end up copying it to a register in any case.
3497
3498 Now, we do the copying in expand_binop, if appropriate. */
3499 return immed_real_const (exp);
3500
3501 case COMPLEX_CST:
3502 case STRING_CST:
3503 if (! TREE_CST_RTL (exp))
3504 output_constant_def (exp);
3505
3506 /* TREE_CST_RTL probably contains a constant address.
3507 On RISC machines where a constant address isn't valid,
3508 make some insns to get that address into a register. */
3509 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3510 && modifier != EXPAND_CONST_ADDRESS
3511 && modifier != EXPAND_INITIALIZER
3512 && modifier != EXPAND_SUM
3513 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3514 return change_address (TREE_CST_RTL (exp), VOIDmode,
3515 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3516 return TREE_CST_RTL (exp);
3517
3518 case SAVE_EXPR:
3519 context = decl_function_context (exp);
3520 /* We treat inline_function_decl as an alias for the current function
3521 because that is the inline function whose vars, types, etc.
3522 are being merged into the current function.
3523 See expand_inline_function. */
3524 if (context == current_function_decl || context == inline_function_decl)
3525 context = 0;
3526
3527 /* If this is non-local, handle it. */
3528 if (context)
3529 {
3530 temp = SAVE_EXPR_RTL (exp);
3531 if (temp && GET_CODE (temp) == REG)
3532 {
3533 put_var_into_stack (exp);
3534 temp = SAVE_EXPR_RTL (exp);
3535 }
3536 if (temp == 0 || GET_CODE (temp) != MEM)
3537 abort ();
3538 return change_address (temp, mode,
3539 fix_lexical_addr (XEXP (temp, 0), exp));
3540 }
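 /* On the first reference SAVE_EXPR_RTL is still 0 and the code below
 computes the value once into TEMP; e.g., for a SAVE_EXPR wrapping
 `f ()' that is used twice, the call is expanded and its value saved
 here on the first use, and every later use just returns the saved
 rtx. */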
3541 if (SAVE_EXPR_RTL (exp) == 0)
3542 {
3543 if (mode == BLKmode)
3544 temp
3545 = assign_stack_temp (mode,
3546 int_size_in_bytes (TREE_TYPE (exp)), 0);
3547 else
3548 {
3549 enum machine_mode var_mode = mode;
3550
3551 if (TREE_CODE (type) == INTEGER_TYPE
3552 || TREE_CODE (type) == ENUMERAL_TYPE
3553 || TREE_CODE (type) == BOOLEAN_TYPE
3554 || TREE_CODE (type) == CHAR_TYPE
3555 || TREE_CODE (type) == REAL_TYPE
3556 || TREE_CODE (type) == POINTER_TYPE
3557 || TREE_CODE (type) == OFFSET_TYPE)
3558 {
3559 PROMOTE_MODE (var_mode, unsignedp, type);
3560 }
3561
3562 temp = gen_reg_rtx (var_mode);
3563 }
3564
3565 SAVE_EXPR_RTL (exp) = temp;
3566 if (!optimize && GET_CODE (temp) == REG)
3567 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3568 save_expr_regs);
3569
3570 /* If the mode of TEMP does not match that of the expression, it
3571 must be a promoted value. We pass store_expr a SUBREG of the
3572 wanted mode but mark it so that we know that it was already
3573 extended. Note that `unsignedp' was modified above in
3574 this case. */
3575
3576 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3577 {
3578 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3579 SUBREG_PROMOTED_VAR_P (temp) = 1;
3580 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3581 }
3582
3583 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3584 }
3585
3586 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3587 must be a promoted value. We return a SUBREG of the wanted mode,
3588 but mark it so that we know that it was already extended. Note
3589 that `unsignedp' was modified above in this case. */
3590
3591 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3592 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3593 {
3594 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3595 SUBREG_PROMOTED_VAR_P (temp) = 1;
3596 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3597 return temp;
3598 }
3599
3600 return SAVE_EXPR_RTL (exp);
3601
3602 case EXIT_EXPR:
3603 expand_exit_loop_if_false (NULL_PTR,
3604 invert_truthvalue (TREE_OPERAND (exp, 0)));
3605 return const0_rtx;
3606
3607 case LOOP_EXPR:
3608 expand_start_loop (1);
3609 expand_expr_stmt (TREE_OPERAND (exp, 0));
3610 expand_end_loop ();
3611
3612 return const0_rtx;
3613
3614 case BIND_EXPR:
3615 {
3616 tree vars = TREE_OPERAND (exp, 0);
3617 int vars_need_expansion = 0;
3618
3619 /* Need to open a binding contour here because
3620 if there are any cleanups they must be contained here. */
3621 expand_start_bindings (0);
3622
3623 /* Mark the corresponding BLOCK for output in its proper place. */
3624 if (TREE_OPERAND (exp, 2) != 0
3625 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3626 insert_block (TREE_OPERAND (exp, 2));
3627
3628 /* If VARS have not yet been expanded, expand them now. */
3629 while (vars)
3630 {
3631 if (DECL_RTL (vars) == 0)
3632 {
3633 vars_need_expansion = 1;
3634 expand_decl (vars);
3635 }
3636 expand_decl_init (vars);
3637 vars = TREE_CHAIN (vars);
3638 }
3639
3640 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3641
3642 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3643
3644 return temp;
3645 }
3646
3647 case RTL_EXPR:
3648 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3649 abort ();
3650 emit_insns (RTL_EXPR_SEQUENCE (exp));
3651 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3652 return RTL_EXPR_RTL (exp);
3653
3654 case CONSTRUCTOR:
3655 /* If we don't need the result, just ensure we evaluate any
3656 subexpressions. */
3657 if (ignore)
3658 {
3659 tree elt;
3660 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3661 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3662 return const0_rtx;
3663 }
3664 /* All elts simple constants => refer to a constant in memory. But
3665 if this is a non-BLKmode mode, let it store a field at a time
3666 since that should make a CONST_INT or CONST_DOUBLE when we
3667 fold. If we are making an initializer and all operands are
3668 constant, put it in memory as well. */
3669 else if ((TREE_STATIC (exp)
3670 && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3671 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
3672 {
3673 rtx constructor = output_constant_def (exp);
3674 if (modifier != EXPAND_CONST_ADDRESS
3675 && modifier != EXPAND_INITIALIZER
3676 && modifier != EXPAND_SUM
3677 && !memory_address_p (GET_MODE (constructor),
3678 XEXP (constructor, 0)))
3679 constructor = change_address (constructor, VOIDmode,
3680 XEXP (constructor, 0));
3681 return constructor;
3682 }
3683
3684 else
3685 {
3686 if (target == 0 || ! safe_from_p (target, exp))
3687 {
3688 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3689 target = gen_reg_rtx (mode);
3690 else
3691 {
3692 enum tree_code c = TREE_CODE (type);
3693 target
3694 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3695 if (c == RECORD_TYPE || c == UNION_TYPE
3696 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
3697 MEM_IN_STRUCT_P (target) = 1;
3698 }
3699 }
3700 store_constructor (exp, target);
3701 return target;
3702 }
3703
3704 case INDIRECT_REF:
3705 {
3706 tree exp1 = TREE_OPERAND (exp, 0);
3707 tree exp2;
3708
3709 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
3710 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3711 This code has the same general effect as simply doing
3712 expand_expr on the save expr, except that the expression PTR
3713 is computed for use as a memory address. This means different
3714 code, suitable for indexing, may be generated. */
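 /* Sketch of the payoff (target-dependent): for `*p += 1', where the
 front end has wrapped P in the SAVE_EXPR, expanding P with EXPAND_SUM
 here lets a register+offset form be folded straight into the MEM's
 address on machines with such addressing, instead of first forcing
 the sum into a fresh pseudo. */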
3715 if (TREE_CODE (exp1) == SAVE_EXPR
3716 && SAVE_EXPR_RTL (exp1) == 0
3717 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3718 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3719 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3720 {
3721 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3722 VOIDmode, EXPAND_SUM);
3723 op0 = memory_address (mode, temp);
3724 op0 = copy_all_regs (op0);
3725 SAVE_EXPR_RTL (exp1) = op0;
3726 }
3727 else
3728 {
3729 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3730 op0 = memory_address (mode, op0);
3731 }
3732
3733 temp = gen_rtx (MEM, mode, op0);
3734 /* If address was computed by addition,
3735 mark this as an element of an aggregate. */
3736 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3737 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3738 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3739 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3740 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3741 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3742 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
3743 || (TREE_CODE (exp1) == ADDR_EXPR
3744 && (exp2 = TREE_OPERAND (exp1, 0))
3745 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3746 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3747 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
3748 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
3749 MEM_IN_STRUCT_P (temp) = 1;
3750 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
3751#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3752 a location is accessed through a pointer to const does not mean
3753 that the value there can never change. */
3754 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
3755#endif
3756 return temp;
3757 }
3758
3759 case ARRAY_REF:
3760 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
3761 abort ();
3762
3763 {
3764 tree array = TREE_OPERAND (exp, 0);
3765 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
3766 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3767 tree index = TREE_OPERAND (exp, 1);
3768 tree index_type = TREE_TYPE (index);
3769 int i;
3770
3771 /* Optimize the special-case of a zero lower bound. */
3772 if (! integer_zerop (low_bound))
3773 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3774
3775 if (TREE_CODE (index) != INTEGER_CST
3776 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3777 {
3778 /* Nonconstant array index or nonconstant element size.
3779 Generate the tree for *(&array+index) and expand that,
3780 except do it in a language-independent way
3781 and don't complain about non-lvalue arrays.
3782 `mark_addressable' should already have been called
3783 for any array for which this case will be reached. */
3784
3785 /* Don't forget the const or volatile flag from the array
3786 element. */
3787 tree variant_type = build_type_variant (type,
3788 TREE_READONLY (exp),
3789 TREE_THIS_VOLATILE (exp));
3790 tree array_adr = build1 (ADDR_EXPR,
3791 build_pointer_type (variant_type), array);
3792 tree elt;
3793
3794 /* Convert the integer argument to a type the same size as a
3795 pointer so the multiply won't overflow spuriously. */
3796 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3797 index = convert (type_for_size (POINTER_SIZE, 0), index);
3798
3799 /* Don't think the address has side effects
3800 just because the array does.
3801 (In some cases the address might have side effects,
3802 and we fail to record that fact here. However, it should not
3803 matter, since expand_expr should not care.) */
3804 TREE_SIDE_EFFECTS (array_adr) = 0;
3805
3806 elt = build1 (INDIRECT_REF, type,
3807 fold (build (PLUS_EXPR,
3808 TYPE_POINTER_TO (variant_type),
3809 array_adr,
3810 fold (build (MULT_EXPR,
3811 TYPE_POINTER_TO (variant_type),
3812 index,
3813 size_in_bytes (type))))));
3814
3815 /* Volatility, etc., of new expression is same as old
3816 expression. */
3817 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3818 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3819 TREE_READONLY (elt) = TREE_READONLY (exp);
3820
3821 return expand_expr (elt, target, tmode, modifier);
3822 }
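 /* Concretely, for `a[i]' with 4-byte elements (size chosen just for
 illustration) the tree built above corresponds to `*(&a + i * 4)';
 the index was first widened to POINTER_SIZE so that the multiply
 cannot overflow in a mode narrower than an address. */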
3823
3824 /* Fold an expression like: "foo"[2].
3825 This is not done in fold so it won't happen inside &. */
3826
3827 if (TREE_CODE (array) == STRING_CST
3828 && TREE_CODE (index) == INTEGER_CST
3829 && !TREE_INT_CST_HIGH (index)
3830 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
3831 {
3832 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
3833 {
3834 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
3835 TREE_TYPE (exp) = integer_type_node;
3836 return expand_expr (exp, target, tmode, modifier);
3837 }
3838 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
3839 {
3840 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
3841 TREE_TYPE (exp) = integer_type_node;
3842 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
3843 exp),
3844 target, tmode, modifier);
3845 }
3846 }
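 /* Example: the fold above turns `"foo"[2]' into the character
 constant 'o', so no reference to the string constant's memory is
 ever emitted. */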
3847
3848 /* If this is a constant index into a constant array,
3849 just get the value from the array. Handle both the cases when
3850 we have an explicit constructor and when our operand is a variable
3851 that was declared const. */
3852
3853 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
3854 {
3855 if (TREE_CODE (index) == INTEGER_CST
3856 && TREE_INT_CST_HIGH (index) == 0)
3857 {
3858 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3859
3860 i = TREE_INT_CST_LOW (index);
3861 while (elem && i--)
3862 elem = TREE_CHAIN (elem);
3863 if (elem)
3864 return expand_expr (fold (TREE_VALUE (elem)), target,
3865 tmode, modifier);
3866 }
3867 }
3868
3869 else if (optimize >= 1
3870 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
3871 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
3872 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
3873 {
3874 if (TREE_CODE (index) == INTEGER_CST
3875 && TREE_INT_CST_HIGH (index) == 0)
3876 {
3877 tree init = DECL_INITIAL (array);
3878
3879 i = TREE_INT_CST_LOW (index);
3880 if (TREE_CODE (init) == CONSTRUCTOR)
3881 {
3882 tree elem = CONSTRUCTOR_ELTS (init);
3883
3884 while (elem && i--)
3885 elem = TREE_CHAIN (elem);
3886 if (elem)
3887 return expand_expr (fold (TREE_VALUE (elem)), target,
3888 tmode, modifier);
3889 }
3890 else if (TREE_CODE (init) == STRING_CST
3891 && i < TREE_STRING_LENGTH (init))
3892 {
3893 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3894 return convert_to_mode (mode, temp, 0);
3895 }
3896 }
3897 }
3898 }
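 /* Example (sketch): given `static const int t[] = {1, 2, 3};',
 a use of `t[1]' when optimizing is folded here to the constant 2
 by walking the CONSTRUCTOR's element list, with no load emitted. */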
3899
3900 /* Treat array-ref with constant index as a component-ref. */
3901
3902 case COMPONENT_REF:
3903 case BIT_FIELD_REF:
3904 /* If the operand is a CONSTRUCTOR, we can just extract the
3905 appropriate field if it is present. */
3906 if (code != ARRAY_REF
3907 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3908 {
3909 tree elt;
3910
3911 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3912 elt = TREE_CHAIN (elt))
3913 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3914 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3915 }
3916
3917 {
3918 enum machine_mode mode1;
3919 int bitsize;
3920 int bitpos;
3921 tree offset;
3922 int volatilep = 0;
3923 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3924 &mode1, &unsignedp, &volatilep);
3925
3926 /* If we got back the original object, something is wrong. Perhaps
3927 we are evaluating an expression too early. In any event, don't
3928 infinitely recurse. */
3929 if (tem == exp)
3930 abort ();
3931
3932 /* In some cases, we will be offsetting OP0's address by a constant.
3933 So get it as a sum, if possible. If we will be using it
3934 directly in an insn, we validate it. */
3935 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
3936
3937 /* If this is a constant, put it into a register if it is a
3938 legitimate constant, and into memory if it isn't. */
3939 if (CONSTANT_P (op0))
3940 {
3941 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3942 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
3943 op0 = force_reg (mode, op0);
3944 else
3945 op0 = validize_mem (force_const_mem (mode, op0));
3946 }
3947
3948 if (offset != 0)
3949 {
3950 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3951
3952 if (GET_CODE (op0) != MEM)
3953 abort ();
3954 op0 = change_address (op0, VOIDmode,
3955 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3956 force_reg (Pmode, offset_rtx)));
3957 }
3958
3959 /* Don't forget about volatility even if this is a bitfield. */
3960 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3961 {
3962 op0 = copy_rtx (op0);
3963 MEM_VOLATILE_P (op0) = 1;
3964 }
3965
3966 if (mode1 == VOIDmode
3967 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3968 && modifier != EXPAND_CONST_ADDRESS
3969 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3970 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3971 {
3972 /* In cases where an aligned union has an unaligned object
3973 as a field, we might be extracting a BLKmode value from
3974 an integer-mode (e.g., SImode) object. Handle this case
3975 by doing the extract into an object as wide as the field
3976 (which we know to be the width of a basic mode), then
3977 storing into memory, and changing the mode to BLKmode. */
3978 enum machine_mode ext_mode = mode;
3979
3980 if (ext_mode == BLKmode)
3981 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3982
3983 if (ext_mode == BLKmode)
3984 abort ();
3985
3986 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3987 unsignedp, target, ext_mode, ext_mode,
3988 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3989 int_size_in_bytes (TREE_TYPE (tem)));
3990 if (mode == BLKmode)
3991 {
3992 rtx new = assign_stack_temp (ext_mode,
3993 bitsize / BITS_PER_UNIT, 0);
3994
3995 emit_move_insn (new, op0);
3996 op0 = copy_rtx (new);
3997 PUT_MODE (op0, BLKmode);
3998 }
3999
4000 return op0;
4001 }
4002
4003 /* Get a reference to just this component. */
4004 if (modifier == EXPAND_CONST_ADDRESS
4005 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4006 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4007 (bitpos / BITS_PER_UNIT)));
4008 else
4009 op0 = change_address (op0, mode1,
4010 plus_constant (XEXP (op0, 0),
4011 (bitpos / BITS_PER_UNIT)));
4012 MEM_IN_STRUCT_P (op0) = 1;
4013 MEM_VOLATILE_P (op0) |= volatilep;
4014 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4015 return op0;
4016 if (target == 0)
4017 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4018 convert_move (target, op0, unsignedp);
4019 return target;
4020 }
4021
4022 case OFFSET_REF:
4023 {
4024 tree base = build1 (ADDR_EXPR, type, TREE_OPERAND (exp, 0));
4025 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
4026 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
4027 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4028 MEM_IN_STRUCT_P (temp) = 1;
4029 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
4030#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4031 a location is accessed through a pointer to const does not mean
4032 that the value there can never change. */
4033 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4034#endif
4035 return temp;
4036 }
4037
4038 /* Intended for a reference to a buffer of a file-object in Pascal.
4039 But it's not certain that a special tree code will really be
4040 necessary for these. INDIRECT_REF might work for them. */
4041 case BUFFER_REF:
4042 abort ();
4043
4044 /* IN_EXPR: Inlined pascal set IN expression.
4045
4046 Algorithm:
4047 rlo = set_low - (set_low%bits_per_word);
4048 the_word = set [ (index - rlo)/bits_per_word ];
4049 bit_index = index % bits_per_word;
4050 bitmask = 1 << bit_index;
4051 return !!(the_word & bitmask); */
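 /* Worked instance of the algorithm above, taking bits_per_word = 8
 for illustration: with set_low = 0 and index = 19, rlo = 0,
 the_word = set[19/8] = set[2], bit_index = 19 % 8 = 3 and
 bitmask = 1 << 3 = 8, so the result is nonzero iff bit 3 of the
 third byte of the set is on. */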
4052 case IN_EXPR:
4053 preexpand_calls (exp);
4054 {
4055 tree set = TREE_OPERAND (exp, 0);
4056 tree index = TREE_OPERAND (exp, 1);
4057 tree set_type = TREE_TYPE (set);
4058
4059 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4060 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4061
4062 rtx index_val;
4063 rtx lo_r;
4064 rtx hi_r;
4065 rtx rlow;
4066 rtx diff, quo, rem, addr, bit, result;
4067 rtx setval, setaddr;
4068 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4069
4070 if (target == 0)
4071 target = gen_reg_rtx (mode);
4072
4073 /* If domain is empty, answer is no. */
4074 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4075 return const0_rtx;
4076
4077 index_val = expand_expr (index, 0, VOIDmode, 0);
4078 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4079 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4080 setval = expand_expr (set, 0, VOIDmode, 0);
4081 setaddr = XEXP (setval, 0);
4082
4083 /* Compare index against bounds, if they are constant. */
4084 if (GET_CODE (index_val) == CONST_INT
4085 && GET_CODE (lo_r) == CONST_INT
4086 && INTVAL (index_val) < INTVAL (lo_r))
4087 return const0_rtx;
4088
4089 if (GET_CODE (index_val) == CONST_INT
4090 && GET_CODE (hi_r) == CONST_INT
4091 && INTVAL (hi_r) < INTVAL (index_val))
4092 return const0_rtx;
4093
4094 /* If we get here, we have to generate the code for both cases
4095 (in range and out of range). */
4096
4097 op0 = gen_label_rtx ();
4098 op1 = gen_label_rtx ();
4099
4100 if (! (GET_CODE (index_val) == CONST_INT
4101 && GET_CODE (lo_r) == CONST_INT))
4102 {
4103 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4104 GET_MODE (index_val), 0, 0);
4105 emit_jump_insn (gen_blt (op1));
4106 }
4107
4108 if (! (GET_CODE (index_val) == CONST_INT
4109 && GET_CODE (hi_r) == CONST_INT))
4110 {
4111 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4112 GET_MODE (index_val), 0, 0);
4113 emit_jump_insn (gen_bgt (op1));
4114 }
4115
4116 /* Calculate the element number of bit zero in the first word
4117 of the set. */
4118 if (GET_CODE (lo_r) == CONST_INT)
4119 rlow = GEN_INT (INTVAL (lo_r)
4120 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
4121 else
4122 rlow = expand_binop (index_mode, and_optab, lo_r,
4123 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4124 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4125
4126 diff = expand_binop (index_mode, sub_optab,
4127 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4128
4129 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4130 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4131 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4132 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4133 addr = memory_address (byte_mode,
4134 expand_binop (index_mode, add_optab,
4135 diff, setaddr, NULL_RTX, 0,
4136 OPTAB_LIB_WIDEN));
4137 /* Extract the bit we want to examine. */
4138 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4139 gen_rtx (MEM, byte_mode, addr),
4140 make_tree (TREE_TYPE (index), rem),
4141 NULL_RTX, 1);
4142 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4143 GET_MODE (target) == byte_mode ? target : 0,
4144 1, OPTAB_LIB_WIDEN);
4145
4146 if (result != target)
4147 convert_move (target, result, 1);
4148
4149 /* Output the code to handle the out-of-range case. */
4150 emit_jump (op0);
4151 emit_label (op1);
4152 emit_move_insn (target, const0_rtx);
4153 emit_label (op0);
4154 return target;
4155 }
4156
4157 case WITH_CLEANUP_EXPR:
4158 if (RTL_EXPR_RTL (exp) == 0)
4159 {
4160 RTL_EXPR_RTL (exp)
4161 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4162 cleanups_this_call
4163 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4164 /* That's it for this cleanup. */
4165 TREE_OPERAND (exp, 2) = 0;
4166 }
4167 return RTL_EXPR_RTL (exp);
4168
4169 case CALL_EXPR:
4170 /* Check for a built-in function. */
4171 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4172 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4173 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4174 return expand_builtin (exp, target, subtarget, tmode, ignore);
4175 /* If this call was expanded already by preexpand_calls,
4176 just return the result we got. */
4177 if (CALL_EXPR_RTL (exp) != 0)
4178 return CALL_EXPR_RTL (exp);
4179 return expand_call (exp, target, ignore);
4180
4181 case NON_LVALUE_EXPR:
4182 case NOP_EXPR:
4183 case CONVERT_EXPR:
4184 case REFERENCE_EXPR:
4185 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4186 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4187 if (TREE_CODE (type) == UNION_TYPE)
4188 {
4189 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4190 if (target == 0)
4191 {
4192 if (mode == BLKmode)
4193 {
4194 if (TYPE_SIZE (type) == 0
4195 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4196 abort ();
4197 target = assign_stack_temp (BLKmode,
4198 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4199 + BITS_PER_UNIT - 1)
4200 / BITS_PER_UNIT, 0);
4201 }
4202 else
4203 target = gen_reg_rtx (mode);
4204 }
4205 if (GET_CODE (target) == MEM)
4206 /* Store data into beginning of memory target. */
4207 store_expr (TREE_OPERAND (exp, 0),
4208 change_address (target, TYPE_MODE (valtype), 0), 0);
4209
4210 else if (GET_CODE (target) == REG)
4211 /* Store this field into a union of the proper type. */
4212 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4213 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4214 VOIDmode, 0, 1,
4215 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4216 else
4217 abort ();
4218
4219 /* Return the entire union. */
4220 return target;
4221 }
4222 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4223 if (GET_MODE (op0) == mode)
4224 return op0;
4225 /* If arg is a constant integer being extended from a narrower mode,
4226 we must really truncate to get the extended bits right. Otherwise
4227 (unsigned long) (unsigned char) ("\377"[0])
4228 would come out as ffffffff. */
4229 if (GET_MODE (op0) == VOIDmode
4230 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4231 < GET_MODE_BITSIZE (mode)))
4232 {
4233 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
4234 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4235
4236 if (width < HOST_BITS_PER_WIDE_INT)
4237 {
4238 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4239 : CONST_DOUBLE_LOW (op0));
4240 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4241 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4242 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4243 else
4244 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4245
4246 op0 = GEN_INT (val);
4247 }
4248 else
4249 {
4250 op0 = (simplify_unary_operation
4251 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4252 ? ZERO_EXTEND : SIGN_EXTEND),
4253 mode, op0,
4254 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4255 if (op0 == 0)
4256 abort ();
4257 }
4258 }
4259 if (GET_MODE (op0) == VOIDmode)
4260 return op0;
4261 if (modifier == EXPAND_INITIALIZER)
4262 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4263 if (flag_force_mem && GET_CODE (op0) == MEM)
4264 op0 = copy_to_reg (op0);
4265
4266 if (target == 0)
4267 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4268 else
4269 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4270 return target;
4271
4272 case PLUS_EXPR:
4273 /* We come here from MINUS_EXPR when the second operand is a constant. */
4274 plus_expr:
4275 this_optab = add_optab;
4276
4277 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4278 something else, make sure we add the register to the constant and
4279 then to the other thing. This case can occur during strength
4280 reduction and doing it this way will produce better code if the
4281 frame pointer or argument pointer is eliminated.
4282
4283 fold-const.c will ensure that the constant is always in the inner
4284 PLUS_EXPR, so the only case we need to do anything about is if
4285 sp, ap, or fp is our second argument, in which case we must swap
4286 the innermost first argument and our second argument. */
4287
4288 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4289 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4290 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4291 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4292 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4293 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4294 {
4295 tree t = TREE_OPERAND (exp, 1);
4296
4297 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4298 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4299 }
4300
4301 /* If the result is to be Pmode and we are adding an integer to
4302 something, we might be forming a constant. So try to use
4303 plus_constant. If it produces a sum and we can't accept it,
4304 use force_operand. This allows P = &ARR[const] to generate
4305 efficient code on machines where a SYMBOL_REF is not a valid
4306 address.
4307
4308 If this is an EXPAND_SUM call, always return the sum. */
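 /* Sketch of the effect (modes shown are illustrative): for
 `P = &ARR[10]' with 4-byte elements, the constant path below can
 fold the whole address to (plus:SI (symbol_ref ("ARR")) (const_int 40));
 force_operand then copies it through a register only when that form
 is not a valid address on the target. */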
4309 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
4310 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4311 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4312 || mode == Pmode))
4313 {
4314 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4315 EXPAND_SUM);
4316 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4317 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4318 op1 = force_operand (op1, target);
4319 return op1;
4320 }
4321
4322 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4323 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4324 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4325 || mode == Pmode))
4326 {
4327 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4328 EXPAND_SUM);
4329 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4330 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4331 op0 = force_operand (op0, target);
4332 return op0;
4333 }
4334
4335 /* No sense saving up arithmetic to be done
4336 if it's all in the wrong mode to form part of an address.
4337 And force_operand won't know whether to sign-extend or
4338 zero-extend. */
4339 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4340 || mode != Pmode) goto binop;
4341
4342 preexpand_calls (exp);
4343 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4344 subtarget = 0;
4345
4346 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
4347 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4348
4349 /* Make sure any term that's a sum with a constant comes last. */
4350 if (GET_CODE (op0) == PLUS
4351 && CONSTANT_P (XEXP (op0, 1)))
4352 {
4353 temp = op0;
4354 op0 = op1;
4355 op1 = temp;
4356 }
4357 /* If adding to a sum including a constant,
4358 associate it to put the constant outside. */
4359 if (GET_CODE (op1) == PLUS
4360 && CONSTANT_P (XEXP (op1, 1)))
4361 {
4362 rtx constant_term = const0_rtx;
4363
4364 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4365 if (temp != 0)
4366 op0 = temp;
4367 /* Ensure that MULT comes first if there is one. */
4368 else if (GET_CODE (op0) == MULT)
4369 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4370 else
4371 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4372
4373 /* Let's also eliminate constants from op0 if possible. */
4374 op0 = eliminate_constant_term (op0, &constant_term);
4375
4376 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4377 their sum should be a constant. Form it into OP1, since the
4378 result we want will then be OP0 + OP1. */
4379
4380 temp = simplify_binary_operation (PLUS, mode, constant_term,
4381 XEXP (op1, 1));
4382 if (temp != 0)
4383 op1 = temp;
4384 else
4385 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4386 }
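 /* Net effect of the two rewrites above: a sum such as
 (x + 3) + (y + 4) is reassociated into (x + y) + 7, keeping one
 combined constant outermost where it can serve as an address
 displacement or be folded further. */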
4387
4388 /* Put a constant term last and put a multiplication first. */
4389 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4390 temp = op1, op1 = op0, op0 = temp;
4391
4392 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4393 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4394
4395 case MINUS_EXPR:
4396 /* Handle difference of two symbolic constants,
4397 for the sake of an initializer. */
4398 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4399 && really_constant_p (TREE_OPERAND (exp, 0))
4400 && really_constant_p (TREE_OPERAND (exp, 1)))
4401 {
4402 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4403 VOIDmode, modifier);
4404 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4405 VOIDmode, modifier);
4406 return gen_rtx (MINUS, mode, op0, op1);
4407 }
4408 /* Convert A - const to A + (-const). */
4409 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4410 {
4411 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4412 fold (build1 (NEGATE_EXPR, type,
4413 TREE_OPERAND (exp, 1))));
4414 goto plus_expr;
4415 }
4416 this_optab = sub_optab;
4417 goto binop;
4418
4419 case MULT_EXPR:
4420 preexpand_calls (exp);
4421 /* If first operand is constant, swap them.
4422 Thus the following special case checks need only
4423 check the second operand. */
4424 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4425 {
4426 register tree t1 = TREE_OPERAND (exp, 0);
4427 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4428 TREE_OPERAND (exp, 1) = t1;
4429 }
4430
4431 /* Attempt to return something suitable for generating an
4432 indexed address, for machines that support that. */
4433
4434 if (modifier == EXPAND_SUM && mode == Pmode
4435 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4436 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4437 {
4438 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4439
4440 /* Apply distributive law if OP0 is x+c. */
4441 if (GET_CODE (op0) == PLUS
4442 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4443 return gen_rtx (PLUS, mode,
4444 gen_rtx (MULT, mode, XEXP (op0, 0),
4445 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4446 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4447 * INTVAL (XEXP (op0, 1))));
4448
4449 if (GET_CODE (op0) != REG)
4450 op0 = force_operand (op0, NULL_RTX);
4451 if (GET_CODE (op0) != REG)
4452 op0 = copy_to_mode_reg (mode, op0);
4453
4454 return gen_rtx (MULT, mode, op0,
4455 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4456 }
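 /* Example of the distributive rewrite above: with OP0 = (plus X 2)
 and a constant multiplier of 3, the result is
 (plus (mult X 3) (const_int 6)), preserving the sum-of-products
 shape that EXPAND_SUM callers can use for addressing. */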
4457
4458 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4459 subtarget = 0;
4460
4461 /* Check for multiplying things that have been extended
4462 from a narrower type. If this machine supports multiplying
4463 in that narrower type with a result in the desired type,
4464 do it that way, and avoid the explicit type-conversion. */
4465 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4466 && TREE_CODE (type) == INTEGER_TYPE
4467 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4468 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4469 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4470 && int_fits_type_p (TREE_OPERAND (exp, 1),
4471 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4472 /* Don't use a widening multiply if a shift will do. */
4473 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
4474 > HOST_BITS_PER_WIDE_INT)
4475 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4476 ||
4477 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4478 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4479 ==
4480 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4481 /* If both operands are extended, they must either both
4482 be zero-extended or both be sign-extended. */
4483 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4484 ==
4485 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4486 {
4487 enum machine_mode innermode
4488 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4489 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4490 ? umul_widen_optab : smul_widen_optab);
4491 if (mode == GET_MODE_WIDER_MODE (innermode)
4492 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4493 {
4494 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4495 NULL_RTX, VOIDmode, 0);
4496 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4497 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4498 VOIDmode, 0);
4499 else
4500 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
4501 NULL_RTX, VOIDmode, 0);
4502 goto binop2;
4503 }
4504 }
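 /* Example (availability is target-dependent; the pattern name is
 illustrative): for `(long) si * (long) sj' on a machine with SImode
 words and a mulsidi-style widening pattern, the operands are
 expanded in SImode and a single widening multiply is emitted,
 rather than two extensions followed by a DImode multiply. */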
4505 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4506 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4507 return expand_mult (mode, op0, op1, target, unsignedp);
4508
4509 case TRUNC_DIV_EXPR:
4510 case FLOOR_DIV_EXPR:
4511 case CEIL_DIV_EXPR:
4512 case ROUND_DIV_EXPR:
4513 case EXACT_DIV_EXPR:
4514 preexpand_calls (exp);
4515 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4516 subtarget = 0;
4517 /* Possible optimization: compute the dividend with EXPAND_SUM
4518 then if the divisor is constant can optimize the case
4519 where some terms of the dividend have coeffs divisible by it. */
4520 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4521 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4522 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4523
4524 case RDIV_EXPR:
4525 this_optab = flodiv_optab;
4526 goto binop;
4527
4528 case TRUNC_MOD_EXPR:
4529 case FLOOR_MOD_EXPR:
4530 case CEIL_MOD_EXPR:
4531 case ROUND_MOD_EXPR:
4532 preexpand_calls (exp);
4533 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4534 subtarget = 0;
4535 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4536 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4537 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4538
4539 case FIX_ROUND_EXPR:
4540 case FIX_FLOOR_EXPR:
4541 case FIX_CEIL_EXPR:
4542 abort (); /* Not used for C. */
4543
4544 case FIX_TRUNC_EXPR:
4545 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4546 if (target == 0)
4547 target = gen_reg_rtx (mode);
4548 expand_fix (target, op0, unsignedp);
4549 return target;
4550
4551 case FLOAT_EXPR:
4552 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4553 if (target == 0)
4554 target = gen_reg_rtx (mode);
4555 /* expand_float can't figure out what to do if FROM has VOIDmode.
4556 So give it the correct mode. With -O, cse will optimize this. */
4557 if (GET_MODE (op0) == VOIDmode)
4558 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4559 op0);
4560 expand_float (target, op0,
4561 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4562 return target;
4563
4564 case NEGATE_EXPR:
4565 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4566 temp = expand_unop (mode, neg_optab, op0, target, 0);
4567 if (temp == 0)
4568 abort ();
4569 return temp;
4570
4571 case ABS_EXPR:
4572 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4573
4574 /* Handle complex values specially. */
4575 {
4576 enum machine_mode opmode
4577 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4578
4579 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4580 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4581 return expand_complex_abs (opmode, op0, target, unsignedp);
4582 }
4583
4584 /* Unsigned abs is simply the operand. Testing here means we don't
4585 risk generating incorrect code below. */
4586 if (TREE_UNSIGNED (type))
4587 return op0;
4588
4589 /* First try to do it with a special abs instruction. */
4590 temp = expand_unop (mode, abs_optab, op0, target, 0);
4591 if (temp != 0)
4592 return temp;
4593
4594 /* If this machine has expensive jumps, we can do integer absolute
4595 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4596 where W is the width of MODE. */
4597
4598 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4599 {
4600 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4601 size_int (GET_MODE_BITSIZE (mode) - 1),
4602 NULL_RTX, 0);
4603
4604 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4605 OPTAB_LIB_WIDEN);
4606 if (temp != 0)
4607 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4608 OPTAB_LIB_WIDEN);
4609
4610 if (temp != 0)
4611 return temp;
4612 }
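 /* Worked instance of the identity above for a 32-bit MODE: let
 T = X >> 31 (arithmetic shift), so T is 0 when X >= 0 and -1 when
 X < 0. Then (X ^ T) - T is X when T = 0, and ~X + 1 = -X when
 T = -1; either way the result is the absolute value, with no
 branch. */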
4613
4614 /* If that does not win, use conditional jump and negate. */
4615 target = original_target;
4616 temp = gen_label_rtx ();
4617 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4618 || (GET_CODE (target) == REG
4619 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4620 target = gen_reg_rtx (mode);
4621 emit_move_insn (target, op0);
4622 emit_cmp_insn (target,
4623 expand_expr (convert (type, integer_zero_node),
4624 NULL_RTX, VOIDmode, 0),
4625 GE, NULL_RTX, mode, 0, 0);
4626 NO_DEFER_POP;
4627 emit_jump_insn (gen_bge (temp));
4628 op0 = expand_unop (mode, neg_optab, target, target, 0);
4629 if (op0 != target)
4630 emit_move_insn (target, op0);
4631 emit_label (temp);
4632 OK_DEFER_POP;
4633 return target;
4634
4635 case MAX_EXPR:
4636 case MIN_EXPR:
4637 target = original_target;
4638 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4639 || (GET_CODE (target) == REG
4640 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4641 target = gen_reg_rtx (mode);
4642 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4643 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4644
4645 /* First try to do it with a special MIN or MAX instruction.
4646 If that does not win, use a conditional jump to select the proper
4647 value. */
4648 this_optab = (TREE_UNSIGNED (type)
4649 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4650 : (code == MIN_EXPR ? smin_optab : smax_optab));
4651
4652 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4653 OPTAB_WIDEN);
4654 if (temp != 0)
4655 return temp;
4656
4657 if (target != op0)
4658 emit_move_insn (target, op0);
4659 op0 = gen_label_rtx ();
4660 /* If this mode is an integer too wide to compare properly,
4661 compare word by word. Rely on cse to optimize constant cases. */
4662 if (GET_MODE_CLASS (mode) == MODE_INT
4663 && !can_compare_p (mode))
4664 {
4665 if (code == MAX_EXPR)
4666 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
4667 else
4668 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
4669 emit_move_insn (target, op1);
4670 }
4671 else
4672 {
4673 if (code == MAX_EXPR)
4674 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4675 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4676 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4677 else
4678 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4679 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4680 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4681 if (temp == const0_rtx)
4682 emit_move_insn (target, op1);
4683 else if (temp != const_true_rtx)
4684 {
4685 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4686 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4687 else
4688 abort ();
4689 emit_move_insn (target, op1);
4690 }
4691 }
4692 emit_label (op0);
4693 return target;
4694
4695/* ??? Can optimize when the operand of this is a bitwise operation,
4696 by using a different bitwise operation. */
4697 case BIT_NOT_EXPR:
4698 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4699 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4700 if (temp == 0)
4701 abort ();
4702 return temp;
4703
4704 case FFS_EXPR:
4705 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4706 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4707 if (temp == 0)
4708 abort ();
4709 return temp;
4710
4711/* ??? Can optimize bitwise operations with one arg constant.
4712 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4713 and (a bitwise1 b) bitwise2 b (etc)
4714 but that is probably not worthwhile. */
4715
4716/* BIT_AND_EXPR is for bitwise anding.
4717 TRUTH_AND_EXPR is for anding two boolean values
4718 when we want in all cases to compute both of them.
4719 In general it is fastest to do TRUTH_AND_EXPR by
4720 computing both operands as actual zero-or-1 values
4721 and then bitwise anding. In cases where there cannot
4722 be any side effects, better code would be made by
4723 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4724 but the question is how to recognize those cases. */
4725
4726 case TRUTH_AND_EXPR:
4727 case BIT_AND_EXPR:
4728 this_optab = and_optab;
4729 goto binop;
4730
4731/* See comment above about TRUTH_AND_EXPR; it applies here too. */
4732 case TRUTH_OR_EXPR:
4733 case BIT_IOR_EXPR:
4734 this_optab = ior_optab;
4735 goto binop;
4736
4737 case TRUTH_XOR_EXPR:
4738 case BIT_XOR_EXPR:
4739 this_optab = xor_optab;
4740 goto binop;
4741
4742 case LSHIFT_EXPR:
4743 case RSHIFT_EXPR:
4744 case LROTATE_EXPR:
4745 case RROTATE_EXPR:
4746 preexpand_calls (exp);
4747 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4748 subtarget = 0;
4749 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4750 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4751 unsignedp);
4752
4753/* Could determine the answer when only additive constants differ.
4754 Also, the addition of one can be handled by changing the condition. */
4755 case LT_EXPR:
4756 case LE_EXPR:
4757 case GT_EXPR:
4758 case GE_EXPR:
4759 case EQ_EXPR:
4760 case NE_EXPR:
4761 preexpand_calls (exp);
4762 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4763 if (temp != 0)
4764 return temp;
4765 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4766 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4767 && original_target
4768 && GET_CODE (original_target) == REG
4769 && (GET_MODE (original_target)
4770 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4771 {
4772 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4773 if (temp != original_target)
4774 temp = copy_to_reg (temp);
4775 op1 = gen_label_rtx ();
4776 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4777 GET_MODE (temp), unsignedp, 0);
4778 emit_jump_insn (gen_beq (op1));
4779 emit_move_insn (temp, const1_rtx);
4780 emit_label (op1);
4781 return temp;
4782 }
4783 /* If no set-flag instruction, must generate a conditional
4784 store into a temporary variable. Drop through
4785 and handle this like && and ||. */
4786
4787 case TRUTH_ANDIF_EXPR:
4788 case TRUTH_ORIF_EXPR:
4789 if (! ignore
4790 && (target == 0 || ! safe_from_p (target, exp)
4791 /* Make sure we don't have a hard reg (such as function's return
4792 value) live across basic blocks, if not optimizing. */
4793 || (!optimize && GET_CODE (target) == REG
4794 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
4795 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4796
4797 if (target)
4798 emit_clr_insn (target);
4799
4800 op1 = gen_label_rtx ();
4801 jumpifnot (exp, op1);
4802
4803 if (target)
4804 emit_0_to_1_insn (target);
4805
4806 emit_label (op1);
4807 return ignore ? const0_rtx : target;
4808
4809 case TRUTH_NOT_EXPR:
4810 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4811 /* The parser is careful to generate TRUTH_NOT_EXPR
4812 only with operands that are always zero or one. */
4813 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4814 target, 1, OPTAB_LIB_WIDEN);
4815 if (temp == 0)
4816 abort ();
4817 return temp;
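 /* Since the operand is known to be 0 or 1, the XOR with 1 above is
 exact logical negation: 0^1 = 1 and 1^1 = 0. */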
4818
4819 case COMPOUND_EXPR:
4820 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4821 emit_queue ();
4822 return expand_expr (TREE_OPERAND (exp, 1),
4823 (ignore ? const0_rtx : target),
4824 VOIDmode, 0);
4825
4826 case COND_EXPR:
4827 {
4828 /* Note that COND_EXPRs whose type is a structure or union
4829 are required to be constructed to contain assignments of
4830 a temporary variable, so that we can evaluate them here
4831 for side effect only. If type is void, we must do likewise. */
4832
4833 /* If an arm of the branch requires a cleanup,
4834 only that cleanup is performed. */
4835
4836 tree singleton = 0;
4837 tree binary_op = 0, unary_op = 0;
4838 tree old_cleanups = cleanups_this_call;
4839 cleanups_this_call = 0;
4840
4841 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4842 convert it to our mode, if necessary. */
4843 if (integer_onep (TREE_OPERAND (exp, 1))
4844 && integer_zerop (TREE_OPERAND (exp, 2))
4845 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4846 {
4847 if (ignore)
4848 {
4849 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4850 modifier);
4851 return const0_rtx;
4852 }
4853
4854 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4855 if (GET_MODE (op0) == mode)
4856 return op0;
4857 if (target == 0)
4858 target = gen_reg_rtx (mode);
4859 convert_move (target, op0, unsignedp);
4860 return target;
4861 }
4862
4863 /* If we are not to produce a result, we have no target. Otherwise,
4864 if a target was specified use it; it will not be used as an
4865 intermediate target unless it is safe. If no target, use a
4866 temporary. */
4867
4868 if (ignore)
4869 temp = 0;
4870 else if (original_target
4871 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4872 temp = original_target;
4873 else if (mode == BLKmode)
4874 {
4875 if (TYPE_SIZE (type) == 0
4876 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4877 abort ();
4878 temp = assign_stack_temp (BLKmode,
4879 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4880 + BITS_PER_UNIT - 1)
4881 / BITS_PER_UNIT, 0);
4882 }
4883 else
4884 temp = gen_reg_rtx (mode);
4885
4886 /* Check for X ? A + B : A. If we have this, we can copy
4887 A to the output and conditionally add B. Similarly for unary
4888 operations. Don't do this if X has side-effects because
4889 those side effects might affect A or B and the "?" operation is
4890 a sequence point in ANSI. (We test for side effects later.) */
4891
4892 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4893 && operand_equal_p (TREE_OPERAND (exp, 2),
4894 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4895 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4896 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4897 && operand_equal_p (TREE_OPERAND (exp, 1),
4898 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4899 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4900 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4901 && operand_equal_p (TREE_OPERAND (exp, 2),
4902 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4903 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4904 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4905 && operand_equal_p (TREE_OPERAND (exp, 1),
4906 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4907 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4908
4909 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4910 operation, do this as A + (X != 0). Similarly for other simple
4911 binary operators. */
4912 if (temp && singleton && binary_op
4913 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4914 && (TREE_CODE (binary_op) == PLUS_EXPR
4915 || TREE_CODE (binary_op) == MINUS_EXPR
4916 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4917 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4918 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4919 && integer_onep (TREE_OPERAND (binary_op, 1))
4920 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4921 {
4922 rtx result;
4923 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4924 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4925 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4926 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4927 : and_optab);
4928
4929 /* If we had X ? A : A + 1, do this as A + (X == 0).
4930
4931 We have to invert the truth value here and then put it
4932 back later if do_store_flag fails. We cannot simply copy
4933 TREE_OPERAND (exp, 0) to another variable and modify that
4934 because invert_truthvalue can modify the tree pointed to
4935 by its argument. */
4936 if (singleton == TREE_OPERAND (exp, 1))
4937 TREE_OPERAND (exp, 0)
4938 = invert_truthvalue (TREE_OPERAND (exp, 0));
4939
4940 result = do_store_flag (TREE_OPERAND (exp, 0),
4941 (safe_from_p (temp, singleton)
4942 ? temp : NULL_RTX),
4943 mode, BRANCH_COST <= 1);
4944
4945 if (result)
4946 {
4947 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4948 return expand_binop (mode, boptab, op1, result, temp,
4949 unsignedp, OPTAB_LIB_WIDEN);
4950 }
4951 else if (singleton == TREE_OPERAND (exp, 1))
4952 TREE_OPERAND (exp, 0)
4953 = invert_truthvalue (TREE_OPERAND (exp, 0));
4954 }
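 /* Example of the transformation above: `a < b ? x + 1 : x' becomes
 `x + (a < b)', with do_store_flag materializing the comparison as a
 0-or-1 value, so no conditional jump is needed (when the target has
 a usable store-flag instruction). */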
4955
4956 NO_DEFER_POP;
4957 op0 = gen_label_rtx ();
4958
4959 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4960 {
4961 if (temp != 0)
4962 {
4963 /* If the target conflicts with the other operand of the
4964 binary op, we can't use it. Also, we can't use the target
4965 if it is a hard register, because evaluating the condition
4966 might clobber it. */
4967 if ((binary_op
4968 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4969 || (GET_CODE (temp) == REG
4970 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4971 temp = gen_reg_rtx (mode);
4972 store_expr (singleton, temp, 0);
4973 }
4974 else
4975 expand_expr (singleton,
4976 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4977 if (cleanups_this_call)
4978 {
4979 sorry ("aggregate value in COND_EXPR");
4980 cleanups_this_call = 0;
4981 }
4982 if (singleton == TREE_OPERAND (exp, 1))
4983 jumpif (TREE_OPERAND (exp, 0), op0);
4984 else
4985 jumpifnot (TREE_OPERAND (exp, 0), op0);
4986
4987 if (binary_op && temp == 0)
4988 /* Just touch the other operand. */
4989 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 4990 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4991 else if (binary_op)
4992 store_expr (build (TREE_CODE (binary_op), type,
4993 make_tree (type, temp),
4994 TREE_OPERAND (binary_op, 1)),
4995 temp, 0);
4996 else
4997 store_expr (build1 (TREE_CODE (unary_op), type,
4998 make_tree (type, temp)),
4999 temp, 0);
5000 op1 = op0;
5001 }
5002#if 0
5003 /* This is now done in jump.c and is better done there because it
5004 produces shorter register lifetimes. */
5005
 5006 /* Check for both possibilities, either constants or variables
 5007 in registers (but not the same as the target!). If so, we can
 5008 save branches by assigning one, branching, and assigning the
 5009 other. */
5010 else if (temp && GET_MODE (temp) != BLKmode
5011 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5012 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5013 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5014 && DECL_RTL (TREE_OPERAND (exp, 1))
5015 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5016 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5017 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5018 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5019 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5020 && DECL_RTL (TREE_OPERAND (exp, 2))
5021 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5022 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5023 {
5024 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5025 temp = gen_reg_rtx (mode);
5026 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5027 jumpifnot (TREE_OPERAND (exp, 0), op0);
5028 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5029 op1 = op0;
5030 }
5031#endif
5032 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5033 comparison operator. If we have one of these cases, set the
5034 output to A, branch on A (cse will merge these two references),
5035 then set the output to FOO. */
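      /* Illustration (editor's note): for `x > 0 ? x : y' the code below
         emits the equivalent of
             temp = x; if (x > 0) goto done; temp = y; done:;
         and cse later merges the two references to X. */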
5036 else if (temp
5037 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5038 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5039 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5040 TREE_OPERAND (exp, 1), 0)
5041 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5042 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5043 {
5044 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5045 temp = gen_reg_rtx (mode);
5046 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5047 jumpif (TREE_OPERAND (exp, 0), op0);
5048 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5049 op1 = op0;
5050 }
5051 else if (temp
5052 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5053 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5054 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5055 TREE_OPERAND (exp, 2), 0)
5056 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5057 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5058 {
5059 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5060 temp = gen_reg_rtx (mode);
5061 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5062 jumpifnot (TREE_OPERAND (exp, 0), op0);
5063 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5064 op1 = op0;
5065 }
5066 else
5067 {
5068 op1 = gen_label_rtx ();
5069 jumpifnot (TREE_OPERAND (exp, 0), op0);
5070 if (temp != 0)
5071 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5072 else
906c4e36
RK
5073 expand_expr (TREE_OPERAND (exp, 1),
5074 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5075 if (cleanups_this_call)
5076 {
5077 sorry ("aggregate value in COND_EXPR");
5078 cleanups_this_call = 0;
5079 }
5080
5081 emit_queue ();
5082 emit_jump_insn (gen_jump (op1));
5083 emit_barrier ();
5084 emit_label (op0);
5085 if (temp != 0)
5086 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5087 else
906c4e36
RK
5088 expand_expr (TREE_OPERAND (exp, 2),
5089 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5090 }
5091
5092 if (cleanups_this_call)
5093 {
5094 sorry ("aggregate value in COND_EXPR");
5095 cleanups_this_call = 0;
5096 }
5097
5098 emit_queue ();
5099 emit_label (op1);
5100 OK_DEFER_POP;
5101 cleanups_this_call = old_cleanups;
5102 return temp;
5103 }
5104
5105 case TARGET_EXPR:
5106 {
5107 /* Something needs to be initialized, but we didn't know
5108 where that thing was when building the tree. For example,
5109 it could be the return value of a function, or a parameter
 5110 to a function which is laid out on the stack, or a temporary
5111 variable which must be passed by reference.
5112
5113 We guarantee that the expression will either be constructed
5114 or copied into our original target. */
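	/* Illustration (editor's note): a typical source of a TARGET_EXPR is
	       struct S f (void);
	       struct S s = f ();
	   where the call must construct its aggregate result directly in
	   the slot reserved for S. */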
5115
5116 tree slot = TREE_OPERAND (exp, 0);
5c062816 5117 tree exp1;
bbf6f052
RK
5118
5119 if (TREE_CODE (slot) != VAR_DECL)
5120 abort ();
5121
5122 if (target == 0)
5123 {
5124 if (DECL_RTL (slot) != 0)
ac993f4f
MS
5125 {
5126 target = DECL_RTL (slot);
5c062816 5127 /* We have already expanded the slot, so don't do
ac993f4f 5128 it again. (mrs) */
5c062816
MS
5129 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5130 return target;
ac993f4f 5131 }
bbf6f052
RK
5132 else
5133 {
5134 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5135 /* All temp slots at this level must not conflict. */
5136 preserve_temp_slots (target);
5137 DECL_RTL (slot) = target;
5138 }
5139
5140#if 0
ac993f4f
MS
5141 /* I bet this needs to be done, and I bet that it needs to
5142 be above, inside the else clause. The reason is
5143 simple, how else is it going to get cleaned up? (mrs)
5144
 5145 The reason this probably did not work before, and was
 5146 commented out, is that it was re-expanding already
 5147 expanded target_exprs (target == 0 and DECL_RTL (slot)
 5148 != 0), also cleaning them up many times as well. :-( */
5149
bbf6f052
RK
5150 /* Since SLOT is not known to the called function
5151 to belong to its stack frame, we must build an explicit
5152 cleanup. This case occurs when we must build up a reference
5153 to pass the reference as an argument. In this case,
5154 it is very likely that such a reference need not be
5155 built here. */
5156
5157 if (TREE_OPERAND (exp, 2) == 0)
5158 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5159 if (TREE_OPERAND (exp, 2))
906c4e36
RK
5160 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5161 cleanups_this_call);
bbf6f052
RK
5162#endif
5163 }
5164 else
5165 {
 5166 /* This case does occur when expanding a parameter which
5167 needs to be constructed on the stack. The target
5168 is the actual stack address that we want to initialize.
5169 The function we call will perform the cleanup in this case. */
5170
8c042b47
RS
 5171 /* If we have already assigned it space, use that space,
 5172 not the target we were passed in, as our target
 5173 parameter is only a hint. */
5174 if (DECL_RTL (slot) != 0)
5175 {
5176 target = DECL_RTL (slot);
 5177 /* We have already expanded the slot, so don't do
 5178 it again. (mrs) */
5179 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5180 return target;
5181 }
5182
bbf6f052
RK
5183 DECL_RTL (slot) = target;
5184 }
5185
5c062816
MS
5186 exp1 = TREE_OPERAND (exp, 1);
5187 /* Mark it as expanded. */
5188 TREE_OPERAND (exp, 1) = NULL_TREE;
5189
5190 return expand_expr (exp1, target, tmode, modifier);
bbf6f052
RK
5191 }
5192
5193 case INIT_EXPR:
5194 {
5195 tree lhs = TREE_OPERAND (exp, 0);
5196 tree rhs = TREE_OPERAND (exp, 1);
5197 tree noncopied_parts = 0;
5198 tree lhs_type = TREE_TYPE (lhs);
5199
5200 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5201 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5202 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5203 TYPE_NONCOPIED_PARTS (lhs_type));
5204 while (noncopied_parts != 0)
5205 {
5206 expand_assignment (TREE_VALUE (noncopied_parts),
5207 TREE_PURPOSE (noncopied_parts), 0, 0);
5208 noncopied_parts = TREE_CHAIN (noncopied_parts);
5209 }
5210 return temp;
5211 }
5212
5213 case MODIFY_EXPR:
5214 {
5215 /* If lhs is complex, expand calls in rhs before computing it.
5216 That's so we don't compute a pointer and save it over a call.
5217 If lhs is simple, compute it first so we can give it as a
5218 target if the rhs is just a call. This avoids an extra temp and copy
5219 and that prevents a partial-subsumption which makes bad code.
5220 Actually we could treat component_ref's of vars like vars. */
5221
5222 tree lhs = TREE_OPERAND (exp, 0);
5223 tree rhs = TREE_OPERAND (exp, 1);
5224 tree noncopied_parts = 0;
5225 tree lhs_type = TREE_TYPE (lhs);
5226
5227 temp = 0;
5228
5229 if (TREE_CODE (lhs) != VAR_DECL
5230 && TREE_CODE (lhs) != RESULT_DECL
5231 && TREE_CODE (lhs) != PARM_DECL)
5232 preexpand_calls (exp);
5233
5234 /* Check for |= or &= of a bitfield of size one into another bitfield
5235 of size 1. In this case, (unless we need the result of the
5236 assignment) we can do this more efficiently with a
5237 test followed by an assignment, if necessary.
5238
5239 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5240 things change so we do, this code should be enhanced to
5241 support it. */
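	/* Illustration (editor's note): for
	       struct { unsigned a : 1, b : 1; } s;
	       s.a |= s.b;
	   the code below emits the equivalent of `if (s.b) s.a = 1;',
	   and for &= the equivalent of `if (! s.b) s.a = 0;'. */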
5242 if (ignore
5243 && TREE_CODE (lhs) == COMPONENT_REF
5244 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5245 || TREE_CODE (rhs) == BIT_AND_EXPR)
5246 && TREE_OPERAND (rhs, 0) == lhs
5247 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5248 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5249 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5250 {
5251 rtx label = gen_label_rtx ();
5252
5253 do_jump (TREE_OPERAND (rhs, 1),
5254 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5255 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5256 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5257 (TREE_CODE (rhs) == BIT_IOR_EXPR
5258 ? integer_one_node
5259 : integer_zero_node)),
5260 0, 0);
e7c33f54 5261 do_pending_stack_adjust ();
bbf6f052
RK
5262 emit_label (label);
5263 return const0_rtx;
5264 }
5265
5266 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5267 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5268 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5269 TYPE_NONCOPIED_PARTS (lhs_type));
5270
5271 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5272 while (noncopied_parts != 0)
5273 {
5274 expand_assignment (TREE_PURPOSE (noncopied_parts),
5275 TREE_VALUE (noncopied_parts), 0, 0);
5276 noncopied_parts = TREE_CHAIN (noncopied_parts);
5277 }
5278 return temp;
5279 }
5280
5281 case PREINCREMENT_EXPR:
5282 case PREDECREMENT_EXPR:
5283 return expand_increment (exp, 0);
5284
5285 case POSTINCREMENT_EXPR:
5286 case POSTDECREMENT_EXPR:
5287 /* Faster to treat as pre-increment if result is not used. */
5288 return expand_increment (exp, ! ignore);
5289
5290 case ADDR_EXPR:
5291 /* Are we taking the address of a nested function? */
5292 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5293 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5294 {
5295 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5296 op0 = force_operand (op0, target);
5297 }
5298 else
5299 {
906c4e36 5300 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
bbf6f052
RK
5301 (modifier == EXPAND_INITIALIZER
5302 ? modifier : EXPAND_CONST_ADDRESS));
896102d0
RK
5303
5304 /* We would like the object in memory. If it is a constant,
5305 we can have it be statically allocated into memory. For
5306 a non-constant (REG or SUBREG), we need to allocate some
5307 memory and store the value into it. */
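	/* Illustration (editor's note): for `&x' where X lives in a pseudo
	   register, the code below copies X into a fresh stack temporary
	   and returns that slot's address, since a REG has no address. */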
5308
5309 if (CONSTANT_P (op0))
5310 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5311 op0);
5312
5313 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
5314 {
 5315 /* If this object is in a register, it must not
 5316 be BLKmode. */
5317 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5318 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5319 rtx memloc
5320 = assign_stack_temp (inner_mode,
5321 int_size_in_bytes (inner_type), 1);
5322
5323 emit_move_insn (memloc, op0);
5324 op0 = memloc;
5325 }
5326
bbf6f052
RK
5327 if (GET_CODE (op0) != MEM)
5328 abort ();
5329
5330 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5331 return XEXP (op0, 0);
5332 op0 = force_operand (XEXP (op0, 0), target);
5333 }
5334 if (flag_force_addr && GET_CODE (op0) != REG)
5335 return force_reg (Pmode, op0);
5336 return op0;
5337
5338 case ENTRY_VALUE_EXPR:
5339 abort ();
5340
7308a047
RS
5341 /* COMPLEX type for Extended Pascal & Fortran */
5342 case COMPLEX_EXPR:
5343 {
5344 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5345
5346 rtx prev;
5347
5348 /* Get the rtx code of the operands. */
5349 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5350 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5351
5352 if (! target)
5353 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5354
5355 prev = get_last_insn ();
5356
5357 /* Tell flow that the whole of the destination is being set. */
5358 if (GET_CODE (target) == REG)
5359 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5360
5361 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
5362 emit_move_insn (gen_realpart (mode, target), op0);
5363 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047
RS
5364
5365 /* Complex construction should appear as a single unit. */
5366 group_insns (prev);
5367
5368 return target;
5369 }
5370
5371 case REALPART_EXPR:
2d7050fd
RS
5372 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5373 return gen_realpart (mode, op0);
7308a047
RS
5374
5375 case IMAGPART_EXPR:
2d7050fd
RS
5376 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5377 return gen_imagpart (mode, op0);
7308a047
RS
5378
5379 case CONJ_EXPR:
5380 {
5381 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5382 rtx imag_t;
5383 rtx prev;
5384
5385 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5386
5387 if (! target)
5388 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5389
5390 prev = get_last_insn ();
5391
5392 /* Tell flow that the whole of the destination is being set. */
5393 if (GET_CODE (target) == REG)
5394 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5395
5396 /* Store the realpart and the negated imagpart to target. */
2d7050fd 5397 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
7308a047 5398
2d7050fd 5399 imag_t = gen_imagpart (mode, target);
7308a047 5400 temp = expand_unop (mode, neg_optab,
2d7050fd 5401 gen_imagpart (mode, op0), imag_t, 0);
7308a047
RS
5402 if (temp != imag_t)
5403 emit_move_insn (imag_t, temp);
5404
5405 /* Conjugate should appear as a single unit */
5406 group_insns (prev);
5407
5408 return target;
5409 }
5410
bbf6f052 5411 case ERROR_MARK:
66538193
RS
5412 op0 = CONST0_RTX (tmode);
5413 if (op0 != 0)
5414 return op0;
bbf6f052
RK
5415 return const0_rtx;
5416
5417 default:
5418 return (*lang_expand_expr) (exp, target, tmode, modifier);
5419 }
5420
5421 /* Here to do an ordinary binary operator, generating an instruction
5422 from the optab already placed in `this_optab'. */
5423 binop:
5424 preexpand_calls (exp);
5425 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5426 subtarget = 0;
5427 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 5428 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5429 binop2:
5430 temp = expand_binop (mode, this_optab, op0, op1, target,
5431 unsignedp, OPTAB_LIB_WIDEN);
5432 if (temp == 0)
5433 abort ();
5434 return temp;
5435}
5436\f
e87b4f3f
RS
5437/* Return the alignment in bits of EXP, a pointer valued expression.
5438 But don't return more than MAX_ALIGN no matter what.
bbf6f052
RK
5439 The alignment returned is, by default, the alignment of the thing that
5440 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5441
5442 Otherwise, look at the expression to see if we can do better, i.e., if the
5443 expression is actually pointing at an object whose alignment is tighter. */
5444
5445static int
5446get_pointer_alignment (exp, max_align)
5447 tree exp;
5448 unsigned max_align;
5449{
5450 unsigned align, inner;
5451
5452 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5453 return 0;
5454
5455 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5456 align = MIN (align, max_align);
5457
5458 while (1)
5459 {
5460 switch (TREE_CODE (exp))
5461 {
5462 case NOP_EXPR:
5463 case CONVERT_EXPR:
5464 case NON_LVALUE_EXPR:
5465 exp = TREE_OPERAND (exp, 0);
5466 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5467 return align;
5468 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5469 inner = MIN (inner, max_align);
5470 align = MAX (align, inner);
5471 break;
5472
5473 case PLUS_EXPR:
5474 /* If sum of pointer + int, restrict our maximum alignment to that
5475 imposed by the integer. If not, we can't do any better than
5476 ALIGN. */
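	/* Illustration (editor's note): with max_align == 32 bits, an
	   offset of 2 bytes is 16 bits, and 16 & 31 != 0, so max_align
	   shrinks to 16: `p + 2' is at best 2-byte aligned. */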
5477 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5478 return align;
5479
e87b4f3f
RS
5480 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5481 & (max_align - 1))
5482 != 0)
bbf6f052
RK
5483 max_align >>= 1;
5484
5485 exp = TREE_OPERAND (exp, 0);
5486 break;
5487
5488 case ADDR_EXPR:
5489 /* See what we are pointing at and look at its alignment. */
5490 exp = TREE_OPERAND (exp, 0);
e7c33f54
RK
5491 if (TREE_CODE (exp) == FUNCTION_DECL)
5492 align = MAX (align, FUNCTION_BOUNDARY);
5493 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
bbf6f052
RK
5494 align = MAX (align, DECL_ALIGN (exp));
5495#ifdef CONSTANT_ALIGNMENT
5496 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5497 align = CONSTANT_ALIGNMENT (exp, align);
5498#endif
5499 return MIN (align, max_align);
5500
5501 default:
5502 return align;
5503 }
5504 }
5505}
5506\f
5507/* Return the tree node and offset if a given argument corresponds to
5508 a string constant. */
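/* Illustration (editor's note): for the argument `"abc" + i' this
   returns the STRING_CST for "abc" and sets *PTR_OFFSET to I; for
   a plain `"abc"' the offset is integer_zero_node. */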
5509
5510static tree
5511string_constant (arg, ptr_offset)
5512 tree arg;
5513 tree *ptr_offset;
5514{
5515 STRIP_NOPS (arg);
5516
5517 if (TREE_CODE (arg) == ADDR_EXPR
5518 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5519 {
5520 *ptr_offset = integer_zero_node;
5521 return TREE_OPERAND (arg, 0);
5522 }
5523 else if (TREE_CODE (arg) == PLUS_EXPR)
5524 {
5525 tree arg0 = TREE_OPERAND (arg, 0);
5526 tree arg1 = TREE_OPERAND (arg, 1);
5527
5528 STRIP_NOPS (arg0);
5529 STRIP_NOPS (arg1);
5530
5531 if (TREE_CODE (arg0) == ADDR_EXPR
5532 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5533 {
5534 *ptr_offset = arg1;
5535 return TREE_OPERAND (arg0, 0);
5536 }
5537 else if (TREE_CODE (arg1) == ADDR_EXPR
5538 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5539 {
5540 *ptr_offset = arg0;
5541 return TREE_OPERAND (arg1, 0);
5542 }
5543 }
5544
5545 return 0;
5546}
5547
5548/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5549 way, because it could contain a zero byte in the middle.
5550 TREE_STRING_LENGTH is the size of the character array, not the string.
5551
 5552 Unfortunately, string_constant can't access the values of const char
 5553 arrays with initializers, so neither can we here. */
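/* Illustration (editor's note): c_strlen of `"foobar" + 2' folds to
   size_int (4), while c_strlen of `"foo\0bar" + i', with I not a
   constant, returns 0: the internal zero makes the answer depend
   on the unknown offset. */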
5554
5555static tree
5556c_strlen (src)
5557 tree src;
5558{
5559 tree offset_node;
5560 int offset, max;
5561 char *ptr;
5562
5563 src = string_constant (src, &offset_node);
5564 if (src == 0)
5565 return 0;
5566 max = TREE_STRING_LENGTH (src);
5567 ptr = TREE_STRING_POINTER (src);
5568 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5569 {
5570 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5571 compute the offset to the following null if we don't know where to
5572 start searching for it. */
5573 int i;
5574 for (i = 0; i < max; i++)
5575 if (ptr[i] == 0)
5576 return 0;
5577 /* We don't know the starting offset, but we do know that the string
5578 has no internal zero bytes. We can assume that the offset falls
5579 within the bounds of the string; otherwise, the programmer deserves
5580 what he gets. Subtract the offset from the length of the string,
5581 and return that. */
5582 /* This would perhaps not be valid if we were dealing with named
5583 arrays in addition to literal string constants. */
5584 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5585 }
5586
5587 /* We have a known offset into the string. Start searching there for
5588 a null character. */
5589 if (offset_node == 0)
5590 offset = 0;
5591 else
5592 {
5593 /* Did we get a long long offset? If so, punt. */
5594 if (TREE_INT_CST_HIGH (offset_node) != 0)
5595 return 0;
5596 offset = TREE_INT_CST_LOW (offset_node);
5597 }
5598 /* If the offset is known to be out of bounds, warn, and call strlen at
5599 runtime. */
5600 if (offset < 0 || offset > max)
5601 {
5602 warning ("offset outside bounds of constant string");
5603 return 0;
5604 }
5605 /* Use strlen to search for the first zero byte. Since any strings
5606 constructed with build_string will have nulls appended, we win even
5607 if we get handed something like (char[4])"abcd".
5608
5609 Since OFFSET is our starting index into the string, no further
5610 calculation is needed. */
5611 return size_int (strlen (ptr + offset));
5612}
5613\f
5614/* Expand an expression EXP that calls a built-in function,
5615 with result going to TARGET if that's convenient
5616 (and in mode MODE if that's convenient).
5617 SUBTARGET may be used as the target for computing one of EXP's operands.
5618 IGNORE is nonzero if the value is to be ignored. */
5619
5620static rtx
5621expand_builtin (exp, target, subtarget, mode, ignore)
5622 tree exp;
5623 rtx target;
5624 rtx subtarget;
5625 enum machine_mode mode;
5626 int ignore;
5627{
5628 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5629 tree arglist = TREE_OPERAND (exp, 1);
5630 rtx op0;
60bac6ea 5631 rtx lab1, insns;
bbf6f052 5632 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1bbddf11 5633 optab builtin_optab;
bbf6f052
RK
5634
5635 switch (DECL_FUNCTION_CODE (fndecl))
5636 {
5637 case BUILT_IN_ABS:
5638 case BUILT_IN_LABS:
5639 case BUILT_IN_FABS:
5640 /* build_function_call changes these into ABS_EXPR. */
5641 abort ();
5642
1bbddf11
JVA
5643 case BUILT_IN_SIN:
5644 case BUILT_IN_COS:
e87b4f3f
RS
5645 case BUILT_IN_FSQRT:
5646 /* If not optimizing, call the library function. */
8c8a8e34 5647 if (! optimize)
e87b4f3f
RS
5648 break;
5649
5650 if (arglist == 0
19deaec9 5651 /* Arg could be wrong type if user redeclared this fcn wrong. */
e87b4f3f 5652 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
19deaec9 5653 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
e87b4f3f 5654
db0e6d01
RS
5655 /* Stabilize and compute the argument. */
5656 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5657 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5658 {
5659 exp = copy_node (exp);
5660 arglist = copy_node (arglist);
5661 TREE_OPERAND (exp, 1) = arglist;
5662 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5663 }
e87b4f3f 5664 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
e7c33f54
RK
5665
5666 /* Make a suitable register to place result in. */
5667 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5668
c1f7c223 5669 emit_queue ();
8c8a8e34 5670 start_sequence ();
e7c33f54 5671
1bbddf11
JVA
5672 switch (DECL_FUNCTION_CODE (fndecl))
5673 {
5674 case BUILT_IN_SIN:
5675 builtin_optab = sin_optab; break;
5676 case BUILT_IN_COS:
5677 builtin_optab = cos_optab; break;
5678 case BUILT_IN_FSQRT:
5679 builtin_optab = sqrt_optab; break;
5680 default:
5681 abort ();
5682 }
5683
5684 /* Compute into TARGET.
e87b4f3f
RS
5685 Set TARGET to wherever the result comes back. */
5686 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
1bbddf11 5687 builtin_optab, op0, target, 0);
e7c33f54
RK
5688
5689 /* If we were unable to expand via the builtin, stop the
5690 sequence (without outputting the insns) and break, causing
 5691 a call to the library function. */
e87b4f3f 5692 if (target == 0)
e7c33f54 5693 {
8c8a8e34 5694 end_sequence ();
e7c33f54
RK
5695 break;
5696 }
e87b4f3f 5697
60bac6ea
RS
5698 /* Check the results by default. But if flag_fast_math is turned on,
5699 then assume sqrt will always be called with valid arguments. */
5700
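      /* Illustration (editor's note): hardware sqrt of a negative
	 argument yields NaN, and NaN compares unequal even to itself,
	 so the EQ test below falls through to the code that sets
	 errno to EDOM, as the library function would. */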
5701 if (! flag_fast_math)
5702 {
1bbddf11 5703 /* Don't define the builtin FP instructions
60bac6ea
RS
5704 if your machine is not IEEE. */
5705 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5706 abort ();
5707
5708 lab1 = gen_label_rtx ();
5709
5710 /* Test the result; if it is NaN, set errno=EDOM because
5711 the argument was not in the domain. */
5712 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5713 emit_jump_insn (gen_beq (lab1));
5714
5715#if TARGET_EDOM
5716 {
5717#ifdef GEN_ERRNO_RTX
5718 rtx errno_rtx = GEN_ERRNO_RTX;
5719#else
5720 rtx errno_rtx
5721 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5722#endif
5723
5724 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5725 }
5726#else
5727 /* We can't set errno=EDOM directly; let the library call do it.
5728 Pop the arguments right away in case the call gets deleted. */
5729 NO_DEFER_POP;
5730 expand_call (exp, target, 0);
5731 OK_DEFER_POP;
5732#endif
5733
5734 emit_label (lab1);
5735 }
e87b4f3f 5736
e7c33f54 5737 /* Output the entire sequence. */
8c8a8e34
JW
5738 insns = get_insns ();
5739 end_sequence ();
5740 emit_insns (insns);
e7c33f54
RK
5741
5742 return target;
5743
0006469d
TW
 5744 /* __builtin_apply_args returns a block of memory allocated on
 5745 the stack into which are stored the arg pointer, structure
5746 value address, static chain, and all the registers that might
5747 possibly be used in performing a function call. The code is
5748 moved to the start of the function so the incoming values are
5749 saved. */
5750 case BUILT_IN_APPLY_ARGS:
5751 /* Don't do __builtin_apply_args more than once in a function.
5752 Save the result of the first call and reuse it. */
5753 if (apply_args_value != 0)
5754 return apply_args_value;
5755 {
5756 /* When this function is called, it means that registers must be
5757 saved on entry to this function. So we migrate the
5758 call to the first insn of this function. */
5759 rtx temp;
5760 rtx seq;
5761
5762 start_sequence ();
5763 temp = expand_builtin_apply_args ();
5764 seq = get_insns ();
5765 end_sequence ();
5766
5767 apply_args_value = temp;
5768
5769 /* Put the sequence after the NOTE that starts the function.
5770 If this is inside a SEQUENCE, make the outer-level insn
5771 chain current, so the code is placed at the start of the
5772 function. */
5773 push_topmost_sequence ();
5774 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5775 pop_topmost_sequence ();
5776 return temp;
5777 }
5778
5779 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5780 FUNCTION with a copy of the parameters described by
5781 ARGUMENTS, and ARGSIZE. It returns a block of memory
 5782 allocated on the stack into which are stored all the registers
5783 that might possibly be used for returning the result of a
5784 function. ARGUMENTS is the value returned by
5785 __builtin_apply_args. ARGSIZE is the number of bytes of
5786 arguments that must be copied. ??? How should this value be
5787 computed? We'll also need a safe worst case value for varargs
5788 functions. */
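    /* Illustration (editor's note): the three untyped-call builtins are
       used together in a forwarding function, e.g.
	   void *args = __builtin_apply_args ();
	   void *result = __builtin_apply ((void (*) ()) fn, args, 64);
	   __builtin_return (result);
       where FN and the argument-size guess 64 are the user's own
       choices; see the ??? above about computing ARGSIZE properly. */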
5789 case BUILT_IN_APPLY:
5790 if (arglist == 0
5791 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5792 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5793 || TREE_CHAIN (arglist) == 0
5794 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5795 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5796 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5797 return const0_rtx;
5798 else
5799 {
5800 int i;
5801 tree t;
5802 rtx ops[3];
5803
5804 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
5805 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
5806
5807 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5808 }
5809
5810 /* __builtin_return (RESULT) causes the function to return the
5811 value described by RESULT. RESULT is address of the block of
5812 memory returned by __builtin_apply. */
5813 case BUILT_IN_RETURN:
5814 if (arglist
5815 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5816 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
5817 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
5818 NULL_RTX, VOIDmode, 0));
5819 return const0_rtx;
5820
bbf6f052
RK
5821 case BUILT_IN_SAVEREGS:
5822 /* Don't do __builtin_saveregs more than once in a function.
5823 Save the result of the first call and reuse it. */
5824 if (saveregs_value != 0)
5825 return saveregs_value;
5826 {
5827 /* When this function is called, it means that registers must be
5828 saved on entry to this function. So we migrate the
5829 call to the first insn of this function. */
5830 rtx temp;
5831 rtx seq;
5832 rtx valreg, saved_valreg;
5833
5834 /* Now really call the function. `expand_call' does not call
5835 expand_builtin, so there is no danger of infinite recursion here. */
5836 start_sequence ();
5837
5838#ifdef EXPAND_BUILTIN_SAVEREGS
5839 /* Do whatever the machine needs done in this case. */
5840 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5841#else
5842 /* The register where the function returns its value
5843 is likely to have something else in it, such as an argument.
5844 So preserve that register around the call. */
5845 if (value_mode != VOIDmode)
5846 {
5847 valreg = hard_libcall_value (value_mode);
5848 saved_valreg = gen_reg_rtx (value_mode);
5849 emit_move_insn (saved_valreg, valreg);
5850 }
5851
5852 /* Generate the call, putting the value in a pseudo. */
5853 temp = expand_call (exp, target, ignore);
5854
5855 if (value_mode != VOIDmode)
5856 emit_move_insn (valreg, saved_valreg);
5857#endif
5858
5859 seq = get_insns ();
5860 end_sequence ();
5861
5862 saveregs_value = temp;
5863
0006469d
TW
5864 /* Put the sequence after the NOTE that starts the function.
5865 If this is inside a SEQUENCE, make the outer-level insn
5866 chain current, so the code is placed at the start of the
5867 function. */
5868 push_topmost_sequence ();
bbf6f052 5869 emit_insns_before (seq, NEXT_INSN (get_insns ()));
0006469d 5870 pop_topmost_sequence ();
bbf6f052
RK
5871 return temp;
5872 }
5873
5874 /* __builtin_args_info (N) returns word N of the arg space info
5875 for the current function. The number and meanings of words
5876 is controlled by the definition of CUMULATIVE_ARGS. */
5877 case BUILT_IN_ARGS_INFO:
5878 {
5879 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5880 int i;
5881 int *word_ptr = (int *) &current_function_args_info;
5882 tree type, elts, result;
5883
5884 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5885 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5886 __FILE__, __LINE__);
5887
5888 if (arglist != 0)
5889 {
5890 tree arg = TREE_VALUE (arglist);
5891 if (TREE_CODE (arg) != INTEGER_CST)
42b85a55 5892 error ("argument of `__builtin_args_info' must be constant");
bbf6f052
RK
5893 else
5894 {
5895 int wordnum = TREE_INT_CST_LOW (arg);
5896
42b85a55
RS
5897 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
5898 error ("argument of `__builtin_args_info' out of range");
bbf6f052 5899 else
906c4e36 5900 return GEN_INT (word_ptr[wordnum]);
bbf6f052
RK
5901 }
5902 }
5903 else
42b85a55 5904 error ("missing argument in `__builtin_args_info'");
bbf6f052
RK
5905
5906 return const0_rtx;
5907
5908#if 0
5909 for (i = 0; i < nwords; i++)
5910 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5911
5912 type = build_array_type (integer_type_node,
5913 build_index_type (build_int_2 (nwords, 0)));
5914 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5915 TREE_CONSTANT (result) = 1;
5916 TREE_STATIC (result) = 1;
5917 result = build (INDIRECT_REF, build_pointer_type (type), result);
5918 TREE_CONSTANT (result) = 1;
906c4e36 5919 return expand_expr (result, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5920#endif
5921 }
5922
5923 /* Return the address of the first anonymous stack arg. */
5924 case BUILT_IN_NEXT_ARG:
5925 {
5926 tree fntype = TREE_TYPE (current_function_decl);
5927 if (!(TYPE_ARG_TYPES (fntype) != 0
5928 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5929 != void_type_node)))
5930 {
5931 error ("`va_start' used in function with fixed args");
5932 return const0_rtx;
5933 }
5934 }
5935
5936 return expand_binop (Pmode, add_optab,
5937 current_function_internal_arg_pointer,
5938 current_function_arg_offset_rtx,
906c4e36 5939 NULL_RTX, 0, OPTAB_LIB_WIDEN);
bbf6f052
RK
5940
5941 case BUILT_IN_CLASSIFY_TYPE:
5942 if (arglist != 0)
5943 {
5944 tree type = TREE_TYPE (TREE_VALUE (arglist));
5945 enum tree_code code = TREE_CODE (type);
5946 if (code == VOID_TYPE)
906c4e36 5947 return GEN_INT (void_type_class);
bbf6f052 5948 if (code == INTEGER_TYPE)
906c4e36 5949 return GEN_INT (integer_type_class);
bbf6f052 5950 if (code == CHAR_TYPE)
906c4e36 5951 return GEN_INT (char_type_class);
bbf6f052 5952 if (code == ENUMERAL_TYPE)
906c4e36 5953 return GEN_INT (enumeral_type_class);
bbf6f052 5954 if (code == BOOLEAN_TYPE)
906c4e36 5955 return GEN_INT (boolean_type_class);
bbf6f052 5956 if (code == POINTER_TYPE)
906c4e36 5957 return GEN_INT (pointer_type_class);
bbf6f052 5958 if (code == REFERENCE_TYPE)
906c4e36 5959 return GEN_INT (reference_type_class);
bbf6f052 5960 if (code == OFFSET_TYPE)
906c4e36 5961 return GEN_INT (offset_type_class);
bbf6f052 5962 if (code == REAL_TYPE)
906c4e36 5963 return GEN_INT (real_type_class);
bbf6f052 5964 if (code == COMPLEX_TYPE)
906c4e36 5965 return GEN_INT (complex_type_class);
bbf6f052 5966 if (code == FUNCTION_TYPE)
906c4e36 5967 return GEN_INT (function_type_class);
bbf6f052 5968 if (code == METHOD_TYPE)
906c4e36 5969 return GEN_INT (method_type_class);
bbf6f052 5970 if (code == RECORD_TYPE)
906c4e36 5971 return GEN_INT (record_type_class);
e7f3c83f 5972 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
906c4e36 5973 return GEN_INT (union_type_class);
bbf6f052 5974 if (code == ARRAY_TYPE)
906c4e36 5975 return GEN_INT (array_type_class);
bbf6f052 5976 if (code == STRING_TYPE)
906c4e36 5977 return GEN_INT (string_type_class);
bbf6f052 5978 if (code == SET_TYPE)
906c4e36 5979 return GEN_INT (set_type_class);
bbf6f052 5980 if (code == FILE_TYPE)
906c4e36 5981 return GEN_INT (file_type_class);
bbf6f052 5982 if (code == LANG_TYPE)
906c4e36 5983 return GEN_INT (lang_type_class);
bbf6f052 5984 }
906c4e36 5985 return GEN_INT (no_type_class);
bbf6f052
RK
5986
5987 case BUILT_IN_CONSTANT_P:
5988 if (arglist == 0)
5989 return const0_rtx;
5990 else
cda0ec81 5991 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
bbf6f052
RK
5992 ? const1_rtx : const0_rtx);
5993
5994 case BUILT_IN_FRAME_ADDRESS:
5995 /* The argument must be a nonnegative integer constant.
5996 It counts the number of frames to scan up the stack.
5997 The value is the address of that frame. */
5998 case BUILT_IN_RETURN_ADDRESS:
5999 /* The argument must be a nonnegative integer constant.
6000 It counts the number of frames to scan up the stack.
6001 The value is the return address saved in that frame. */
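      /* Illustration (editor's note): a count of 0 names the current
	 frame, so __builtin_return_address (0) is the current function's
	 own return address; each additional count follows one
	 dynamic-chain link toward the outermost caller. */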
6002 if (arglist == 0)
6003 /* Warning about missing arg was already issued. */
6004 return const0_rtx;
6005 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
6006 {
42b85a55 6007 error ("invalid arg to `__builtin_return_address'");
bbf6f052
RK
6008 return const0_rtx;
6009 }
6010 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
6011 {
42b85a55 6012 error ("invalid arg to `__builtin_return_address'");
bbf6f052
RK
6013 return const0_rtx;
6014 }
6015 else
6016 {
6017 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
6018 rtx tem = frame_pointer_rtx;
6019 int i;
6020
46b68a37
JW
6021 /* Some machines need special handling before we can access arbitrary
6022 frames. For example, on the sparc, we must first flush all
6023 register windows to the stack. */
6024#ifdef SETUP_FRAME_ADDRESSES
6025 SETUP_FRAME_ADDRESSES ();
6026#endif
6027
6028 /* On the sparc, the return address is not in the frame, it is
6029 in a register. There is no way to access it off of the current
6030 frame pointer, but it can be accessed off the previous frame
6031 pointer by reading the value from the register window save
6032 area. */
6033#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
6034 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
6035 count--;
6036#endif
6037
bbf6f052
RK
6038 /* Scan back COUNT frames to the specified frame. */
6039 for (i = 0; i < count; i++)
6040 {
6041 /* Assume the dynamic chain pointer is in the word that
6042 the frame address points to, unless otherwise specified. */
6043#ifdef DYNAMIC_CHAIN_ADDRESS
6044 tem = DYNAMIC_CHAIN_ADDRESS (tem);
6045#endif
6046 tem = memory_address (Pmode, tem);
6047 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
6048 }
6049
6050 /* For __builtin_frame_address, return what we've got. */
6051 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6052 return tem;
6053
6054 /* For __builtin_return_address,
6055 Get the return address from that frame. */
6056#ifdef RETURN_ADDR_RTX
6057 return RETURN_ADDR_RTX (count, tem);
6058#else
6059 tem = memory_address (Pmode,
6060 plus_constant (tem, GET_MODE_SIZE (Pmode)));
6061 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
6062#endif
6063 }
6064
6065 case BUILT_IN_ALLOCA:
6066 if (arglist == 0
6067 /* Arg could be non-integer if user redeclared this fcn wrong. */
6068 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6069 return const0_rtx;
6070 current_function_calls_alloca = 1;
6071 /* Compute the argument. */
906c4e36 6072 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6073
6074 /* Allocate the desired space. */
8c8a8e34 6075 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
bbf6f052
RK
6076
6077 /* Record the new stack level for nonlocal gotos. */
6dc42e49 6078 if (nonlocal_goto_handler_slot != 0)
906c4e36 6079 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
bbf6f052
RK
6080 return target;
6081
6082 case BUILT_IN_FFS:
6083 /* If not optimizing, call the library function. */
6084 if (!optimize)
6085 break;
6086
6087 if (arglist == 0
6088 /* Arg could be non-integer if user redeclared this fcn wrong. */
6089 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6090 return const0_rtx;
6091
6092 /* Compute the argument. */
6093 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6094 /* Compute ffs, into TARGET if possible.
6095 Set TARGET to wherever the result comes back. */
6096 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6097 ffs_optab, op0, target, 1);
6098 if (target == 0)
6099 abort ();
6100 return target;
6101
6102 case BUILT_IN_STRLEN:
6103 /* If not optimizing, call the library function. */
6104 if (!optimize)
6105 break;
6106
6107 if (arglist == 0
6108 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6109 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6110 return const0_rtx;
6111 else
6112 {
e7c33f54
RK
6113 tree src = TREE_VALUE (arglist);
6114 tree len = c_strlen (src);
bbf6f052 6115
e7c33f54
RK
6116 int align
6117 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6118
6119 rtx result, src_rtx, char_rtx;
6120 enum machine_mode insn_mode = value_mode, char_mode;
6121 enum insn_code icode;
6122
6123 /* If the length is known, just return it. */
6124 if (len != 0)
6125 return expand_expr (len, target, mode, 0);
6126
6127 /* If SRC is not a pointer type, don't do this operation inline. */
6128 if (align == 0)
6129 break;
6130
6131 /* Call a function if we can't compute strlen in the right mode. */
6132
6133 while (insn_mode != VOIDmode)
6134 {
6135 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6136 if (icode != CODE_FOR_nothing)
6137 break;
6138
6139 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6140 }
6141 if (insn_mode == VOIDmode)
bbf6f052 6142 break;
e7c33f54
RK
6143
6144 /* Make a place to write the result of the instruction. */
6145 result = target;
6146 if (! (result != 0
6147 && GET_CODE (result) == REG
6148 && GET_MODE (result) == insn_mode
6149 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6150 result = gen_reg_rtx (insn_mode);
6151
4d613828 6152 /* Make sure the operands are acceptable to the predicates. */
e7c33f54 6153
4d613828 6154 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
e7c33f54
RK
6155 result = gen_reg_rtx (insn_mode);
6156
6157 src_rtx = memory_address (BLKmode,
906c4e36 6158 expand_expr (src, NULL_RTX, Pmode,
e7c33f54 6159 EXPAND_NORMAL));
4d613828 6160 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
e7c33f54
RK
6161 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6162
6163 char_rtx = const0_rtx;
4d613828
RS
6164 char_mode = insn_operand_mode[(int)icode][2];
6165 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
e7c33f54
RK
6166 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6167
6168 emit_insn (GEN_FCN (icode) (result,
6169 gen_rtx (MEM, BLKmode, src_rtx),
906c4e36 6170 char_rtx, GEN_INT (align)));
e7c33f54
RK
6171
6172 /* Return the value in the proper mode for this function. */
6173 if (GET_MODE (result) == value_mode)
6174 return result;
6175 else if (target != 0)
6176 {
6177 convert_move (target, result, 0);
6178 return target;
6179 }
6180 else
6181 return convert_to_mode (value_mode, result, 0);
bbf6f052
RK
6182 }
6183
6184 case BUILT_IN_STRCPY:
6185 /* If not optimizing, call the library function. */
6186 if (!optimize)
6187 break;
6188
6189 if (arglist == 0
6190 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6191 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6192 || TREE_CHAIN (arglist) == 0
6193 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6194 return const0_rtx;
6195 else
6196 {
6197 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6198
6199 if (len == 0)
6200 break;
6201
6202 len = size_binop (PLUS_EXPR, len, integer_one_node);
6203
906c4e36 6204 chainon (arglist, build_tree_list (NULL_TREE, len));
bbf6f052
RK
6205 }
6206
6207 /* Drops in. */
6208 case BUILT_IN_MEMCPY:
6209 /* If not optimizing, call the library function. */
6210 if (!optimize)
6211 break;
6212
6213 if (arglist == 0
6214 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6215 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6216 || TREE_CHAIN (arglist) == 0
6217 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6218 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6219 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6220 return const0_rtx;
6221 else
6222 {
6223 tree dest = TREE_VALUE (arglist);
6224 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6225 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6226
6227 int src_align
6228 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6229 int dest_align
6230 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9937da1a 6231 rtx dest_rtx, dest_mem, src_mem;
bbf6f052
RK
6232
6233 /* If either SRC or DEST is not a pointer type, don't do
6234 this operation in-line. */
6235 if (src_align == 0 || dest_align == 0)
6236 {
6237 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6238 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6239 break;
6240 }
6241
906c4e36 6242 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
9937da1a
RS
6243 dest_mem = gen_rtx (MEM, BLKmode,
6244 memory_address (BLKmode, dest_rtx));
6245 src_mem = gen_rtx (MEM, BLKmode,
6246 memory_address (BLKmode,
6247 expand_expr (src, NULL_RTX,
6248 Pmode,
6249 EXPAND_NORMAL)));
bbf6f052
RK
6250
6251 /* Copy word part most expediently. */
9937da1a 6252 emit_block_move (dest_mem, src_mem,
906c4e36 6253 expand_expr (len, NULL_RTX, VOIDmode, 0),
bbf6f052
RK
6254 MIN (src_align, dest_align));
6255 return dest_rtx;
6256 }
6257
6258/* These comparison functions need an instruction that returns an actual
6259 index. An ordinary compare that just sets the condition codes
6260 is not enough. */
6261#ifdef HAVE_cmpstrsi
6262 case BUILT_IN_STRCMP:
6263 /* If not optimizing, call the library function. */
6264 if (!optimize)
6265 break;
6266
6267 if (arglist == 0
6268 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6269 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6270 || TREE_CHAIN (arglist) == 0
6271 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6272 return const0_rtx;
6273 else if (!HAVE_cmpstrsi)
6274 break;
6275 {
6276 tree arg1 = TREE_VALUE (arglist);
6277 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6278 tree offset;
6279 tree len, len2;
6280
6281 len = c_strlen (arg1);
6282 if (len)
6283 len = size_binop (PLUS_EXPR, integer_one_node, len);
6284 len2 = c_strlen (arg2);
6285 if (len2)
6286 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6287
6288 /* If we don't have a constant length for the first, use the length
6289 of the second, if we know it. We don't require a constant for
6290 this case; some cost analysis could be done if both are available
6291 but neither is constant. For now, assume they're equally cheap.
6292
6293 If both strings have constant lengths, use the smaller. This
6294 could arise if optimization results in strcpy being called with
6295 two fixed strings, or if the code was machine-generated. We should
6296 add some code to the `memcmp' handler below to deal with such
6297 situations, someday. */
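	/* Illustration (editor's note): for strcmp ("ab", s) the first
	   length is 3 ("ab" plus its terminating null); the compare can
	   never inspect bytes past that null, so 3 is a safe length even
	   though the length of S is unknown. */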
6298 if (!len || TREE_CODE (len) != INTEGER_CST)
6299 {
6300 if (len2)
6301 len = len2;
6302 else if (len == 0)
6303 break;
6304 }
6305 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6306 {
6307 if (tree_int_cst_lt (len2, len))
6308 len = len2;
6309 }
6310
906c4e36 6311 chainon (arglist, build_tree_list (NULL_TREE, len));
bbf6f052
RK
6312 }
6313
6314 /* Drops in. */
6315 case BUILT_IN_MEMCMP:
6316 /* If not optimizing, call the library function. */
6317 if (!optimize)
6318 break;
6319
6320 if (arglist == 0
6321 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6322 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6323 || TREE_CHAIN (arglist) == 0
6324 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6325 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6326 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6327 return const0_rtx;
6328 else if (!HAVE_cmpstrsi)
6329 break;
6330 {
6331 tree arg1 = TREE_VALUE (arglist);
6332 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6333 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6334 rtx result;
6335
6336 int arg1_align
6337 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6338 int arg2_align
6339 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6340 enum machine_mode insn_mode
6341 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6342
6343 /* If we don't have POINTER_TYPE, call the function. */
6344 if (arg1_align == 0 || arg2_align == 0)
6345 {
6346 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6347 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6348 break;
6349 }
6350
6351 /* Make a place to write the result of the instruction. */
6352 result = target;
6353 if (! (result != 0
6354 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6355 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6356 result = gen_reg_rtx (insn_mode);
6357
6358 emit_insn (gen_cmpstrsi (result,
6359 gen_rtx (MEM, BLKmode,
906c4e36
RK
6360 expand_expr (arg1, NULL_RTX, Pmode,
6361 EXPAND_NORMAL)),
bbf6f052 6362 gen_rtx (MEM, BLKmode,
906c4e36
RK
6363 expand_expr (arg2, NULL_RTX, Pmode,
6364 EXPAND_NORMAL)),
6365 expand_expr (len, NULL_RTX, VOIDmode, 0),
6366 GEN_INT (MIN (arg1_align, arg2_align))));
bbf6f052
RK
6367
6368 /* Return the value in the proper mode for this function. */
6369 mode = TYPE_MODE (TREE_TYPE (exp));
6370 if (GET_MODE (result) == mode)
6371 return result;
6372 else if (target != 0)
6373 {
6374 convert_move (target, result, 0);
6375 return target;
6376 }
6377 else
6378 return convert_to_mode (mode, result, 0);
6379 }
6380#else
6381 case BUILT_IN_STRCMP:
6382 case BUILT_IN_MEMCMP:
6383 break;
6384#endif
6385
6386 default: /* just do library call, if unknown builtin */
42b85a55 6387 error ("built-in function `%s' not currently supported",
bbf6f052
RK
6388 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6389 }
6390
6391 /* The switch statement above can drop through to cause the function
6392 to be called normally. */
6393
6394 return expand_call (exp, target, ignore);
6395}
6396\f
0006469d
TW
6397/* Built-in functions to perform an untyped call and return. */
6398
6399/* For each register that may be used for calling a function, this
6400 gives a mode used to copy the register's value. VOIDmode indicates
6401 the register is not used for calling a function. If the machine
6402 has register windows, this gives only the outbound registers.
6403 INCOMING_REGNO gives the corresponding inbound register. */
6404static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
6405
6406/* For each register that may be used for returning values, this gives
6407 a mode used to copy the register's value. VOIDmode indicates the
6408 register is not used for returning values. If the machine has
6409 register windows, this gives only the outbound registers.
6410 INCOMING_REGNO gives the corresponding inbound register. */
6411static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
6412
6413/* Return the size required for the block returned by __builtin_apply_args,
6414 and initialize apply_args_mode. */
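/* Illustration (editor's note): the rounding below is plain
   round-up-to-a-multiple; e.g. with size == 6 and align == 4,
   CEIL (6, 4) * 4 == 8, so the next slot begins at offset 8. */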
6415static int
6416apply_args_size ()
6417{
6418 static int size = -1;
6419 int align, regno;
6420 enum machine_mode mode;
6421
6422 /* The values computed by this function never change. */
6423 if (size < 0)
6424 {
6425 /* The first value is the incoming arg-pointer. */
6426 size = GET_MODE_SIZE (Pmode);
6427
6428 /* The second value is the structure value address unless this is
6429 passed as an "invisible" first argument. */
6430 if (struct_value_rtx)
6431 size += GET_MODE_SIZE (Pmode);
6432
6433 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6434 if (FUNCTION_ARG_REGNO_P (regno))
6435 {
6436 /* Search for the proper mode for copying this register's
6437 value. I'm not sure this is right, but it works so far. */
6438 enum machine_mode best_mode = VOIDmode;
6439
6440 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6441 mode != VOIDmode;
6442 mode = GET_MODE_WIDER_MODE (mode))
6443 if (HARD_REGNO_MODE_OK (regno, mode)
6444 && HARD_REGNO_NREGS (regno, mode) == 1)
6445 best_mode = mode;
6446
6447 if (best_mode == VOIDmode)
6448 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6449 mode != VOIDmode;
6450 mode = GET_MODE_WIDER_MODE (mode))
6451 if (HARD_REGNO_MODE_OK (regno, mode)
6452 && (mov_optab->handlers[(int) mode].insn_code
6453 != CODE_FOR_nothing))
6454 best_mode = mode;
6455
6456 mode = best_mode;
6457 if (mode == VOIDmode)
6458 abort ();
6459
6460 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6461 if (size % align != 0)
6462 size = CEIL (size, align) * align;
6463 size += GET_MODE_SIZE (mode);
6464 apply_args_mode[regno] = mode;
6465 }
6466 else
6467 apply_args_mode[regno] = VOIDmode;
6468 }
6469 return size;
6470}
6471
6472/* Return the size required for the block returned by __builtin_apply,
6473 and initialize apply_result_mode. */
6474static int
6475apply_result_size ()
6476{
6477 static int size = -1;
6478 int align, regno;
6479 enum machine_mode mode;
6480
6481 /* The values computed by this function never change. */
6482 if (size < 0)
6483 {
6484 size = 0;
6485
6486 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6487 if (FUNCTION_VALUE_REGNO_P (regno))
6488 {
6489 /* Search for the proper mode for copying this register's
6490 value. I'm not sure this is right, but it works so far. */
6491 enum machine_mode best_mode = VOIDmode;
6492
6493 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6494 mode != TImode;
6495 mode = GET_MODE_WIDER_MODE (mode))
6496 if (HARD_REGNO_MODE_OK (regno, mode))
6497 best_mode = mode;
6498
6499 if (best_mode == VOIDmode)
6500 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6501 mode != VOIDmode;
6502 mode = GET_MODE_WIDER_MODE (mode))
6503 if (HARD_REGNO_MODE_OK (regno, mode)
6504 && (mov_optab->handlers[(int) mode].insn_code
6505 != CODE_FOR_nothing))
6506 best_mode = mode;
6507
6508 mode = best_mode;
6509 if (mode == VOIDmode)
6510 abort ();
6511
6512 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6513 if (size % align != 0)
6514 size = CEIL (size, align) * align;
6515 size += GET_MODE_SIZE (mode);
6516 apply_result_mode[regno] = mode;
6517 }
6518 else
6519 apply_result_mode[regno] = VOIDmode;
6520
6521 /* Allow targets that use untyped_call and untyped_return to override
6522 the size so that machine-specific information can be stored here. */
6523#ifdef APPLY_RESULT_SIZE
6524 size = APPLY_RESULT_SIZE;
6525#endif
6526 }
6527 return size;
6528}
6529
6530#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
6531/* Create a vector describing the result block RESULT. If SAVEP is true,
6532 the result block is used to save the values; otherwise it is used to
6533 restore the values. */
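/* Illustration (editor's note): with two live return registers this
   builds, for SAVEP, a PARALLEL of the form
       (parallel [(set (mem:M1 ...) (reg:M1 ...))
		  (set (mem:M2 ...) (reg:M2 ...))])
   one SET per register, memory <- register; for !SAVEP each SET
   runs the other way, register <- memory. */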
6534static rtx
6535result_vector (savep, result)
6536 int savep;
6537 rtx result;
6538{
6539 int regno, size, align, nelts;
6540 enum machine_mode mode;
6541 rtx reg, mem;
6542 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
6543
6544 size = nelts = 0;
6545 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6546 if ((mode = apply_result_mode[regno]) != VOIDmode)
6547 {
6548 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6549 if (size % align != 0)
6550 size = CEIL (size, align) * align;
6551 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
6552 mem = change_address (result, mode,
6553 plus_constant (XEXP (result, 0), size));
6554 savevec[nelts++] = (savep
6555 ? gen_rtx (SET, VOIDmode, mem, reg)
6556 : gen_rtx (SET, VOIDmode, reg, mem));
6557 size += GET_MODE_SIZE (mode);
6558 }
6559 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
6560}
6561#endif /* HAVE_untyped_call or HAVE_untyped_return */
6562
6563
6564/* Save the state required to perform an untyped call with the same
6565 arguments as were passed to the current function. */
6566static rtx
6567expand_builtin_apply_args ()
6568{
6569 rtx registers;
6570 int size, align, regno;
6571 enum machine_mode mode;
6572
6573 /* Create a block where the arg-pointer, structure value address,
6574 and argument registers can be saved. */
6575 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
6576
6577 /* Walk past the arg-pointer and structure value address. */
6578 size = GET_MODE_SIZE (Pmode);
6579 if (struct_value_rtx)
6580 size += GET_MODE_SIZE (Pmode);
6581
6582 /* Save each register used in calling a function to the block. */
6583 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6584 if ((mode = apply_args_mode[regno]) != VOIDmode)
6585 {
6586 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6587 if (size % align != 0)
6588 size = CEIL (size, align) * align;
6589 emit_move_insn (change_address (registers, mode,
6590 plus_constant (XEXP (registers, 0),
6591 size)),
6592 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
6593 size += GET_MODE_SIZE (mode);
6594 }
6595
6596 /* Save the arg pointer to the block. */
6597 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
6598 copy_to_reg (virtual_incoming_args_rtx));
6599 size = GET_MODE_SIZE (Pmode);
6600
6601 /* Save the structure value address unless this is passed as an
6602 "invisible" first argument. */
6603 if (struct_value_incoming_rtx)
6604 {
6605 emit_move_insn (change_address (registers, Pmode,
6606 plus_constant (XEXP (registers, 0),
6607 size)),
6608 copy_to_reg (struct_value_incoming_rtx));
6609 size += GET_MODE_SIZE (Pmode);
6610 }
6611
6612 /* Return the address of the block. */
6613 return copy_addr_to_reg (XEXP (registers, 0));
6614}
6615
6616/* Perform an untyped call and save the state required to perform an
6617 untyped return of whatever value was returned by the given function. */
6618static rtx
6619expand_builtin_apply (function, arguments, argsize)
6620 rtx function, arguments, argsize;
6621{
6622 int size, align, regno;
6623 enum machine_mode mode;
6624 rtx incoming_args, result, reg, dest, call_insn;
6625 rtx old_stack_level = 0;
6626 rtx use_insns = 0;
6627
6628 /* Create a block where the return registers can be saved. */
6629 result = assign_stack_local (BLKmode, apply_result_size (), -1);
6630
6631 /* ??? The argsize value should be adjusted here. */
6632
6633 /* Fetch the arg pointer from the ARGUMENTS block. */
6634 incoming_args = gen_reg_rtx (Pmode);
6635 emit_move_insn (incoming_args,
6636 gen_rtx (MEM, Pmode, arguments));
6637#ifndef STACK_GROWS_DOWNWARD
6638 incoming_args = expand_binop (Pmode, add_optab, incoming_args, argsize,
6639 incoming_args, 0, OPTAB_LIB_WIDEN);
6640#endif
6641
6642 /* Perform postincrements before actually calling the function. */
6643 emit_queue ();
6644
6645 /* Push a new argument block and copy the arguments. */
6646 do_pending_stack_adjust ();
6647 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
6648
6649 /* Push a block of memory onto the stack to store the memory arguments.
6650 Save the address in a register, and copy the memory arguments. ??? I
 6651 haven't figured out how the calling convention macros affect this,
6652 but it's likely that the source and/or destination addresses in
6653 the block copy will need updating in machine specific ways. */
6654 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
6655 emit_block_move (gen_rtx (MEM, BLKmode, dest),
6656 gen_rtx (MEM, BLKmode, incoming_args),
6657 argsize,
6658 PARM_BOUNDARY / BITS_PER_UNIT);
6659
6660 /* Refer to the argument block. */
6661 apply_args_size ();
6662 arguments = gen_rtx (MEM, BLKmode, arguments);
6663
6664 /* Walk past the arg-pointer and structure value address. */
6665 size = GET_MODE_SIZE (Pmode);
6666 if (struct_value_rtx)
6667 size += GET_MODE_SIZE (Pmode);
6668
6669 /* Restore each of the registers previously saved. Make USE insns
6670 for each of these registers for use in making the call. */
6671 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6672 if ((mode = apply_args_mode[regno]) != VOIDmode)
6673 {
6674 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6675 if (size % align != 0)
6676 size = CEIL (size, align) * align;
6677 reg = gen_rtx (REG, mode, regno);
6678 emit_move_insn (reg,
6679 change_address (arguments, mode,
6680 plus_constant (XEXP (arguments, 0),
6681 size)));
6682
6683 push_to_sequence (use_insns);
6684 emit_insn (gen_rtx (USE, VOIDmode, reg));
6685 use_insns = get_insns ();
6686 end_sequence ();
6687 size += GET_MODE_SIZE (mode);
6688 }
6689
6690 /* Restore the structure value address unless this is passed as an
6691 "invisible" first argument. */
6692 size = GET_MODE_SIZE (Pmode);
6693 if (struct_value_rtx)
6694 {
6695 rtx value = gen_reg_rtx (Pmode);
6696 emit_move_insn (value,
6697 change_address (arguments, Pmode,
6698 plus_constant (XEXP (arguments, 0),
6699 size)));
6700 emit_move_insn (struct_value_rtx, value);
6701 if (GET_CODE (struct_value_rtx) == REG)
6702 {
6703 push_to_sequence (use_insns);
6704 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
6705 use_insns = get_insns ();
6706 end_sequence ();
6707 }
6708 size += GET_MODE_SIZE (Pmode);
6709 }
6710
6711 /* All arguments and registers used for the call are set up by now! */
6712 function = prepare_call_address (function, NULL_TREE, &use_insns);
6713
6714 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
6715 and we don't want to load it into a register as an optimization,
6716 because prepare_call_address already did it if it should be done. */
6717 if (GET_CODE (function) != SYMBOL_REF)
6718 function = memory_address (FUNCTION_MODE, function);
6719
6720 /* Generate the actual call instruction and save the return value. */
6721#ifdef HAVE_untyped_call
6722 if (HAVE_untyped_call)
6723 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
6724 result, result_vector (1, result)));
6725 else
6726#endif
6727#ifdef HAVE_call_value
6728 if (HAVE_call_value)
6729 {
6730 rtx valreg = 0;
6731
6732 /* Locate the unique return register. It is not possible to
6733 express a call that sets more than one return register using
6734 call_value; use untyped_call for that. In fact, untyped_call
6735 only needs to save the return registers in the given block. */
6736 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6737 if ((mode = apply_result_mode[regno]) != VOIDmode)
6738 {
6739 if (valreg)
6740 abort (); /* HAVE_untyped_call required. */
6741 valreg = gen_rtx (REG, mode, regno);
6742 }
6743
6744 emit_call_insn (gen_call_value (valreg,
6745 gen_rtx (MEM, FUNCTION_MODE, function),
6746 const0_rtx, NULL_RTX, const0_rtx));
6747
6748 emit_move_insn (change_address (result, GET_MODE (valreg),
6749 XEXP (result, 0)),
6750 valreg);
6751 }
6752 else
6753#endif
6754 abort ();
6755
6756 /* Find the CALL insn we just emitted and write the USE insns before it. */
6757 for (call_insn = get_last_insn ();
6758 call_insn && GET_CODE (call_insn) != CALL_INSN;
6759 call_insn = PREV_INSN (call_insn))
6760 ;
6761
6762 if (! call_insn)
6763 abort ();
6764
6765 /* Put the USE insns before the CALL. */
6766 emit_insns_before (use_insns, call_insn);
6767
6768 /* Restore the stack. */
6769 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
6770
6771 /* Return the address of the result block. */
6772 return copy_addr_to_reg (XEXP (result, 0));
6773}
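/* In source terms, the two functions above implement, for example:

     void *args = __builtin_apply_args ();
     void *res  = __builtin_apply (fn, args, 64);

   where 64 is a caller-chosen upper bound on the size of the pushed
   argument block (see the ??? comment above about adjusting ARGSIZE);
   RES is the address of a block holding the return registers.  */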
6774
6775/* Perform an untyped return. */
6776static void
6777expand_builtin_return (result)
6778 rtx result;
6779{
6780 int size, align, regno;
6781 enum machine_mode mode;
6782 rtx reg;
6783 rtx use_insns = 0;
6784
6785 apply_result_size ();
6786 result = gen_rtx (MEM, BLKmode, result);
6787
6788#ifdef HAVE_untyped_return
6789 if (HAVE_untyped_return)
6790 {
6791 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
6792 emit_barrier ();
6793 return;
6794 }
6795#endif
6796
6797 /* Restore the return value and note that each value is used. */
6798 size = 0;
6799 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6800 if ((mode = apply_result_mode[regno]) != VOIDmode)
6801 {
6802 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6803 if (size % align != 0)
6804 size = CEIL (size, align) * align;
6805 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
6806 emit_move_insn (reg,
6807 change_address (result, mode,
6808 plus_constant (XEXP (result, 0),
6809 size)));
6810
6811 push_to_sequence (use_insns);
6812 emit_insn (gen_rtx (USE, VOIDmode, reg));
6813 use_insns = get_insns ();
6814 end_sequence ();
6815 size += GET_MODE_SIZE (mode);
6816 }
6817
6818 /* Put the USE insns before the return. */
6819 emit_insns (use_insns);
6820
 6821 /* Return whatever value was restored by jumping directly to the end
6822 of the function. */
6823 expand_null_return ();
6824}
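/* Source-level counterpart of the above: `__builtin_return (res)'
   returns from the current function whatever value the applied
   function left in the result block RES.  */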
6825\f
6826/* Expand code for a post- or pre- increment or decrement
6827 and return the RTX for the result.
6828 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6829
6830static rtx
6831expand_increment (exp, post)
6832 register tree exp;
6833 int post;
6834{
6835 register rtx op0, op1;
6836 register rtx temp, value;
6837 register tree incremented = TREE_OPERAND (exp, 0);
6838 optab this_optab = add_optab;
6839 int icode;
6840 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6841 int op0_is_copy = 0;
6842
6843 /* Stabilize any component ref that might need to be
6844 evaluated more than once below. */
6845 if (!post
6846 || TREE_CODE (incremented) == BIT_FIELD_REF
6847 || (TREE_CODE (incremented) == COMPONENT_REF
6848 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6849 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6850 incremented = stabilize_reference (incremented);
6851
6852 /* Compute the operands as RTX.
6853 Note whether OP0 is the actual lvalue or a copy of it:
 6854 I believe it is a copy iff it is a register or subreg
 6855 and insns were generated in computing it. */
 6856
 6857 temp = get_last_insn ();
 6858 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
6859
6860 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
 6861 in place but instead must do sign- or zero-extension during assignment,
 6862 so we copy it into a new register and let the code below use it as
 6863 a copy.
 6864
 6865 Note that we can safely modify this SUBREG since it is known not to be
6866 shared (it was made by the expand_expr call above). */
6867
6868 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6869 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6870
6871 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6872 && temp != get_last_insn ());
 6873 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6874
6875 /* Decide whether incrementing or decrementing. */
6876 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6877 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6878 this_optab = sub_optab;
6879
6880 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6881 then we cannot just increment OP0. We must therefore contrive to
6882 increment the original value. Then, for postincrement, we can return
6883 OP0 since it is a copy of the old value. For preincrement, we want
6884 to always expand here, since this generates better or equivalent code. */
6885 if (!post || op0_is_copy)
6886 {
6887 /* This is the easiest way to increment the value wherever it is.
6888 Problems with multiple evaluation of INCREMENTED are prevented
6889 because either (1) it is a component_ref or preincrement,
6890 in which case it was stabilized above, or (2) it is an array_ref
6891 with constant index in an array in a register, which is
6892 safe to reevaluate. */
6893 tree newexp = build ((this_optab == add_optab
6894 ? PLUS_EXPR : MINUS_EXPR),
6895 TREE_TYPE (exp),
6896 incremented,
6897 TREE_OPERAND (exp, 1));
6898 temp = expand_assignment (incremented, newexp, ! post, 0);
6899 return post ? op0 : temp;
6900 }
6901
6902 /* Convert decrement by a constant into a negative increment. */
6903 if (this_optab == sub_optab
6904 && GET_CODE (op1) == CONST_INT)
6905 {
 6906 op1 = GEN_INT (- INTVAL (op1));
6907 this_optab = add_optab;
6908 }
6909
6910 if (post)
6911 {
6912 /* We have a true reference to the value in OP0.
6913 If there is an insn to add or subtract in this mode, queue it. */
6914
6915#if 0 /* Turned off to avoid making extra insn for indexed memref. */
6916 op0 = stabilize (op0);
6917#endif
6918
6919 icode = (int) this_optab->handlers[(int) mode].insn_code;
6920 if (icode != (int) CODE_FOR_nothing
6921 /* Make sure that OP0 is valid for operands 0 and 1
6922 of the insn we want to queue. */
6923 && (*insn_operand_predicate[icode][0]) (op0, mode)
6924 && (*insn_operand_predicate[icode][1]) (op0, mode))
6925 {
6926 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6927 op1 = force_reg (mode, op1);
6928
6929 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6930 }
6931 }
6932
6933 /* Preincrement, or we can't increment with one simple insn. */
6934 if (post)
6935 /* Save a copy of the value before inc or dec, to return it later. */
6936 temp = value = copy_to_reg (op0);
6937 else
6938 /* Arrange to return the incremented value. */
6939 /* Copy the rtx because expand_binop will protect from the queue,
6940 and the results of that would be invalid for us to return
6941 if our caller does emit_queue before using our result. */
6942 temp = copy_rtx (value = op0);
6943
6944 /* Increment however we can. */
6945 op1 = expand_binop (mode, this_optab, value, op1, op0,
6946 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6947 /* Make sure the value is stored into OP0. */
6948 if (op1 != op0)
6949 emit_move_insn (op0, op1);
6950
6951 return temp;
6952}
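/* For example, for `i++' on an int in a register, the post case above
   queues a single add insn and returns the old value of I, while for
   `++i' (or whenever OP0 is merely a copy of the lvalue) the whole
   expression is re-expressed as the assignment `i = i + 1' and
   expanded normally.  */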
6953\f
6954/* Expand all function calls contained within EXP, innermost ones first.
6955 But don't look within expressions that have sequence points.
6956 For each CALL_EXPR, record the rtx for its value
6957 in the CALL_EXPR_RTL field. */
6958
6959static void
6960preexpand_calls (exp)
6961 tree exp;
6962{
6963 register int nops, i;
6964 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6965
6966 if (! do_preexpand_calls)
6967 return;
6968
6969 /* Only expressions and references can contain calls. */
6970
6971 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6972 return;
6973
6974 switch (TREE_CODE (exp))
6975 {
6976 case CALL_EXPR:
6977 /* Do nothing if already expanded. */
6978 if (CALL_EXPR_RTL (exp) != 0)
6979 return;
6980
6981 /* Do nothing to built-in functions. */
6982 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6983 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6984 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
 6985 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
6986 return;
6987
6988 case COMPOUND_EXPR:
6989 case COND_EXPR:
6990 case TRUTH_ANDIF_EXPR:
6991 case TRUTH_ORIF_EXPR:
6992 /* If we find one of these, then we can be sure
6993 the adjust will be done for it (since it makes jumps).
6994 Do it now, so that if this is inside an argument
6995 of a function, we don't get the stack adjustment
6996 after some other args have already been pushed. */
6997 do_pending_stack_adjust ();
6998 return;
6999
7000 case BLOCK:
7001 case RTL_EXPR:
7002 case WITH_CLEANUP_EXPR:
7003 return;
7004
7005 case SAVE_EXPR:
7006 if (SAVE_EXPR_RTL (exp) != 0)
7007 return;
7008 }
7009
7010 nops = tree_code_length[(int) TREE_CODE (exp)];
7011 for (i = 0; i < nops; i++)
7012 if (TREE_OPERAND (exp, i) != 0)
7013 {
7014 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
7015 if (type == 'e' || type == '<' || type == '1' || type == '2'
7016 || type == 'r')
7017 preexpand_calls (TREE_OPERAND (exp, i));
7018 }
7019}
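/* For example, in `f (g (x), y)' the recursion above expands the call
   to G (recording its value in CALL_EXPR_RTL) before expansion of the
   call to F begins, so no call insn is emitted while another call's
   arguments are still being pushed.  */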
7020\f
7021/* At the start of a function, record that we have no previously-pushed
7022 arguments waiting to be popped. */
7023
7024void
7025init_pending_stack_adjust ()
7026{
7027 pending_stack_adjust = 0;
7028}
7029
7030/* When exiting from function, if safe, clear out any pending stack adjust
7031 so the adjustment won't get done. */
7032
7033void
7034clear_pending_stack_adjust ()
7035{
7036#ifdef EXIT_IGNORE_STACK
7037 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
 7038 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
7039 && ! flag_inline_functions)
7040 pending_stack_adjust = 0;
7041#endif
7042}
7043
7044/* Pop any previously-pushed arguments that have not been popped yet. */
7045
7046void
7047do_pending_stack_adjust ()
7048{
7049 if (inhibit_defer_pop == 0)
7050 {
7051 if (pending_stack_adjust != 0)
 7052 adjust_stack (GEN_INT (pending_stack_adjust));
7053 pending_stack_adjust = 0;
7054 }
7055}
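/* For example, after expanding `f (); g ();' the argument bytes of
   both calls can accumulate in pending_stack_adjust, and the single
   adjust_stack above then pops them all at once instead of once per
   call.  */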
7056
7057/* Expand all cleanups up to OLD_CLEANUPS.
7058 Needed here, and also for language-dependent calls. */
7059
7060void
7061expand_cleanups_to (old_cleanups)
7062 tree old_cleanups;
7063{
7064 while (cleanups_this_call != old_cleanups)
7065 {
 7066 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
7067 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
7068 }
7069}
7070\f
7071/* Expand conditional expressions. */
7072
7073/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
7074 LABEL is an rtx of code CODE_LABEL, in this function and all the
7075 functions here. */
7076
7077void
7078jumpifnot (exp, label)
7079 tree exp;
7080 rtx label;
7081{
 7082 do_jump (exp, label, NULL_RTX);
7083}
7084
7085/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
7086
7087void
7088jumpif (exp, label)
7089 tree exp;
7090 rtx label;
7091{
 7092 do_jump (exp, NULL_RTX, label);
7093}
7094
7095/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
7096 the result is zero, or IF_TRUE_LABEL if the result is one.
7097 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
7098 meaning fall through in that case.
7099
7100 do_jump always does any pending stack adjust except when it does not
7101 actually perform a jump. An example where there is no jump
7102 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
7103
7104 This function is responsible for optimizing cases such as
7105 &&, || and comparison operators in EXP. */
7106
7107void
7108do_jump (exp, if_false_label, if_true_label)
7109 tree exp;
7110 rtx if_false_label, if_true_label;
7111{
7112 register enum tree_code code = TREE_CODE (exp);
7113 /* Some cases need to create a label to jump to
7114 in order to properly fall through.
7115 These cases set DROP_THROUGH_LABEL nonzero. */
7116 rtx drop_through_label = 0;
7117 rtx temp;
7118 rtx comparison = 0;
7119 int i;
7120 tree type;
7121
7122 emit_queue ();
7123
7124 switch (code)
7125 {
7126 case ERROR_MARK:
7127 break;
7128
7129 case INTEGER_CST:
7130 temp = integer_zerop (exp) ? if_false_label : if_true_label;
7131 if (temp)
7132 emit_jump (temp);
7133 break;
7134
7135#if 0
7136 /* This is not true with #pragma weak */
7137 case ADDR_EXPR:
7138 /* The address of something can never be zero. */
7139 if (if_true_label)
7140 emit_jump (if_true_label);
7141 break;
7142#endif
7143
7144 case NOP_EXPR:
7145 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
7146 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
7147 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
7148 goto normal;
7149 case CONVERT_EXPR:
7150 /* If we are narrowing the operand, we have to do the compare in the
7151 narrower mode. */
7152 if ((TYPE_PRECISION (TREE_TYPE (exp))
7153 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7154 goto normal;
7155 case NON_LVALUE_EXPR:
7156 case REFERENCE_EXPR:
7157 case ABS_EXPR:
7158 case NEGATE_EXPR:
7159 case LROTATE_EXPR:
7160 case RROTATE_EXPR:
7161 /* These cannot change zero->non-zero or vice versa. */
7162 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7163 break;
7164
7165#if 0
7166 /* This is never less insns than evaluating the PLUS_EXPR followed by
7167 a test and can be longer if the test is eliminated. */
7168 case PLUS_EXPR:
7169 /* Reduce to minus. */
7170 exp = build (MINUS_EXPR, TREE_TYPE (exp),
7171 TREE_OPERAND (exp, 0),
7172 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
7173 TREE_OPERAND (exp, 1))));
7174 /* Process as MINUS. */
7175#endif
7176
7177 case MINUS_EXPR:
7178 /* Non-zero iff operands of minus differ. */
7179 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
7180 TREE_OPERAND (exp, 0),
7181 TREE_OPERAND (exp, 1)),
7182 NE, NE);
7183 break;
7184
7185 case BIT_AND_EXPR:
7186 /* If we are AND'ing with a small constant, do this comparison in the
7187 smallest type that fits. If the machine doesn't have comparisons
7188 that small, it will be converted back to the wider comparison.
7189 This helps if we are testing the sign bit of a narrower object.
7190 combine can't do this for us because it can't know whether a
7191 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
7192
 7193 if (! SLOW_BYTE_ACCESS
 7194 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
 7195 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
7196 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
7197 && (type = type_for_size (i + 1, 1)) != 0
7198 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7199 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7200 != CODE_FOR_nothing))
7201 {
7202 do_jump (convert (type, exp), if_false_label, if_true_label);
7203 break;
7204 }
7205 goto normal;
7206
7207 case TRUTH_NOT_EXPR:
7208 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7209 break;
7210
7211 case TRUTH_ANDIF_EXPR:
7212 if (if_false_label == 0)
7213 if_false_label = drop_through_label = gen_label_rtx ();
 7214 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
7215 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7216 break;
7217
7218 case TRUTH_ORIF_EXPR:
7219 if (if_true_label == 0)
7220 if_true_label = drop_through_label = gen_label_rtx ();
 7221 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
7222 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7223 break;
7224
7225 case COMPOUND_EXPR:
7226 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7227 free_temp_slots ();
7228 emit_queue ();
 7229 do_pending_stack_adjust ();
7230 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7231 break;
7232
7233 case COMPONENT_REF:
7234 case BIT_FIELD_REF:
7235 case ARRAY_REF:
7236 {
7237 int bitsize, bitpos, unsignedp;
7238 enum machine_mode mode;
7239 tree type;
 7240 tree offset;
7241 int volatilep = 0;
7242
7243 /* Get description of this reference. We don't actually care
7244 about the underlying object here. */
7245 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7246 &mode, &unsignedp, &volatilep);
7247
7248 type = type_for_size (bitsize, unsignedp);
7249 if (! SLOW_BYTE_ACCESS
7250 && type != 0 && bitsize >= 0
7251 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7252 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7253 != CODE_FOR_nothing))
7254 {
7255 do_jump (convert (type, exp), if_false_label, if_true_label);
7256 break;
7257 }
7258 goto normal;
7259 }
7260
7261 case COND_EXPR:
7262 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7263 if (integer_onep (TREE_OPERAND (exp, 1))
7264 && integer_zerop (TREE_OPERAND (exp, 2)))
7265 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7266
7267 else if (integer_zerop (TREE_OPERAND (exp, 1))
7268 && integer_onep (TREE_OPERAND (exp, 2)))
7269 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7270
7271 else
7272 {
7273 register rtx label1 = gen_label_rtx ();
7274 drop_through_label = gen_label_rtx ();
 7275 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
7276 /* Now the THEN-expression. */
7277 do_jump (TREE_OPERAND (exp, 1),
7278 if_false_label ? if_false_label : drop_through_label,
7279 if_true_label ? if_true_label : drop_through_label);
7280 /* In case the do_jump just above never jumps. */
7281 do_pending_stack_adjust ();
7282 emit_label (label1);
7283 /* Now the ELSE-expression. */
7284 do_jump (TREE_OPERAND (exp, 2),
7285 if_false_label ? if_false_label : drop_through_label,
7286 if_true_label ? if_true_label : drop_through_label);
7287 }
7288 break;
7289
7290 case EQ_EXPR:
7291 if (integer_zerop (TREE_OPERAND (exp, 1)))
7292 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7293 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7294 == MODE_INT)
7295 &&
7296 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7297 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7298 else
7299 comparison = compare (exp, EQ, EQ);
7300 break;
7301
7302 case NE_EXPR:
7303 if (integer_zerop (TREE_OPERAND (exp, 1)))
7304 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7305 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7306 == MODE_INT)
7307 &&
7308 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7309 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7310 else
7311 comparison = compare (exp, NE, NE);
7312 break;
7313
7314 case LT_EXPR:
7315 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7316 == MODE_INT)
7317 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7318 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7319 else
7320 comparison = compare (exp, LT, LTU);
7321 break;
7322
7323 case LE_EXPR:
7324 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7325 == MODE_INT)
7326 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7327 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7328 else
7329 comparison = compare (exp, LE, LEU);
7330 break;
7331
7332 case GT_EXPR:
7333 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7334 == MODE_INT)
7335 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7336 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7337 else
7338 comparison = compare (exp, GT, GTU);
7339 break;
7340
7341 case GE_EXPR:
7342 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7343 == MODE_INT)
7344 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7345 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7346 else
7347 comparison = compare (exp, GE, GEU);
7348 break;
7349
7350 default:
7351 normal:
 7352 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
7353#if 0
7354 /* This is not needed any more and causes poor code since it causes
7355 comparisons and tests from non-SI objects to have different code
7356 sequences. */
7357 /* Copy to register to avoid generating bad insns by cse
7358 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7359 if (!cse_not_expected && GET_CODE (temp) == MEM)
7360 temp = copy_to_reg (temp);
7361#endif
7362 do_pending_stack_adjust ();
7363 if (GET_CODE (temp) == CONST_INT)
7364 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7365 else if (GET_CODE (temp) == LABEL_REF)
7366 comparison = const_true_rtx;
7367 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7368 && !can_compare_p (GET_MODE (temp)))
7369 /* Note swapping the labels gives us not-equal. */
7370 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7371 else if (GET_MODE (temp) != VOIDmode)
7372 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
7373 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7374 GET_MODE (temp), NULL_RTX, 0);
7375 else
7376 abort ();
7377 }
7378
7379 /* Do any postincrements in the expression that was tested. */
7380 emit_queue ();
7381
7382 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7383 straight into a conditional jump instruction as the jump condition.
7384 Otherwise, all the work has been done already. */
7385
7386 if (comparison == const_true_rtx)
7387 {
7388 if (if_true_label)
7389 emit_jump (if_true_label);
7390 }
7391 else if (comparison == const0_rtx)
7392 {
7393 if (if_false_label)
7394 emit_jump (if_false_label);
7395 }
7396 else if (comparison)
7397 do_jump_for_compare (comparison, if_false_label, if_true_label);
7398
7399 free_temp_slots ();
7400
7401 if (drop_through_label)
7402 {
7403 /* If do_jump produces code that might be jumped around,
7404 do any stack adjusts from that code, before the place
7405 where control merges in. */
7406 do_pending_stack_adjust ();
7407 emit_label (drop_through_label);
7408 }
7409}
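/* As an illustration, `if (a && b) ...' reaches the TRUTH_ANDIF_EXPR
   case above: A is tested first and jumps to IF_FALSE_LABEL (or a
   drop-through label) when false, so B is never evaluated in that
   case; only when A is nonzero are the jumps for B emitted.  */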
7410\f
7411/* Given a comparison expression EXP for values too wide to be compared
7412 with one insn, test the comparison and jump to the appropriate label.
7413 The code of EXP is ignored; we always test GT if SWAP is 0,
7414 and LT if SWAP is 1. */
7415
7416static void
7417do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7418 tree exp;
7419 int swap;
7420 rtx if_false_label, if_true_label;
7421{
 7422 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
 7423 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
7424 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7425 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7426 rtx drop_through_label = 0;
7427 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
7428 int i;
7429
7430 if (! if_true_label || ! if_false_label)
7431 drop_through_label = gen_label_rtx ();
7432 if (! if_true_label)
7433 if_true_label = drop_through_label;
7434 if (! if_false_label)
7435 if_false_label = drop_through_label;
7436
7437 /* Compare a word at a time, high order first. */
7438 for (i = 0; i < nwords; i++)
7439 {
7440 rtx comp;
7441 rtx op0_word, op1_word;
7442
7443 if (WORDS_BIG_ENDIAN)
7444 {
7445 op0_word = operand_subword_force (op0, i, mode);
7446 op1_word = operand_subword_force (op1, i, mode);
7447 }
7448 else
7449 {
7450 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7451 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7452 }
7453
7454 /* All but high-order word must be compared as unsigned. */
7455 comp = compare_from_rtx (op0_word, op1_word,
7456 (unsignedp || i > 0) ? GTU : GT,
7457 unsignedp, word_mode, NULL_RTX, 0);
7458 if (comp == const_true_rtx)
7459 emit_jump (if_true_label);
7460 else if (comp != const0_rtx)
7461 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7462
7463 /* Consider lower words only if these are equal. */
7464 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7465 NULL_RTX, 0);
7466 if (comp == const_true_rtx)
7467 emit_jump (if_false_label);
7468 else if (comp != const0_rtx)
7469 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7470 }
7471
7472 if (if_false_label)
7473 emit_jump (if_false_label);
7474 if (drop_through_label)
7475 emit_label (drop_through_label);
7476}
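/* Sketch: for a DImode compare on a 32-bit word machine, the loop
   above first compares the high-order words with GT or GTU (only the
   high word uses the signed test), jumping to IF_TRUE_LABEL if they
   differ the right way, and falls through to the unsigned low-word
   compare only when the high words are equal.  */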
7477
7478/* Compare OP0 with OP1, word at a time, in mode MODE.
7479 UNSIGNEDP says to do unsigned comparison.
7480 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
7481
7482static void
7483do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
7484 enum machine_mode mode;
7485 int unsignedp;
7486 rtx op0, op1;
7487 rtx if_false_label, if_true_label;
7488{
7489 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7490 rtx drop_through_label = 0;
7491 int i;
7492
7493 if (! if_true_label || ! if_false_label)
7494 drop_through_label = gen_label_rtx ();
7495 if (! if_true_label)
7496 if_true_label = drop_through_label;
7497 if (! if_false_label)
7498 if_false_label = drop_through_label;
7499
7500 /* Compare a word at a time, high order first. */
7501 for (i = 0; i < nwords; i++)
7502 {
7503 rtx comp;
7504 rtx op0_word, op1_word;
7505
7506 if (WORDS_BIG_ENDIAN)
7507 {
7508 op0_word = operand_subword_force (op0, i, mode);
7509 op1_word = operand_subword_force (op1, i, mode);
7510 }
7511 else
7512 {
7513 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7514 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7515 }
7516
7517 /* All but high-order word must be compared as unsigned. */
7518 comp = compare_from_rtx (op0_word, op1_word,
7519 (unsignedp || i > 0) ? GTU : GT,
 7520 unsignedp, word_mode, NULL_RTX, 0);
7521 if (comp == const_true_rtx)
7522 emit_jump (if_true_label);
7523 else if (comp != const0_rtx)
 7524 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7525
7526 /* Consider lower words only if these are equal. */
7527 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
 7528 NULL_RTX, 0);
7529 if (comp == const_true_rtx)
7530 emit_jump (if_false_label);
7531 else if (comp != const0_rtx)
 7532 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7533 }
7534
7535 if (if_false_label)
7536 emit_jump (if_false_label);
7537 if (drop_through_label)
7538 emit_label (drop_through_label);
7539}
7540
7541/* Given an EQ_EXPR expression EXP for values too wide to be compared
7542 with one insn, test the comparison and jump to the appropriate label. */
7543
7544static void
7545do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7546 tree exp;
7547 rtx if_false_label, if_true_label;
7548{
 7549 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
 7550 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7551 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7552 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7553 int i;
7554 rtx drop_through_label = 0;
7555
7556 if (! if_false_label)
7557 drop_through_label = if_false_label = gen_label_rtx ();
7558
7559 for (i = 0; i < nwords; i++)
7560 {
7561 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7562 operand_subword_force (op1, i, mode),
 7563 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
 7564 word_mode, NULL_RTX, 0);
7565 if (comp == const_true_rtx)
7566 emit_jump (if_false_label);
7567 else if (comp != const0_rtx)
 7568 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7569 }
7570
7571 if (if_true_label)
7572 emit_jump (if_true_label);
7573 if (drop_through_label)
7574 emit_label (drop_through_label);
7575}
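/* Equality needs no word ordering: each pair of words is compared
   with EQ, any mismatch jumps to IF_FALSE_LABEL, and only if every
   word matches does control reach the final jump to IF_TRUE_LABEL.  */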
7576\f
7577/* Jump according to whether OP0 is 0.
7578 We assume that OP0 has an integer mode that is too wide
7579 for the available compare insns. */
7580
7581static void
7582do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7583 rtx op0;
7584 rtx if_false_label, if_true_label;
7585{
7586 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7587 int i;
7588 rtx drop_through_label = 0;
7589
7590 if (! if_false_label)
7591 drop_through_label = if_false_label = gen_label_rtx ();
7592
7593 for (i = 0; i < nwords; i++)
7594 {
7595 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7596 GET_MODE (op0)),
 7597 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
7598 if (comp == const_true_rtx)
7599 emit_jump (if_false_label);
7600 else if (comp != const0_rtx)
 7601 do_jump_for_compare (comp, if_false_label, NULL_RTX);
7602 }
7603
7604 if (if_true_label)
7605 emit_jump (if_true_label);
7606 if (drop_through_label)
7607 emit_label (drop_through_label);
7608}
7609
7610/* Given a comparison expression in rtl form, output conditional branches to
7611 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
7612
7613static void
7614do_jump_for_compare (comparison, if_false_label, if_true_label)
7615 rtx comparison, if_false_label, if_true_label;
7616{
7617 if (if_true_label)
7618 {
7619 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7620 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
7621 else
7622 abort ();
7623
7624 if (if_false_label)
7625 emit_jump (if_false_label);
7626 }
7627 else if (if_false_label)
7628 {
7629 rtx insn;
7630 rtx prev = PREV_INSN (get_last_insn ());
7631 rtx branch = 0;
7632
7633 /* Output the branch with the opposite condition. Then try to invert
7634 what is generated. If more than one insn is a branch, or if the
7635 branch is not the last insn written, abort. If we can't invert
 7636 the branch, make a true label, redirect this jump to that,
7637 emit a jump to the false label and define the true label. */
7638
7639 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7640 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
7641 else
7642 abort ();
7643
7644 /* Here we get the insn before what was just emitted.
7645 On some machines, emitting the branch can discard
7646 the previous compare insn and emit a replacement. */
7647 if (prev == 0)
7648 /* If there's only one preceding insn... */
7649 insn = get_insns ();
7650 else
7651 insn = NEXT_INSN (prev);
7652
7653 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
7654 if (GET_CODE (insn) == JUMP_INSN)
7655 {
7656 if (branch)
7657 abort ();
7658 branch = insn;
7659 }
7660
7661 if (branch != get_last_insn ())
7662 abort ();
7663
7664 if (! invert_jump (branch, if_false_label))
7665 {
7666 if_true_label = gen_label_rtx ();
7667 redirect_jump (branch, if_true_label);
7668 emit_jump (if_false_label);
7669 emit_label (if_true_label);
7670 }
7671 }
7672}
7673\f
7674/* Generate code for a comparison expression EXP
7675 (including code to compute the values to be compared)
7676 and set (CC0) according to the result.
7677 SIGNED_CODE should be the rtx operation for this comparison for
7678 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7679
7680 We force a stack adjustment unless there are currently
7681 things pushed on the stack that aren't yet used. */
7682
7683static rtx
7684compare (exp, signed_code, unsigned_code)
7685 register tree exp;
7686 enum rtx_code signed_code, unsigned_code;
7687{
 7688 register rtx op0
 7689 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
 7690 register rtx op1
 7691 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7692 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7693 register enum machine_mode mode = TYPE_MODE (type);
7694 int unsignedp = TREE_UNSIGNED (type);
7695 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
7696
7697 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7698 ((mode == BLKmode)
 7699 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
7700 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7701}
7702
7703/* Like compare but expects the values to compare as two rtx's.
7704 The decision as to signed or unsigned comparison must be made by the caller.
7705
7706 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
7707 compared.
7708
7709 If ALIGN is non-zero, it is the alignment of this type; if zero, the
7710 size of MODE should be used. */
7711
7712rtx
7713compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
7714 register rtx op0, op1;
7715 enum rtx_code code;
7716 int unsignedp;
7717 enum machine_mode mode;
7718 rtx size;
7719 int align;
7720{
 7721 rtx tem;
 7722
 7723 /* If one operand is constant, make it the second one. Only do this
 7724 if the other operand is not constant as well. */
 7725
 7726 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
 7727 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
 7728 {
 7729 tem = op0;
7730 op0 = op1;
7731 op1 = tem;
7732 code = swap_condition (code);
7733 }
7734
7735 if (flag_force_mem)
7736 {
7737 op0 = force_not_mem (op0);
7738 op1 = force_not_mem (op1);
7739 }
7740
7741 do_pending_stack_adjust ();
7742
7743 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
7744 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
7745 return tem;
 7746
 7747#if 0
 7748 /* There's no need to do this now that combine.c can eliminate lots of
 7749 sign extensions. This can be less efficient in certain cases on other
 7750 machines. */
 7751
 7752 /* If this is a signed equality comparison, we can do it as an
 7753 unsigned comparison since zero-extension is cheaper than sign
 7754 extension and comparisons with zero are done as unsigned. This is
 7755 the case even on machines that can do fast sign extension, since
 7756 zero-extension is easier to combine with other operations than
 7757 sign-extension is. If we are comparing against a constant, we must
 7758 convert it to what it would look like unsigned. */
 7759 if ((code == EQ || code == NE) && ! unsignedp
 7760 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
 7761 {
 7762 if (GET_CODE (op1) == CONST_INT
 7763 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
 7764 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
 7765 unsignedp = 1;
 7766 }
 7767#endif
7768
7769 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
7770
7771 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7772}
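/* The (CODE cc0 0) rtx returned above is suitable for handing
   directly to do_jump_for_compare, which indexes bcc_gen_fctn by its
   code to emit the actual conditional branch.  */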
7773\f
7774/* Generate code to calculate EXP using a store-flag instruction
7775 and return an rtx for the result. EXP is either a comparison
7776 or a TRUTH_NOT_EXPR whose operand is a comparison.
7777
7778 If TARGET is nonzero, store the result there if convenient.
7779
7780 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
7781 cheap.
7782
7783 Return zero if there is no suitable set-flag instruction
7784 available on this machine.
7785
7786 Once expand_expr has been called on the arguments of the comparison,
7787 we are committed to doing the store flag, since it is not safe to
7788 re-evaluate the expression. We emit the store-flag insn by calling
7789 emit_store_flag, but only expand the arguments if we have a reason
7790 to believe that emit_store_flag will be successful. If we think that
7791 it will, but it isn't, we have to simulate the store-flag with a
7792 set/jump/set sequence. */
7793
7794static rtx
7795do_store_flag (exp, target, mode, only_cheap)
7796 tree exp;
7797 rtx target;
7798 enum machine_mode mode;
7799 int only_cheap;
7800{
7801 enum rtx_code code;
 7802 tree arg0, arg1, type;
 7803 tree tem;
 7804 enum machine_mode operand_mode;
 7805 int invert = 0;
 7806 int unsignedp;
7807 rtx op0, op1;
7808 enum insn_code icode;
7809 rtx subtarget = target;
7810 rtx result, label, pattern, jump_pat;
7811
7812 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7813 result at the end. We can't simply invert the test since it would
7814 have already been inverted if it were valid. This case occurs for
7815 some floating-point comparisons. */
7816
7817 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7818 invert = 1, exp = TREE_OPERAND (exp, 0);
7819
7820 arg0 = TREE_OPERAND (exp, 0);
7821 arg1 = TREE_OPERAND (exp, 1);
7822 type = TREE_TYPE (arg0);
7823 operand_mode = TYPE_MODE (type);
7824 unsignedp = TREE_UNSIGNED (type);
7825
7826 /* We won't bother with BLKmode store-flag operations because it would mean
7827 passing a lot of information to emit_store_flag. */
7828 if (operand_mode == BLKmode)
7829 return 0;
7830
 7831 STRIP_NOPS (arg0);
 7832 STRIP_NOPS (arg1);
7833
7834 /* Get the rtx comparison code to use. We know that EXP is a comparison
7835 operation of some type. Some comparisons against 1 and -1 can be
7836 converted to comparisons with zero. Do so here so that the tests
7837 below will be aware that we have a comparison with zero. These
7838 tests will not catch constants in the first operand, but constants
7839 are rarely passed as the first operand. */
7840
7841 switch (TREE_CODE (exp))
7842 {
7843 case EQ_EXPR:
7844 code = EQ;
7845 break;
7846 case NE_EXPR:
7847 code = NE;
7848 break;
7849 case LT_EXPR:
7850 if (integer_onep (arg1))
7851 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7852 else
7853 code = unsignedp ? LTU : LT;
7854 break;
7855 case LE_EXPR:
 7856 if (! unsignedp && integer_all_onesp (arg1))
 7857 arg1 = integer_zero_node, code = LT;
7858 else
7859 code = unsignedp ? LEU : LE;
7860 break;
7861 case GT_EXPR:
 7862 if (! unsignedp && integer_all_onesp (arg1))
 7863 arg1 = integer_zero_node, code = GE;
7864 else
7865 code = unsignedp ? GTU : GT;
7866 break;
7867 case GE_EXPR:
7868 if (integer_onep (arg1))
7869 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7870 else
7871 code = unsignedp ? GEU : GE;
7872 break;
7873 default:
7874 abort ();
7875 }
7876
7877 /* Put a constant second. */
7878 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7879 {
7880 tem = arg0; arg0 = arg1; arg1 = tem;
7881 code = swap_condition (code);
7882 }
7883
7884 /* If this is an equality or inequality test of a single bit, we can
7885 do this by shifting the bit being tested to the low-order bit and
7886 masking the result with the constant 1. If the condition was EQ,
7887 we xor it with 1. This does not require an scc insn and is faster
7888 than an scc insn even if we have it. */
7889
7890 if ((code == NE || code == EQ)
7891 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7892 && integer_pow2p (TREE_OPERAND (arg0, 1))
 7893 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
 7894 {
 7895 tree inner = TREE_OPERAND (arg0, 0);
 7896 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
 7897 NULL_RTX, VOIDmode, 0)));
7898 int ops_unsignedp;
7899
7900 /* If INNER is a right shift of a constant and it plus BITNUM does
7901 not overflow, adjust BITNUM and INNER. */
7902
7903 if (TREE_CODE (inner) == RSHIFT_EXPR
7904 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
7905 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
7906 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
7907 < TYPE_PRECISION (type)))
7908 {
 7909 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
7910 inner = TREE_OPERAND (inner, 0);
7911 }
7912
7913 /* If we are going to be able to omit the AND below, we must do our
7914 operations as unsigned. If we must use the AND, we have a choice.
7915 Normally unsigned is faster, but for some machines signed is. */
7916 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
7917#ifdef BYTE_LOADS_SIGN_EXTEND
7918 : 0
7919#else
7920 : 1
7921#endif
7922 );
7923
7924 if (subtarget == 0 || GET_CODE (subtarget) != REG
7925 || GET_MODE (subtarget) != operand_mode
 7926 || ! safe_from_p (subtarget, inner))
7927 subtarget = 0;
7928
 7929 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
7930
7931 if (bitnum != 0)
7932 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
 7933 size_int (bitnum), target, ops_unsignedp);
7934
7935 if (GET_MODE (op0) != mode)
7936 op0 = convert_to_mode (mode, op0, ops_unsignedp);
7937
7938 if ((code == EQ && ! invert) || (code == NE && invert))
7939 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target,
7940 ops_unsignedp, OPTAB_LIB_WIDEN);
 7941
 7942 /* Put the AND last so it can combine with more things. */
7943 if (bitnum != TYPE_PRECISION (type) - 1)
7944 op0 = expand_and (op0, const1_rtx, target);
7945
7946 return op0;
7947 }
7948
7949 /* Now see if we are likely to be able to do this. Return if not. */
7950 if (! can_compare_p (operand_mode))
7951 return 0;
7952 icode = setcc_gen_code[(int) code];
7953 if (icode == CODE_FOR_nothing
7954 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7955 {
7956 /* We can only do this if it is one of the special cases that
7957 can be handled without an scc insn. */
7958 if ((code == LT && integer_zerop (arg1))
7959 || (! only_cheap && code == GE && integer_zerop (arg1)))
7960 ;
7961 else if (BRANCH_COST >= 0
7962 && ! only_cheap && (code == NE || code == EQ)
7963 && TREE_CODE (type) != REAL_TYPE
7964 && ((abs_optab->handlers[(int) operand_mode].insn_code
7965 != CODE_FOR_nothing)
7966 || (ffs_optab->handlers[(int) operand_mode].insn_code
7967 != CODE_FOR_nothing)))
7968 ;
7969 else
7970 return 0;
7971 }
7972
7973 preexpand_calls (exp);
7974 if (subtarget == 0 || GET_CODE (subtarget) != REG
7975 || GET_MODE (subtarget) != operand_mode
7976 || ! safe_from_p (subtarget, arg1))
7977 subtarget = 0;
7978
7979 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
 7980 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
7981
7982 if (target == 0)
7983 target = gen_reg_rtx (mode);
7984
7985 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
7986 because, if the emit_store_flag does anything it will succeed and
7987 OP0 and OP1 will not be used subsequently. */
7988
7989 result = emit_store_flag (target, code,
7990 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
7991 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
7992 operand_mode, unsignedp, 1);
7993
7994 if (result)
7995 {
7996 if (invert)
7997 result = expand_binop (mode, xor_optab, result, const1_rtx,
7998 result, 0, OPTAB_LIB_WIDEN);
7999 return result;
8000 }
8001
8002 /* If this failed, we have to do this with set/compare/jump/set code. */
8003 if (target == 0 || GET_CODE (target) != REG
8004 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
8005 target = gen_reg_rtx (GET_MODE (target));
8006
 8007 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
8008 result = compare_from_rtx (op0, op1, code, unsignedp,
8009 operand_mode, NULL_RTX, 0);
 8010 if (GET_CODE (result) == CONST_INT)
8011 return (((result == const0_rtx && ! invert)
8012 || (result != const0_rtx && invert))
8013 ? const0_rtx : const1_rtx);
8014
8015 label = gen_label_rtx ();
8016 if (bcc_gen_fctn[(int) code] == 0)
8017 abort ();
8018
8019 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
e7c33f54 8020 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
bbf6f052
RK
8021 emit_label (label);
8022
8023 return target;
8024}
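/* For example, `(x & 8) != 0' hits the single-bit case above and is
   computed as roughly `(x >> 3) & 1' with no scc insn and no jump,
   while `x < y' on a machine without an scc pattern falls through to
   the set/compare/jump/set sequence at the end.  */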
8025\f
8026/* Generate a tablejump instruction (used for switch statements). */
8027
8028#ifdef HAVE_tablejump
8029
8030/* INDEX is the value being switched on, with the lowest value
8031 in the table already subtracted.
 8032 MODE is its expected mode (needed if INDEX is constant).
8033 RANGE is the length of the jump table.
8034 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
8035
8036 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
8037 index value is out of range. */
8038
8039void
e87b4f3f 8040do_tablejump (index, mode, range, table_label, default_label)
bbf6f052 8041 rtx index, range, table_label, default_label;
e87b4f3f 8042 enum machine_mode mode;
bbf6f052
RK
8043{
8044 register rtx temp, vector;
8045
8046 /* Do an unsigned comparison (in the proper mode) between the index
8047 expression and the value which represents the length of the range.
8048 Since we just finished subtracting the lower bound of the range
8049 from the index expression, this comparison allows us to simultaneously
8050 check that the original index expression value is both greater than
8051 or equal to the minimum value of the range and less than or equal to
8052 the maximum value of the range. */
 8053
 8054 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
 8055 emit_jump_insn (gen_bltu (default_label));
8056
8057 /* If index is in range, it must fit in Pmode.
8058 Convert to Pmode so we can index with it. */
8059 if (mode != Pmode)
8060 index = convert_to_mode (Pmode, index, 1);
8061
8062 /* If flag_force_addr were to affect this address
8063 it could interfere with the tricky assumptions made
8064 about addresses that contain label-refs,
8065 which may be valid only very near the tablejump itself. */
8066 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
8067 GET_MODE_SIZE, because this indicates how large insns are. The other
8068 uses should all be Pmode, because they are addresses. This code
8069 could fail if addresses and insns are not the same size. */
8070 index = memory_address_noforce
8071 (CASE_VECTOR_MODE,
8072 gen_rtx (PLUS, Pmode,
8073 gen_rtx (MULT, Pmode, index,
 8074 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
8075 gen_rtx (LABEL_REF, Pmode, table_label)));
8076 temp = gen_reg_rtx (CASE_VECTOR_MODE);
8077 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
8078 RTX_UNCHANGING_P (vector) = 1;
8079 convert_move (temp, vector, 0);
8080
8081 emit_jump_insn (gen_tablejump (temp, table_label));
8082
8083#ifndef CASE_VECTOR_PC_RELATIVE
8084 /* If we are generating PIC code or if the table is PC-relative, the
8085 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
8086 if (! flag_pic)
8087 emit_barrier ();
8088#endif
8089}
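/* E.g. for `switch (c)' with cases 'a' through 'z', the caller has
   already subtracted 'a' from C, so the single unsigned LTU test
   above rejects both c < 'a' and c > 'z' at once before indexing the
   table.  */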
8090
8091#endif /* HAVE_tablejump */