/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
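
/* A quick sanity check of the rounding (illustrative only, not part of the
   original source): with x = 10 and y = 4, CEIL expands to
   (10 + 4 - 1) / 4 = 13 / 4 = 3, the number of 4-byte units needed to
   hold 10 bytes, whereas plain 10 / 4 would truncate to 2.  */
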
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static void move_by_pieces PROTO((rtx, rtx, int, int));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
				    struct move_by_pieces *));
static void group_insns PROTO((rtx));
static void store_constructor PROTO((tree, rtx));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int, int));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree));
static int fixed_type_p PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
	 to facilitate use of autoincrement.
	 Make a copy of the contents of the memory location
	 rather than a copy of the address, but not
	 if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  XEXP (x, 0) = QUEUED_VAR (y);
	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (x));
	      emit_insn_before (gen_move_insn (temp, x),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return x;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
	{
	  XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
	  XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
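
/* Illustrative sketch (not from the original source) of the intended
   calling discipline: every operand that might contain a QUEUED is
   filtered just before it goes into an insn, e.g.

       src = protect_from_queue (src, 0);   ... read access
       dst = protect_from_queue (dst, 1);   ... write access
       emit_move_insn (dst, src);

   with no emit_queue call between the protect_from_queue calls and the
   use of their results.  */
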

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */
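
/* A minimal usage sketch (illustrative, not part of the original source):
   widening a SImode value SRC into a fresh DImode register:

       rtx dst = gen_reg_rtx (DImode);
       convert_move (dst, src, 0);     ... 0 = signed, so sign-extend

   Passing 1 for UNSIGNEDP would request zero extension instead.  SRC is
   assumed here to be a SImode rtx.  */
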

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, from));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi
	  if (HAVE_extendpsisi)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if ((can_extend_p (to_mode, intermediate, unsignedp)
		 != CODE_FOR_nothing)
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */
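
/* Usage sketch (illustrative, not part of the original source): unlike
   convert_move, convert_to_mode allocates the destination itself when a
   copy is needed, so

       rtx wide = convert_to_mode (DImode, narrow, 1);

   yields an rtx holding the zero-extension of NARROW (assumed here to be
   a SImode value); when no conversion insn is needed it may simply return
   a lowpart reference to NARROW instead of emitting anything.  */
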

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If X is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */
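
/* Worked example (illustrative, assuming 4-byte SImode, 2-byte HImode,
   1-byte QImode, and MOVE_MAX == 4): copying LEN == 11 bytes at
   ALIGN == 4 decomposes into two SImode moves, one HImode move, and one
   QImode move, chosen by the widest-mode-first loop below.  */
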
1129
bbf6f052
RK
1130static void
1131move_by_pieces (to, from, len, align)
1132 rtx to, from;
1133 int len, align;
1134{
1135 struct move_by_pieces data;
1136 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
e87b4f3f 1137 int max_size = MOVE_MAX + 1;
bbf6f052
RK
1138
1139 data.offset = 0;
1140 data.to_addr = to_addr;
1141 data.from_addr = from_addr;
1142 data.to = to;
1143 data.from = from;
1144 data.autinc_to
1145 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1146 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1147 data.autinc_from
1148 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1149 || GET_CODE (from_addr) == POST_INC
1150 || GET_CODE (from_addr) == POST_DEC);
1151
1152 data.explicit_inc_from = 0;
1153 data.explicit_inc_to = 0;
1154 data.reverse
1155 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1156 if (data.reverse) data.offset = len;
1157 data.len = len;
1158
1159 /* If copying requires more than two move insns,
1160 copy addresses to registers (to make displacements shorter)
1161 and use post-increment if available. */
1162 if (!(data.autinc_from && data.autinc_to)
1163 && move_by_pieces_ninsns (len, align) > 2)
1164 {
1165#ifdef HAVE_PRE_DECREMENT
1166 if (data.reverse && ! data.autinc_from)
1167 {
1168 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1169 data.autinc_from = 1;
1170 data.explicit_inc_from = -1;
1171 }
1172#endif
1173#ifdef HAVE_POST_INCREMENT
1174 if (! data.autinc_from)
1175 {
1176 data.from_addr = copy_addr_to_reg (from_addr);
1177 data.autinc_from = 1;
1178 data.explicit_inc_from = 1;
1179 }
1180#endif
1181 if (!data.autinc_from && CONSTANT_P (from_addr))
1182 data.from_addr = copy_addr_to_reg (from_addr);
1183#ifdef HAVE_PRE_DECREMENT
1184 if (data.reverse && ! data.autinc_to)
1185 {
1186 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1187 data.autinc_to = 1;
1188 data.explicit_inc_to = -1;
1189 }
1190#endif
1191#ifdef HAVE_POST_INCREMENT
1192 if (! data.reverse && ! data.autinc_to)
1193 {
1194 data.to_addr = copy_addr_to_reg (to_addr);
1195 data.autinc_to = 1;
1196 data.explicit_inc_to = 1;
1197 }
1198#endif
1199 if (!data.autinc_to && CONSTANT_P (to_addr))
1200 data.to_addr = copy_addr_to_reg (to_addr);
1201 }
1202
e87b4f3f
RS
1203 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1204 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
bbf6f052 1205 align = MOVE_MAX;
bbf6f052
RK
1206
1207 /* First move what we can in the largest integer mode, then go to
1208 successively smaller modes. */
1209
1210 while (max_size > 1)
1211 {
1212 enum machine_mode mode = VOIDmode, tmode;
1213 enum insn_code icode;
1214
e7c33f54
RK
1215 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1216 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1217 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1218 mode = tmode;
1219
1220 if (mode == VOIDmode)
1221 break;
1222
1223 icode = mov_optab->handlers[(int) mode].insn_code;
1224 if (icode != CODE_FOR_nothing
1225 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1226 GET_MODE_SIZE (mode)))
1227 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1228
1229 max_size = GET_MODE_SIZE (mode);
1230 }
1231
1232 /* The code above should have handled everything. */
1233 if (data.len != 0)
1234 abort ();
1235}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
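
/* A quick check of the count (illustrative, same assumptions as above):
   with MOVE_MAX == 4 and ALIGN == 4, l == 11 gives 11 / 4 = 2 SImode
   insns with 3 bytes left, then 3 / 2 = 1 HImode insn with 1 byte left,
   then 1 QImode insn: 4 insns in all, matching what move_by_pieces
   would emit.  */
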

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */
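
/* Usage sketch (illustrative, not from the original source): copying a
   16-byte word-aligned structure from SRC to DST, both BLKmode MEMs:

       emit_block_move (dst, src, GEN_INT (16), 4);

   With a constant size this small the move_by_pieces path is normally
   taken; otherwise a movstr pattern or a memcpy/bcopy call is used.  */
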

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype), size,
					  TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif
    }
}
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */
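
/* For instance (illustrative only), move_block_to_reg (4, x, 2, DImode)
   loads the two words of a DImode value X into hard registers 4 and 5,
   going through memory first if X is a constant the target cannot load
   directly.  */
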

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			   GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
			    GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}

/* Mark the instructions since PREV as a libcall block.
   Add a REG_LIBCALL note to the first insn after PREV and a REG_RETVAL
   note to the most recent insn.  */

static void
group_insns (prev)
     rtx prev;
{
  rtx insn_first;
  rtx insn_last;

  /* Find the instructions to mark.  */
  if (prev)
    insn_first = NEXT_INSN (prev);
  else
    insn_first = get_insns ();

  insn_last = get_last_insn ();

  REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
				   REG_NOTES (insn_last));

  REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
				    REG_NOTES (insn_first));
}
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */
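
/* For example (illustrative), zeroing a 32-byte BLKmode object OBJ:

       clear_storage (obj, 32);

   expands to a memset (or bzero) library call; for a non-BLKmode OBJECT
   it is just a move of const0_rtx.  */
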

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */
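
/* Minimal usage sketch (illustrative): storing the constant 42 into a
   SImode pseudo:

       rtx reg = gen_reg_rtx (SImode);
       emit_move_insn (reg, GEN_INT (42));

   emit_move_insn validates addresses and queues, then hands the real
   work to emit_move_insn_1 below.  */
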

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
    submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
			     (class == MODE_COMPLEX_INT
			      ? MODE_INT : MODE_FLOAT),
			     0);

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && submode != BLKmode
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx prev = get_last_insn ();

      /* Tell flow that the whole of the destination is being set.  */
      if (GET_CODE (x) == REG)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 ((stack ? change_address (x, submode, (rtx) 0)
		   : gen_highpart (submode, x)),
		  gen_highpart (submode, y)));
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 ((stack ? change_address (x, submode, (rtx) 0)
		   : gen_lowpart (submode, x)),
		  gen_lowpart (submode, y)));

      group_insns (prev);

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx prev_insn = get_last_insn ();

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}
      /* Mark these insns as a libcall block.  */
      group_insns (prev_insn);

      return last_insn;
    }
  else
    abort ();
}
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */
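
/* Illustrative call (not from the original source): reserving 24 bytes of
   outgoing-argument space with no padding,

       rtx addr = push_block (GEN_INT (24), 0, 0);

   adjusts the stack pointer and returns an address for the beginning of
   the new block.  */
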

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.  */
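
/* Illustrative sketch (not from the original source): pushing a word-sized
   register argument on a machine with real push insns, with no padding and
   no partial-register portion, might look like

       emit_push_insn (val, SImode, type, NULL_RTX,
		       PARM_BOUNDARY / BITS_PER_UNIT, 0, NULL_RTX,
		       0, NULL_RTX, const0_rtx);

   where VAL and TYPE are assumed to be a SImode rtx and its tree type.  */
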
1787
void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	      < MOVE_RATIO)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx (PLUS, Pmode,
							   args_addr, args_so_far),
						  skip));

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
		  < MOVE_RATIO))
	    {
	      move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
			      INTVAL (size), align);
	      goto ret;
	    }
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
	  if (HAVE_movstrqi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
	    {
	      rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrhi
	  if (HAVE_movstrhi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
	    {
	      rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrsi
	  if (HAVE_movstrsi)
	    {
	      rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrdi
	  if (HAVE_movstrdi)
	    {
	      rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
	  /* If the source is referenced relative to the stack pointer,
	     copy it to another register to stabilize it.  We do not need
	     to do this if we know that we won't be changing sp.  */

	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	    temp = copy_to_reg (temp);
#endif

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */
	  NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, 0,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bcopy_libfunc, 0,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#endif
	  OK_DEFER_POP;
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)));
    }
  else
    {
      rtx addr;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
	addr = gen_push_operand ();
      else
#endif
	if (GET_CODE (args_so_far) == CONST_INT)
	  addr
	    = memory_address (mode,
			      plus_constant (args_addr, INTVAL (args_so_far)));
	else
	  addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
						args_so_far));

      emit_move_insn (gen_rtx (MEM, mode, addr), x);
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    move_block_to_reg (REGNO (reg), x, partial, mode);

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
\f
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx.)
   Otherwise, the returned value is not meaningful.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   But now we do this if WANT_VALUE.

   If the value stored is a constant, we return the constant.  */

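/* An illustrative sketch, not part of the original source: for a hypothetical
   assignment `x = y' whose value is also used (say inside `z = (x = y);'),
   the caller passes WANT_VALUE = 1.  X_TREE and Y_TREE are made-up names
   for the operand trees.  */
#if 0
  rtx val = expand_assignment (x_tree, y_tree, 1, 0);
#endif
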
rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    return expand_expr (from, NULL_RTX, VOIDmode, 0);

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index
     has the same problem.  */

  if (TREE_CODE (to) == COMPONENT_REF
      || TREE_CODE (to) == BIT_FIELD_REF
      || (TREE_CODE (to) == ARRAY_REF
	  && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
				      &mode1, &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();
	  to_rtx = change_address (to_rtx, VOIDmode,
				   gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
					    force_reg (Pmode, offset_rtx)));
	}
      if (volatilep)
	{
	  if (GET_CODE (to_rtx) == MEM)
	    MEM_VOLATILE_P (to_rtx) = 1;
#if 0  /* This was turned off because, when a field is volatile
	  in an object which is not volatile, the object may be in a register,
	  and then we would abort over here.  */
	  else
	    abort ();
#endif
	}

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast makes HPUX compiler happy.  */
			     ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
			     : VOIDmode),
			    unsignedp,
			    /* Required alignment of containing datum.  */
			    TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
			    int_size_in_bytes (TREE_TYPE (tem)));
      preserve_temp_slots (result);
      free_temp_slots ();

      return result;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
    {
      rtx temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
      emit_move_insn (to_rtx, temp);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      return to_rtx;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
      rtx size = expr_size (from);

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			 XEXP (from_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			 XEXP (to_rtx, 0), Pmode,
			 convert_to_mode (TYPE_MODE (sizetype),
					  size, TREE_UNSIGNED (sizetype)),
			 TYPE_MODE (sizetype));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      return to_rtx;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  return result;
}

/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   Returns TARGET or an equivalent value.
   TARGET may contain a QUEUED rtx.

   If SUGGEST_REG is nonzero, copy the value through a register
   and return that register, if that is possible.

   If the value stored is a constant, we return the constant.  */

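/* An illustrative sketch, not part of the original source: the typical use
   is to store an already-parsed expression into an rtx computed for the
   destination, discarding the returned value.  */
#if 0
  store_expr (exp, to_rtx, 0);
#endif
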
rtx
store_expr (exp, target, suggest_reg)
     register tree exp;
     register rtx target;
     int suggest_reg;
{
  register rtx temp;
  int dont_return_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;
      return target;
    }
  else if (suggest_reg && GET_CODE (target) == MEM
	   && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, SUGGEST_REG will be nonzero for it.
       We know expand_expr will not use the target in that case.  */
    {
      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
			  GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, it is not safe
       to use as the returned value.  It would access the wrong
       place by the time the queued increment gets output.
       So copy the value through a temporary and use that temp
       as the result.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
	{
	  /* Expand EXP into a new pseudo.  */
	  temp = gen_reg_rtx (GET_MODE (target));
	  temp = expand_expr (exp, temp, GET_MODE (target), 0);
	}
      else
	temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
      dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));
      return temp;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* DO return TARGET if it's a specified hardware register.
	 expand_return relies on this.  */
      if (!(target && GET_CODE (target) == REG
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && CONSTANT_P (temp))
	dont_return_target = 1;
    }

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */

  if (temp != target && TREE_CODE (exp) != ERROR_MARK)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.
	     The string constant may be shorter than the array.
	     So copy just the string's actual length, and clear the rest.  */
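	  /* Illustrative note: for the hypothetical initialization
	     `char buf[6] = "abc";', the four string bytes (including the
	     terminating null) are copied and the remaining two are cleared.  */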
	  rtx size;

	  /* Get the size of the data type of the string,
	     which is actually the size of the target.  */
	  size = expr_size (exp);
	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      size_binop (CEIL_DIV_EXPR,
					  TYPE_SIZE (TREE_TYPE (exp)),
					  size_int (BITS_PER_UNIT)),
			      convert (sizetype,
				       build_int_2 (TREE_STRING_LENGTH (exp), 0)));
	      rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
					       VOIDmode, 0);
	      rtx label = 0;

	      /* Copy that much.  */
	      emit_block_move (target, temp, copy_size_rtx,
			       TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

	      /* Figure out how much is left in TARGET
		 that we have to clear.  */
	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  temp = plus_constant (XEXP (target, 0),
					TREE_STRING_LENGTH (exp));
		  size = plus_constant (size,
					- TREE_STRING_LENGTH (exp));
		}
	      else
		{
		  enum machine_mode size_mode = Pmode;

		  temp = force_reg (Pmode, XEXP (target, 0));
		  temp = expand_binop (size_mode, add_optab, temp,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  size = expand_binop (size_mode, sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

		  emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
				 GET_MODE (size), 0, 0);
		  label = gen_label_rtx ();
		  emit_jump_insn (gen_blt (label));
		}

	      if (size != const0_rtx)
		{
#ifdef TARGET_MEM_FUNCTIONS
		  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
				     temp, Pmode, const0_rtx, Pmode, size, Pmode);
#else
		  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
				     temp, Pmode, size, Pmode);
#endif
		}
	      if (label)
		emit_label (label);
	    }
	}
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else
	emit_move_insn (target, temp);
    }
  if (dont_return_target)
    return temp;
  return target;
}
\f
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM.  */

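/* An illustrative sketch, not part of the original source: at the C level
   this function handles aggregate initializations such as these.  */
#if 0
  struct point { int x, y; } p = { 3, 4 };	/* RECORD_TYPE constructor */
  int v[3] = { 1, 2, 3 };			/* ARRAY_TYPE constructor */
#endif
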
static void
store_constructor (exp, target)
     tree exp;
     rtx target;
{
  tree type = TREE_TYPE (exp);

  /* We know our target cannot conflict, since safe_from_p has been called.  */
#if 0
  /* Don't try copying piece by piece into a hard register
     since that is vulnerable to being clobbered by EXP.
     Instead, construct in a pseudo register and then copy it all.  */
  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      rtx temp = gen_reg_rtx (GET_MODE (target));
      store_constructor (exp, temp);
      emit_move_insn (target, temp);
      return;
    }
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
    {
      register tree elt;

      /* Inform later passes that the whole union value is dead.  */
      if (TREE_CODE (type) == UNION_TYPE)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* If we are building a static constructor into a register,
	 set the initial value as zero so we can fold the value into
	 a constant.  */
      else if (GET_CODE (target) == REG && TREE_STATIC (exp))
	emit_move_insn (target, const0_rtx);

      /* If the constructor has fewer fields than the structure,
	 clear the whole structure first.  */
      else if (list_length (CONSTRUCTOR_ELTS (exp))
	       != list_length (TYPE_FIELDS (type)))
	clear_storage (target, int_size_in_bytes (type));
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	{
	  register tree field = TREE_PURPOSE (elt);
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;

	  /* Just ignore missing fields.
	     We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
	  unsignedp = TREE_UNSIGNED (field);
	  mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;

	  if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
	    /* ??? This case remains to be written.  */
	    abort ();

	  bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));

	  store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
		       /* The alignment of TARGET is
			  at least what its type requires.  */
		       VOIDmode, 0,
		       TYPE_ALIGN (type) / BITS_PER_UNIT,
		       int_size_in_bytes (type));
	}
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      tree domain = TYPE_DOMAIN (type);
      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (type);

      /* If the constructor has fewer fields than the structure,
	 clear the whole structure first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */

      if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
	  || (GET_CODE (target) == REG && TREE_STATIC (exp)))
	clear_storage (target, int_size_in_bytes (type));
      else
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx (CLOBBER, VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding element of TARGET, determined
	 by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
	  register enum machine_mode mode;
	  int bitsize;
	  int bitpos;
	  int unsignedp;

	  mode = TYPE_MODE (elttype);
	  bitsize = GET_MODE_BITSIZE (mode);
	  unsignedp = TREE_UNSIGNED (elttype);

	  bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));

	  store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
		       /* The alignment of TARGET is
			  at least what its type requires.  */
		       VOIDmode, 0,
		       TYPE_ALIGN (type) / BITS_PER_UNIT,
		       int_size_in_bytes (type));
	}
    }

  else
    abort ();
}

/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.  */

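/* An illustrative sketch, not part of the original source: a hypothetical
   store into a 5-bit field starting 3 bits into TARGET, returning the
   stored value in SImode.  The alignment and size arguments follow the
   pattern used by the caller in expand_assignment above.  */
#if 0
  store_field (target, 5, 3, VOIDmode, exp, SImode, 1,
	       TYPE_ALIGN (type) / BITS_PER_UNIT, int_size_in_bytes (type));
#endif
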
static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
	     unsignedp, align, total_size)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  HOST_WIDE_INT width_mask = 0;

  if (bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_stack_temp (GET_MODE (target),
				      GET_MODE_SIZE (GET_MODE (target)), 0);
      rtx blk_object = copy_rtx (object);

      PUT_MODE (blk_object, BLKmode);

      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
		   align, total_size);

      emit_move_insn (target, object);

      return target;
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode])
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG)
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.  */
	  /* If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      if (unsignedp)
		return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;
	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }
	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, 0, align,
				    total_size);
	}
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	addr = copy_to_reg (addr);

      /* Now build a reference to just the desired component.  */

      to_rtx = change_address (target, mode,
			       plus_constant (addr, (bitpos / BITS_PER_UNIT)));
      MEM_IN_STRUCT_P (to_rtx) = 1;

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
\f
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

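/* An illustrative sketch, not part of the original source: this mirrors the
   call made in expand_assignment above.  For a reference like the
   hypothetical `obj.inner.field', it returns the tree for `obj' and fills
   in the combined bit position and size of `field' within it.  */
#if 0
  tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
				  &mode1, &unsignedp, &volatilep);
#endif
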
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep)
     tree exp;
     int *pbitsize;
     int *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = integer_zero_node;

  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));
      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *pbitsize = GET_MODE_BITSIZE (mode);
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
    }

  if (size_tree)
    {
      if (TREE_CODE (size_tree) != INTEGER_CST)
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */

  *pbitpos = 0;

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
	{
	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
		      : TREE_OPERAND (exp, 2));

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (pos == 0)
	    break;

	  if (TREE_CODE (pos) == PLUS_EXPR)
	    {
	      tree constant, var;
	      if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
		{
		  constant = TREE_OPERAND (pos, 0);
		  var = TREE_OPERAND (pos, 1);
		}
	      else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
		{
		  constant = TREE_OPERAND (pos, 1);
		  var = TREE_OPERAND (pos, 0);
		}
	      else
		abort ();

	      *pbitpos += TREE_INT_CST_LOW (constant);
	      offset = size_binop (PLUS_EXPR, offset,
				   size_binop (FLOOR_DIV_EXPR, var,
					       size_int (BITS_PER_UNIT)));
	    }
	  else if (TREE_CODE (pos) == INTEGER_CST)
	    *pbitpos += TREE_INT_CST_LOW (pos);
	  else
	    {
	      /* Assume here that the offset is a multiple of a unit.
		 If not, there should be an explicitly added constant.  */
	      offset = size_binop (PLUS_EXPR, offset,
				   size_binop (FLOOR_DIV_EXPR, pos,
					       size_int (BITS_PER_UNIT)));
	    }
	}

      else if (TREE_CODE (exp) == ARRAY_REF)
	{
	  /* This code is based on the code in case ARRAY_REF in expand_expr
	     below.  We assume here that the size of an array element is
	     always an integral multiple of BITS_PER_UNIT.  */

	  tree index = TREE_OPERAND (exp, 1);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  tree low_bound
	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	  tree index_type = TREE_TYPE (index);

	  if (! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, index_type, index, low_bound));

	  if (TYPE_PRECISION (index_type) != POINTER_SIZE)
	    {
	      index = convert (type_for_size (POINTER_SIZE, 0), index);
	      index_type = TREE_TYPE (index);
	    }

	  index = fold (build (MULT_EXPR, index_type, index,
			       TYPE_SIZE (TREE_TYPE (exp))));

	  if (TREE_CODE (index) == INTEGER_CST
	      && TREE_INT_CST_HIGH (index) == 0)
	    *pbitpos += TREE_INT_CST_LOW (index);
	  else
	    offset = size_binop (PLUS_EXPR, offset,
				 size_binop (FLOOR_DIV_EXPR, index,
					     size_int (BITS_PER_UNIT)));
	}
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;
      exp = TREE_OPERAND (exp, 0);
    }

  /* If this was a bit-field, see if there is a mode that allows direct
     access in case EXP is in memory.  */
  if (mode == VOIDmode && *pbitsize != 0 && *pbitpos % *pbitsize == 0)
    {
      mode = mode_for_size (*pbitsize, MODE_INT, 0);
      if (mode == BLKmode)
	mode = VOIDmode;
    }

  if (integer_zerop (offset))
    offset = 0;

  *pmode = mode;
  *poffset = offset;
#if 0
  /* We aren't finished fixing the callers to really handle nonzero offset.  */
  if (offset != 0)
    abort ();
#endif

  return exp;
}
\f
/* Given an rtx VALUE that may contain additions and multiplications,
   return an equivalent value that just refers to a register or memory.
   This is done by generating instructions to perform the arithmetic
   and returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

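/* An illustrative sketch, not part of the original source: forcing the
   hypothetical address (plus (mult (reg i) (const_int 4)) (reg base)) emits
   a multiply and an add, and returns a pseudo register holding the sum.  */
#if 0
  rtx addr = force_operand (value, NULL_RTX);
#endif
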
rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
			  force_operand (op2, NULL_RTX),
			  target, 0);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
	{
	  binoptab = add_optab;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_binop (GET_MODE (value), binoptab,
				   XEXP (XEXP (value, 0), 0), op2,
				   subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_binop (GET_MODE (value), binoptab, temp,
			       force_operand (XEXP (XEXP (value, 0), 1), 0),
			       target, 0, OPTAB_LIB_WIDEN);
	}

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
			   force_operand (op2, NULL_RTX),
			   target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
	 because the only operations we are expanding here are signed ones.  */
    }
  return value;
}
\f
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
	rtx target = assign_stack_temp (TYPE_MODE (part_type),
					int_size_in_bytes (part_type), 0);
	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
	parts = tree_cons (to_be_saved,
			   build (RTL_EXPR, part_type, NULL_TREE,
				  (tree) target),
			   parts);
	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}

/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */

static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}

/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  */

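/* An illustrative note, not part of the original source: if X is the pseudo
   register being stored into for a hypothetical variable `a', then for EXP
   `b + 2' this returns 1, while for EXP `a' itself (or a call that could
   clobber X) it returns 0.  */
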
static int
safe_from_p (x, exp)
     rtx x;
     tree exp;
{
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0)
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
	  || (GET_CODE (XEXP (x, 0)) == PLUS
	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	return ((TREE_VALUE (exp) == 0
		 || safe_from_p (x, TREE_VALUE (exp)))
		&& (TREE_CHAIN (exp) == 0
		    || safe_from_p (x, TREE_CHAIN (exp))));
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0));

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0))
	      && safe_from_p (x, TREE_OPERAND (exp, 1)));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  return staticp (TREE_OPERAND (exp, 0));

	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM)
	    return 0;
	  break;

	case CALL_EXPR:
	  exp_rtl = CALL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    {
	      /* Assume that the call will clobber all hard registers and
		 all of memory.  */
	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
		  || GET_CODE (x) == MEM)
		return 0;
	    }

	  break;

	case RTL_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    /* We don't know what this can modify.  */
	    return 0;

	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  break;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1));

	case METHOD_CALL_EXPR:
	  /* This takes a rtx argument, but shouldn't appear here.  */
	  abort ();
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i)))
	  return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}

/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}
\f
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.  */

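/* An illustrative sketch, not part of the original source: the most common
   use in this file expands an expression into whatever register or memory
   is convenient, with no target or mode preference.  */
#if 0
  rtx val = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#endif
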
rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode = TYPE_MODE (type);
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  rtx original_target = target;
  int ignore = target == const0_rtx;
  tree context;

  /* Don't use hard regs as subtargets, because the combiner
     can only handle pseudo regs.  */
  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
    subtarget = 0;
  /* Avoid subtargets inside loops,
     since they hide some invariant expressions.  */
  if (preserve_subexpressions_p ())
    subtarget = 0;

  if (ignore) target = 0, original_target = 0;

  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  /* Ensure we reference a volatile object even if value is ignored.  */
  if (ignore && TREE_THIS_VOLATILE (exp)
      && TREE_CODE (exp) != FUNCTION_DECL
      && mode != VOIDmode && mode != BLKmode)
    {
      target = gen_reg_rtx (mode);
      temp = expand_expr (exp, target, VOIDmode, modifier);
      if (temp != target)
	emit_move_insn (target, temp);
      return target;
    }

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Handle using a label in a containing function.  */
	if (function != current_function_decl && function != 0)
	  {
	    struct function *p = find_function_data (function);
	    /* Allocate in the memory associated with the function
	       that the label is in.  */
	    push_obstacks (p->function_obstack,
			   p->function_maybepermanent_obstack);

	    p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
					label_rtx (exp), p->forced_labels);
	    pop_obstacks ();
	  }
	else if (modifier == EXPAND_INITIALIZER)
	  forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
				   label_rtx (exp), forced_labels);
	temp = gen_rtx (MEM, FUNCTION_MODE,
			gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
	if (function != current_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}

    case FUNCTION_DECL:
    case VAR_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();
      /* Ensure variable marked as used
	 even if it doesn't go through a parser.  */
      TREE_USED (exp) = 1;
      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context != 0 && context != current_function_decl
	  && context != inline_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  mark_addressable (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);
	  return change_address (DECL_RTL (exp), mode, addr);
	}

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */
      if (GET_CODE (DECL_RTL (exp)) == MEM
	  && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
	return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
			       XEXP (DECL_RTL (exp), 0));
      if (GET_CODE (DECL_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_SUM
	  && modifier != EXPAND_INITIALIZER)
	{
	  /* DECL_RTL probably contains a constant address.
	     On RISC machines where a constant address isn't valid,
	     make some insns to get that address into a register.  */
	  if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
	      || (flag_force_addr
		  && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
	    return change_address (DECL_RTL (exp), VOIDmode,
				   copy_rtx (XEXP (DECL_RTL (exp), 0)));
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
	  && GET_MODE (DECL_RTL (exp)) != mode)
	{
	  enum machine_mode decl_mode = DECL_MODE (exp);

	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */

	  PROMOTE_MODE (decl_mode, unsignedp, type);

	  if (decl_mode != GET_MODE (DECL_RTL (exp)))
	    abort ();

	  temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return DECL_RTL (exp);
3413
3414 case INTEGER_CST:
3415 return immed_double_const (TREE_INT_CST_LOW (exp),
3416 TREE_INT_CST_HIGH (exp),
3417 mode);
3418
3419 case CONST_DECL:
3420 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3421
3422 case REAL_CST:
3423 /* If optimized, generate immediate CONST_DOUBLE
3424 which will be turned into memory by reload if necessary.
3425
3426 We used to force a register so that loop.c could see it. But
3427 this does not allow gen_* patterns to perform optimizations with
3428 the constants. It also produces two insns in cases like "x = 1.0;".
3429 On most machines, floating-point constants are not permitted in
3430 many insns, so we'd end up copying it to a register in any case.
3431
3432 Now, we do the copying in expand_binop, if appropriate. */
3433 return immed_real_const (exp);
3434
3435 case COMPLEX_CST:
3436 case STRING_CST:
3437 if (! TREE_CST_RTL (exp))
3438 output_constant_def (exp);
3439
3440 /* TREE_CST_RTL probably contains a constant address.
3441 On RISC machines where a constant address isn't valid,
3442 make some insns to get that address into a register. */
3443 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3444 && modifier != EXPAND_CONST_ADDRESS
3445 && modifier != EXPAND_INITIALIZER
3446 && modifier != EXPAND_SUM
3447 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3448 return change_address (TREE_CST_RTL (exp), VOIDmode,
3449 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3450 return TREE_CST_RTL (exp);
3451
3452 case SAVE_EXPR:
3453 context = decl_function_context (exp);
3454 /* We treat inline_function_decl as an alias for the current function
3455 because that is the inline function whose vars, types, etc.
3456 are being merged into the current function.
3457 See expand_inline_function. */
3458 if (context == current_function_decl || context == inline_function_decl)
3459 context = 0;
3460
3461 /* If this is non-local, handle it. */
3462 if (context)
3463 {
3464 temp = SAVE_EXPR_RTL (exp);
3465 if (temp && GET_CODE (temp) == REG)
3466 {
3467 put_var_into_stack (exp);
3468 temp = SAVE_EXPR_RTL (exp);
3469 }
3470 if (temp == 0 || GET_CODE (temp) != MEM)
3471 abort ();
3472 return change_address (temp, mode,
3473 fix_lexical_addr (XEXP (temp, 0), exp));
3474 }
3475 if (SAVE_EXPR_RTL (exp) == 0)
3476 {
3477 if (mode == BLKmode)
3478 temp
3479 = assign_stack_temp (mode,
3480 int_size_in_bytes (TREE_TYPE (exp)), 0);
3481 else
3482 {
3483 enum machine_mode var_mode = mode;
3484
3485 if (TREE_CODE (type) == INTEGER_TYPE
3486 || TREE_CODE (type) == ENUMERAL_TYPE
3487 || TREE_CODE (type) == BOOLEAN_TYPE
3488 || TREE_CODE (type) == CHAR_TYPE
3489 || TREE_CODE (type) == REAL_TYPE
3490 || TREE_CODE (type) == POINTER_TYPE
3491 || TREE_CODE (type) == OFFSET_TYPE)
3492 {
3493 PROMOTE_MODE (var_mode, unsignedp, type);
3494 }
3495
3496 temp = gen_reg_rtx (var_mode);
3497 }
3498
 3499 SAVE_EXPR_RTL (exp) = temp;
3500 if (!optimize && GET_CODE (temp) == REG)
3501 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3502 save_expr_regs);
3503
3504 /* If the mode of TEMP does not match that of the expression, it
3505 must be a promoted value. We pass store_expr a SUBREG of the
3506 wanted mode but mark it so that we know that it was already
3507 extended. Note that `unsignedp' was modified above in
3508 this case. */
3509
3510 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
3511 {
3512 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3513 SUBREG_PROMOTED_VAR_P (temp) = 1;
3514 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3515 }
3516
3517 store_expr (TREE_OPERAND (exp, 0), temp, 0);
 3518 }
3519
3520 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3521 must be a promoted value. We return a SUBREG of the wanted mode,
3522 but mark it so that we know that it was already extended. Note
3523 that `unsignedp' was modified above in this case. */
3524
3525 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3526 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3527 {
3528 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3529 SUBREG_PROMOTED_VAR_P (temp) = 1;
3530 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3531 return temp;
3532 }
3533
3534 return SAVE_EXPR_RTL (exp);
3535
3536 case EXIT_EXPR:
3537 /* Exit the current loop if the body-expression is true. */
3538 {
3539 rtx label = gen_label_rtx ();
3540 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3541 expand_exit_loop (NULL_PTR);
3542 emit_label (label);
3543 }
3544 return const0_rtx;
3545
3546 case LOOP_EXPR:
3547 expand_start_loop (1);
3548 expand_expr_stmt (TREE_OPERAND (exp, 0));
3549 expand_end_loop ();
3550
3551 return const0_rtx;
3552
3553 case BIND_EXPR:
3554 {
3555 tree vars = TREE_OPERAND (exp, 0);
3556 int vars_need_expansion = 0;
3557
3558 /* Need to open a binding contour here because
 3559 if there are any cleanups they must be contained here. */
3560 expand_start_bindings (0);
3561
3562 /* Mark the corresponding BLOCK for output in its proper place. */
3563 if (TREE_OPERAND (exp, 2) != 0
3564 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3565 insert_block (TREE_OPERAND (exp, 2));
3566
3567 /* If VARS have not yet been expanded, expand them now. */
3568 while (vars)
3569 {
3570 if (DECL_RTL (vars) == 0)
3571 {
3572 vars_need_expansion = 1;
3573 expand_decl (vars);
3574 }
3575 expand_decl_init (vars);
3576 vars = TREE_CHAIN (vars);
3577 }
3578
3579 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3580
3581 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3582
3583 return temp;
3584 }
3585
3586 case RTL_EXPR:
3587 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3588 abort ();
3589 emit_insns (RTL_EXPR_SEQUENCE (exp));
3590 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3591 return RTL_EXPR_RTL (exp);
3592
3593 case CONSTRUCTOR:
3594 /* All elts simple constants => refer to a constant in memory. But
3595 if this is a non-BLKmode mode, let it store a field at a time
3596 since that should make a CONST_INT or CONST_DOUBLE when we
3597 fold. */
3598 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
3599 {
3600 rtx constructor = output_constant_def (exp);
3601 if (modifier != EXPAND_CONST_ADDRESS
3602 && modifier != EXPAND_INITIALIZER
3603 && modifier != EXPAND_SUM
3604 && !memory_address_p (GET_MODE (constructor),
3605 XEXP (constructor, 0)))
3606 constructor = change_address (constructor, VOIDmode,
3607 XEXP (constructor, 0));
3608 return constructor;
3609 }
3610
3611 if (ignore)
3612 {
3613 tree elt;
3614 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3615 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3616 return const0_rtx;
3617 }
3618 else
3619 {
3620 if (target == 0 || ! safe_from_p (target, exp))
3621 {
3622 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3623 target = gen_reg_rtx (mode);
3624 else
3625 {
3626 enum tree_code c = TREE_CODE (type);
3627 target
3628 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3629 if (c == RECORD_TYPE || c == UNION_TYPE
3630 || c == QUAL_UNION_TYPE || c == ARRAY_TYPE)
 3631 MEM_IN_STRUCT_P (target) = 1;
3632 }
3633 }
3634 store_constructor (exp, target);
3635 return target;
3636 }
3637
3638 case INDIRECT_REF:
3639 {
3640 tree exp1 = TREE_OPERAND (exp, 0);
3641 tree exp2;
3642
3643 /* A SAVE_EXPR as the address in an INDIRECT_EXPR is generated
3644 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3645 This code has the same general effect as simply doing
3646 expand_expr on the save expr, except that the expression PTR
3647 is computed for use as a memory address. This means different
3648 code, suitable for indexing, may be generated. */
3649 if (TREE_CODE (exp1) == SAVE_EXPR
3650 && SAVE_EXPR_RTL (exp1) == 0
3651 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3652 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3653 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3654 {
3655 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3656 VOIDmode, EXPAND_SUM);
3657 op0 = memory_address (mode, temp);
3658 op0 = copy_all_regs (op0);
3659 SAVE_EXPR_RTL (exp1) = op0;
3660 }
3661 else
3662 {
 3663 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
3664 op0 = memory_address (mode, op0);
3665 }
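	/* Illustrative sketch (not from the original source): for a
	   statement like

	       *p += 1;

	   the front end wraps P in a SAVE_EXPR, giving
	   *(SAVE_EXPR p) = *(SAVE_EXPR p) + 1.  Expanding the saved
	   pointer with EXPAND_SUM can yield an address form such as
	   (plus (reg) (const_int 4)), which memory_address can use
	   directly for indexed addressing instead of first forcing the
	   whole sum into a register.  */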
3666
3667 temp = gen_rtx (MEM, mode, op0);
3668 /* If address was computed by addition,
3669 mark this as an element of an aggregate. */
3670 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3671 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3672 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3673 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3674 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3675 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
 3676 || TREE_CODE (TREE_TYPE (exp)) == QUAL_UNION_TYPE
3677 || (TREE_CODE (exp1) == ADDR_EXPR
3678 && (exp2 = TREE_OPERAND (exp1, 0))
3679 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3680 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3681 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE
3682 || TREE_CODE (TREE_TYPE (exp2)) == QUAL_UNION_TYPE)))
 3683 MEM_IN_STRUCT_P (temp) = 1;
 3684 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
 3685 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3686 a location is accessed through a pointer to const does not mean
3687 that the value there can never change. */
 3688 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
 3689 #endif
3690 return temp;
3691 }
3692
3693 case ARRAY_REF:
3694 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
3695 abort ();
 3696
 3697 {
3698 tree array = TREE_OPERAND (exp, 0);
3699 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
3700 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
3701 tree index = TREE_OPERAND (exp, 1);
3702 tree index_type = TREE_TYPE (index);
 3703 int i;
 3704
3705 /* Optimize the special-case of a zero lower bound. */
3706 if (! integer_zerop (low_bound))
3707 index = fold (build (MINUS_EXPR, index_type, index, low_bound));
3708
3709 if (TREE_CODE (index) != INTEGER_CST
3710 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3711 {
3712 /* Nonconstant array index or nonconstant element size.
3713 Generate the tree for *(&array+index) and expand that,
3714 except do it in a language-independent way
3715 and don't complain about non-lvalue arrays.
3716 `mark_addressable' should already have been called
3717 for any array for which this case will be reached. */
3718
3719 /* Don't forget the const or volatile flag from the array
3720 element. */
3721 tree variant_type = build_type_variant (type,
3722 TREE_READONLY (exp),
3723 TREE_THIS_VOLATILE (exp));
3724 tree array_adr = build1 (ADDR_EXPR,
3725 build_pointer_type (variant_type), array);
3726 tree elt;
3727
3728 /* Convert the integer argument to a type the same size as a
3729 pointer so the multiply won't overflow spuriously. */
3730 if (TYPE_PRECISION (index_type) != POINTER_SIZE)
3731 index = convert (type_for_size (POINTER_SIZE, 0), index);
3732
3733 /* Don't think the address has side effects
3734 just because the array does.
3735 (In some cases the address might have side effects,
3736 and we fail to record that fact here. However, it should not
3737 matter, since expand_expr should not care.) */
3738 TREE_SIDE_EFFECTS (array_adr) = 0;
3739
3740 elt = build1 (INDIRECT_REF, type,
3741 fold (build (PLUS_EXPR,
3742 TYPE_POINTER_TO (variant_type),
3743 array_adr,
3744 fold (build (MULT_EXPR,
3745 TYPE_POINTER_TO (variant_type),
3746 index,
3747 size_in_bytes (type))))));
3748
3749 /* Volatility, etc., of new expression is same as old
3750 expression. */
3751 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3752 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3753 TREE_READONLY (elt) = TREE_READONLY (exp);
3754
3755 return expand_expr (elt, target, tmode, modifier);
3756 }
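	    /* Illustrative sketch (not from the original source): for
	       an array A of int and a variable index I, the tree built
	       above is roughly

		   *(int *) ((int *) &a + i * sizeof (int))

	       i.e. take the array's address, scale the index by the
	       element size in a pointer-sized type, add, and expand the
	       resulting INDIRECT_REF.  */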
3757
3758 /* Fold an expression like: "foo"[2].
3759 This is not done in fold so it won't happen inside &. */
3760
3761 if (TREE_CODE (array) == STRING_CST
3762 && TREE_CODE (index) == INTEGER_CST
3763 && !TREE_INT_CST_HIGH (index)
3764 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array))
 3765 {
 3766 if (TREE_TYPE (TREE_TYPE (array)) == integer_type_node)
 3767 {
 3768 exp = build_int_2 (((int *)TREE_STRING_POINTER (array))[i], 0);
3769 TREE_TYPE (exp) = integer_type_node;
3770 return expand_expr (exp, target, tmode, modifier);
3771 }
 3772 if (TREE_TYPE (TREE_TYPE (array)) == char_type_node)
 3773 {
 3774 exp = build_int_2 (TREE_STRING_POINTER (array)[i], 0);
 3775 TREE_TYPE (exp) = integer_type_node;
3776 return expand_expr (convert (TREE_TYPE (TREE_TYPE (array)),
3777 exp),
3778 target, tmode, modifier);
bbf6f052
RK
3779 }
3780 }
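	/* Illustrative worked example (not from the original source):
	   "foo"[2] has a STRING_CST operand and constant index 2, so
	   the code above fetches TREE_STRING_POINTER ("foo")[2], i.e.
	   'o' (ASCII 111), and expands it as the integer constant 111
	   instead of materializing the string in memory.  */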
 3781
3782 /* If this is a constant index into a constant array,
3783 just get the value from the array. Handle both the cases when
3784 we have an explicit constructor and when our operand is a variable
3785 that was declared const. */
 3786
3787 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
3788 {
3789 if (TREE_CODE (index) == INTEGER_CST
3790 && TREE_INT_CST_HIGH (index) == 0)
3791 {
3792 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3793
3794 i = TREE_INT_CST_LOW (index);
3795 while (elem && i--)
3796 elem = TREE_CHAIN (elem);
3797 if (elem)
3798 return expand_expr (fold (TREE_VALUE (elem)), target,
3799 tmode, modifier);
3800 }
3801 }
 3802
3803 else if (optimize >= 1
3804 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
3805 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
3806 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
3807 {
3808 if (TREE_CODE (index) == INTEGER_CST
3809 && TREE_INT_CST_HIGH (index) == 0)
3810 {
3811 tree init = DECL_INITIAL (array);
3812
3813 i = TREE_INT_CST_LOW (index);
3814 if (TREE_CODE (init) == CONSTRUCTOR)
3815 {
3816 tree elem = CONSTRUCTOR_ELTS (init);
3817
3818 while (elem && i--)
3819 elem = TREE_CHAIN (elem);
3820 if (elem)
3821 return expand_expr (fold (TREE_VALUE (elem)), target,
3822 tmode, modifier);
3823 }
3824 else if (TREE_CODE (init) == STRING_CST
3825 && i < TREE_STRING_LENGTH (init))
3826 {
3827 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
3828 return convert_to_mode (mode, temp, 0);
3829 }
3830 }
3831 }
3832 }
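	/* Illustrative sketch (not from the original source): given

	       static const int t[] = { 10, 20, 30 };
	       ... t[1] ...

	   DECL_INITIAL (t) is a CONSTRUCTOR, so with optimization on we
	   walk to the second element and simply expand the constant 20;
	   no memory reference to T is generated.  */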
 3833
3834 /* Treat array-ref with constant index as a component-ref. */
3835
3836 case COMPONENT_REF:
3837 case BIT_FIELD_REF:
3838 /* If the operand is a CONSTRUCTOR, we can just extract the
3839 appropriate field if it is present. */
3840 if (code != ARRAY_REF
3841 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3842 {
3843 tree elt;
3844
3845 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3846 elt = TREE_CHAIN (elt))
3847 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3848 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3849 }
3850
3851 {
3852 enum machine_mode mode1;
3853 int bitsize;
3854 int bitpos;
 3855 tree offset;
 3856 int volatilep = 0;
 3857 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
3858 &mode1, &unsignedp, &volatilep);
3859
3860 /* If we got back the original object, something is wrong. Perhaps
3861 we are evaluating an expression too early. In any event, don't
3862 infinitely recurse. */
3863 if (tem == exp)
3864 abort ();
3865
3866 /* In some cases, we will be offsetting OP0's address by a constant.
3867 So get it as a sum, if possible. If we will be using it
3868 directly in an insn, we validate it. */
 3869 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
 3870
 3871 /* If this is a constant, put it into a register if it is a
 3872 legitimate constant and memory if it isn't. */
3873 if (CONSTANT_P (op0))
3874 {
3875 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
 3876 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
3877 op0 = force_reg (mode, op0);
3878 else
3879 op0 = validize_mem (force_const_mem (mode, op0));
3880 }
3881
3882 if (offset != 0)
3883 {
 3884 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3885
3886 if (GET_CODE (op0) != MEM)
3887 abort ();
3888 op0 = change_address (op0, VOIDmode,
3889 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3890 force_reg (Pmode, offset_rtx)));
3891 }
3892
3893 /* Don't forget about volatility even if this is a bitfield. */
3894 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
3895 {
3896 op0 = copy_rtx (op0);
3897 MEM_VOLATILE_P (op0) = 1;
3898 }
3899
3900 if (mode1 == VOIDmode
3901 || (mode1 != BLKmode && ! direct_load[(int) mode1]
3902 && modifier != EXPAND_CONST_ADDRESS
3903 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
3904 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
3905 {
3906 /* In cases where an aligned union has an unaligned object
3907 as a field, we might be extracting a BLKmode value from
3908 an integer-mode (e.g., SImode) object. Handle this case
3909 by doing the extract into an object as wide as the field
3910 (which we know to be the width of a basic mode), then
3911 storing into memory, and changing the mode to BLKmode. */
3912 enum machine_mode ext_mode = mode;
3913
3914 if (ext_mode == BLKmode)
3915 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
3916
3917 if (ext_mode == BLKmode)
3918 abort ();
3919
3920 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
3921 unsignedp, target, ext_mode, ext_mode,
3922 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
3923 int_size_in_bytes (TREE_TYPE (tem)));
3924 if (mode == BLKmode)
3925 {
3926 rtx new = assign_stack_temp (ext_mode,
3927 bitsize / BITS_PER_UNIT, 0);
3928
3929 emit_move_insn (new, op0);
3930 op0 = copy_rtx (new);
3931 PUT_MODE (op0, BLKmode);
3932 }
3933
3934 return op0;
3935 }
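	    /* Illustrative sketch (not from the original source): if an
	       aligned union lives in an SImode register and we need a
	       32-bit BLKmode field out of it, EXT_MODE becomes SImode;
	       we pull the bits out with extract_bit_field, spill them
	       to a stack temporary, and relabel that MEM as BLKmode so
	       the caller sees an ordinary BLKmode reference.  */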
3936
3937 /* Get a reference to just this component. */
3938 if (modifier == EXPAND_CONST_ADDRESS
3939 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
3940 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
3941 (bitpos / BITS_PER_UNIT)));
3942 else
3943 op0 = change_address (op0, mode1,
3944 plus_constant (XEXP (op0, 0),
3945 (bitpos / BITS_PER_UNIT)));
3946 MEM_IN_STRUCT_P (op0) = 1;
3947 MEM_VOLATILE_P (op0) |= volatilep;
3948 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
3949 return op0;
3950 if (target == 0)
3951 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
3952 convert_move (target, op0, unsignedp);
3953 return target;
3954 }
3955
3956 case OFFSET_REF:
3957 {
3958 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
3959 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
 3960 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
3961 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
3962 MEM_IN_STRUCT_P (temp) = 1;
 3963 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
 3964 #if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3965 a location is accessed through a pointer to const does not mean
3966 that the value there can never change. */
3967 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
 3968 #endif
3969 return temp;
3970 }
3971
3972 /* Intended for a reference to a buffer of a file-object in Pascal.
3973 But it's not certain that a special tree code will really be
3974 necessary for these. INDIRECT_REF might work for them. */
3975 case BUFFER_REF:
3976 abort ();
3977
3978 /* IN_EXPR: Inlined pascal set IN expression.
3979
3980 Algorithm:
3981 rlo = set_low - (set_low%bits_per_word);
3982 the_word = set [ (index - rlo)/bits_per_word ];
3983 bit_index = index % bits_per_word;
3984 bitmask = 1 << bit_index;
3985 return !!(the_word & bitmask); */
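    /* Illustrative worked example (not from the original source), with
       bits_per_word == 8: for set_low == 3 and index == 13,
       rlo = 3 - (3 % 8) = 0, the_word = set[(13 - 0) / 8] = set[1],
       bit_index = 13 % 8 = 5, and bitmask = 1 << 5 = 32; the result is
       nonzero iff bit 5 of the set's second byte is set.  */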
3986 case IN_EXPR:
3987 preexpand_calls (exp);
3988 {
3989 tree set = TREE_OPERAND (exp, 0);
3990 tree index = TREE_OPERAND (exp, 1);
3991 tree set_type = TREE_TYPE (set);
3992
3993 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
3994 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
3995
3996 rtx index_val;
3997 rtx lo_r;
3998 rtx hi_r;
3999 rtx rlow;
4000 rtx diff, quo, rem, addr, bit, result;
4001 rtx setval, setaddr;
4002 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4003
4004 if (target == 0)
 4005 target = gen_reg_rtx (mode);
4006
4007 /* If domain is empty, answer is no. */
4008 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4009 return const0_rtx;
4010
4011 index_val = expand_expr (index, 0, VOIDmode, 0);
4012 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4013 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4014 setval = expand_expr (set, 0, VOIDmode, 0);
4015 setaddr = XEXP (setval, 0);
4016
4017 /* Compare index against bounds, if they are constant. */
4018 if (GET_CODE (index_val) == CONST_INT
4019 && GET_CODE (lo_r) == CONST_INT
4020 && INTVAL (index_val) < INTVAL (lo_r))
4021 return const0_rtx;
4022
4023 if (GET_CODE (index_val) == CONST_INT
4024 && GET_CODE (hi_r) == CONST_INT
4025 && INTVAL (hi_r) < INTVAL (index_val))
4026 return const0_rtx;
4027
4028 /* If we get here, we have to generate the code for both cases
4029 (in range and out of range). */
4030
4031 op0 = gen_label_rtx ();
4032 op1 = gen_label_rtx ();
4033
4034 if (! (GET_CODE (index_val) == CONST_INT
4035 && GET_CODE (lo_r) == CONST_INT))
4036 {
4037 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4038 GET_MODE (index_val), 0, 0);
4039 emit_jump_insn (gen_blt (op1));
4040 }
4041
4042 if (! (GET_CODE (index_val) == CONST_INT
4043 && GET_CODE (hi_r) == CONST_INT))
4044 {
4045 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4046 GET_MODE (index_val), 0, 0);
4047 emit_jump_insn (gen_bgt (op1));
4048 }
4049
4050 /* Calculate the element number of bit zero in the first word
4051 of the set. */
4052 if (GET_CODE (lo_r) == CONST_INT)
4053 rlow = GEN_INT (INTVAL (lo_r)
4054 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
 4055 else
4056 rlow = expand_binop (index_mode, and_optab, lo_r,
4057 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4058 NULL_RTX, 0, OPTAB_LIB_WIDEN);
4059
4060 diff = expand_binop (index_mode, sub_optab,
 4061 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
4062
4063 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
 4064 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
 4065 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
 4066 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
4067 addr = memory_address (byte_mode,
4068 expand_binop (index_mode, add_optab,
4069 diff, setaddr, NULL_RTX, 0,
4070 OPTAB_LIB_WIDEN));
 4071 /* Extract the bit we want to examine.  */
4072 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4073 gen_rtx (MEM, byte_mode, addr),
4074 make_tree (TREE_TYPE (index), rem),
4075 NULL_RTX, 1);
4076 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4077 GET_MODE (target) == byte_mode ? target : 0,
 4078 1, OPTAB_LIB_WIDEN);
4079
4080 if (result != target)
4081 convert_move (target, result, 1);
4082
4083 /* Output the code to handle the out-of-range case. */
4084 emit_jump (op0);
4085 emit_label (op1);
4086 emit_move_insn (target, const0_rtx);
4087 emit_label (op0);
4088 return target;
4089 }
4090
4091 case WITH_CLEANUP_EXPR:
4092 if (RTL_EXPR_RTL (exp) == 0)
4093 {
4094 RTL_EXPR_RTL (exp)
4095 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
4096 cleanups_this_call
4097 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
4098 /* That's it for this cleanup. */
4099 TREE_OPERAND (exp, 2) = 0;
4100 }
4101 return RTL_EXPR_RTL (exp);
4102
4103 case CALL_EXPR:
4104 /* Check for a built-in function. */
4105 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4106 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4107 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4108 return expand_builtin (exp, target, subtarget, tmode, ignore);
4109 /* If this call was expanded already by preexpand_calls,
4110 just return the result we got. */
4111 if (CALL_EXPR_RTL (exp) != 0)
4112 return CALL_EXPR_RTL (exp);
 4113 return expand_call (exp, target, ignore);
4114
4115 case NON_LVALUE_EXPR:
4116 case NOP_EXPR:
4117 case CONVERT_EXPR:
4118 case REFERENCE_EXPR:
4119 if (TREE_CODE (type) == VOID_TYPE || ignore)
4120 {
4121 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4122 return const0_rtx;
4123 }
4124 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4125 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4126 if (TREE_CODE (type) == UNION_TYPE)
4127 {
4128 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4129 if (target == 0)
4130 {
4131 if (mode == BLKmode)
4132 {
4133 if (TYPE_SIZE (type) == 0
4134 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4135 abort ();
4136 target = assign_stack_temp (BLKmode,
4137 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4138 + BITS_PER_UNIT - 1)
4139 / BITS_PER_UNIT, 0);
4140 }
4141 else
4142 target = gen_reg_rtx (mode);
4143 }
4144 if (GET_CODE (target) == MEM)
4145 /* Store data into beginning of memory target. */
4146 store_expr (TREE_OPERAND (exp, 0),
4147 change_address (target, TYPE_MODE (valtype), 0), 0);
4148
4149 else if (GET_CODE (target) == REG)
4150 /* Store this field into a union of the proper type. */
4151 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4152 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4153 VOIDmode, 0, 1,
4154 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4155 else
4156 abort ();
4157
4158 /* Return the entire union. */
4159 return target;
4160 }
 4161 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
4162 if (GET_MODE (op0) == mode)
4163 return op0;
4164 /* If arg is a constant integer being extended from a narrower mode,
4165 we must really truncate to get the extended bits right. Otherwise
4166 (unsigned long) (unsigned char) ("\377"[0])
4167 would come out as ffffffff. */
4168 if (GET_MODE (op0) == VOIDmode
4169 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4170 < GET_MODE_BITSIZE (mode)))
4171 {
 4172 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
4173 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4174
4175 if (width < HOST_BITS_PER_WIDE_INT)
4176 {
4177 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4178 : CONST_DOUBLE_LOW (op0));
4179 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4180 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4181 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4182 else
4183 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4184
4185 op0 = GEN_INT (val);
4186 }
4187 else
4188 {
4189 op0 = (simplify_unary_operation
4190 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4191 ? ZERO_EXTEND : SIGN_EXTEND),
4192 mode, op0,
4193 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4194 if (op0 == 0)
4195 abort ();
4196 }
4197 }
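	  /* Illustrative worked example (not from the original source):
	     in (unsigned long) (unsigned char) ("\377"[0]), the char
	     '\377' is the constant -1, i.e. all-ones in a VOIDmode
	     CONST_INT.  WIDTH is 8 and the inner type is unsigned, so
	     val &= (1 << 8) - 1 leaves 255, and the outer conversion
	     sees 0xff rather than 0xffffffff.  */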
4198 if (GET_MODE (op0) == VOIDmode)
bbf6f052 4199 return op0;
4200 if (modifier == EXPAND_INITIALIZER)
4201 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
4202 if (flag_force_mem && GET_CODE (op0) == MEM)
4203 op0 = copy_to_reg (op0);
4204
4205 if (target == 0)
4206 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4207 else
4208 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4209 return target;
4210
4211 case PLUS_EXPR:
4212 /* We come here from MINUS_EXPR when the second operand is a constant. */
4213 plus_expr:
4214 this_optab = add_optab;
4215
4216 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4217 something else, make sure we add the register to the constant and
4218 then to the other thing. This case can occur during strength
4219 reduction and doing it this way will produce better code if the
4220 frame pointer or argument pointer is eliminated.
4221
4222 fold-const.c will ensure that the constant is always in the inner
4223 PLUS_EXPR, so the only case we need to do anything about is if
4224 sp, ap, or fp is our second argument, in which case we must swap
4225 the innermost first argument and our second argument. */
4226
4227 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4228 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4229 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4230 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4231 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4232 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4233 {
4234 tree t = TREE_OPERAND (exp, 1);
4235
4236 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4237 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4238 }
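      /* Illustrative sketch (not from the original source): given the
	 tree (A + 4) + FP, where FP is an RTL_EXPR for the frame
	 pointer, the swap above rewrites it as (FP + 4) + A, so the
	 register-plus-constant part stays together and survives frame
	 pointer elimination as a single offset.  */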
4239
4240 /* If the result is to be Pmode and we are adding an integer to
4241 something, we might be forming a constant. So try to use
4242 plus_constant. If it produces a sum and we can't accept it,
4243 use force_operand. This allows P = &ARR[const] to generate
4244 efficient code on machines where a SYMBOL_REF is not a valid
4245 address.
4246
4247 If this is an EXPAND_SUM call, always return the sum. */
4248 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
 4249 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4250 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4251 || mode == Pmode))
4252 {
4253 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4254 EXPAND_SUM);
4255 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4256 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4257 op1 = force_operand (op1, target);
4258 return op1;
4259 }
4260
4261 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4262 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
4263 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4264 || mode == Pmode))
4265 {
4266 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4267 EXPAND_SUM);
4268 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4269 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4270 op0 = force_operand (op0, target);
4271 return op0;
4272 }
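      /* Illustrative sketch (not from the original source): for
	 P = &ARR[3] with 4-byte elements, the address expands to
	 (symbol_ref ARR) and plus_constant folds in 12, giving
	 (const (plus (symbol_ref ARR) (const_int 12))).  If such a
	 constant is not a valid address on the target and this is not
	 an EXPAND_SUM request, force_operand loads it into a register
	 instead.  */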
4273
4274 /* No sense saving up arithmetic to be done
4275 if it's all in the wrong mode to form part of an address.
4276 And force_operand won't know whether to sign-extend or
4277 zero-extend. */
4278 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4279 || mode != Pmode) goto binop;
4280
4281 preexpand_calls (exp);
4282 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4283 subtarget = 0;
4284
4285 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
 4286 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
4287
4288 /* Make sure any term that's a sum with a constant comes last. */
4289 if (GET_CODE (op0) == PLUS
4290 && CONSTANT_P (XEXP (op0, 1)))
4291 {
4292 temp = op0;
4293 op0 = op1;
4294 op1 = temp;
4295 }
4296 /* If adding to a sum including a constant,
4297 associate it to put the constant outside. */
4298 if (GET_CODE (op1) == PLUS
4299 && CONSTANT_P (XEXP (op1, 1)))
4300 {
4301 rtx constant_term = const0_rtx;
4302
4303 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4304 if (temp != 0)
4305 op0 = temp;
4306 /* Ensure that MULT comes first if there is one. */
4307 else if (GET_CODE (op0) == MULT)
4308 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
4309 else
4310 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4311
4312 /* Let's also eliminate constants from op0 if possible. */
4313 op0 = eliminate_constant_term (op0, &constant_term);
4314
4315 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4316 their sum should be a constant. Form it into OP1, since the
4317 result we want will then be OP0 + OP1. */
4318
4319 temp = simplify_binary_operation (PLUS, mode, constant_term,
4320 XEXP (op1, 1));
4321 if (temp != 0)
4322 op1 = temp;
4323 else
4324 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4325 }
4326
4327 /* Put a constant term last and put a multiplication first. */
4328 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4329 temp = op1, op1 = op0, op0 = temp;
4330
4331 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4332 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4333
4334 case MINUS_EXPR:
4335 /* Handle difference of two symbolic constants,
4336 for the sake of an initializer. */
4337 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4338 && really_constant_p (TREE_OPERAND (exp, 0))
4339 && really_constant_p (TREE_OPERAND (exp, 1)))
4340 {
4341 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4342 VOIDmode, modifier);
4343 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4344 VOIDmode, modifier);
4345 return gen_rtx (MINUS, mode, op0, op1);
4346 }
4347 /* Convert A - const to A + (-const). */
4348 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4349 {
4350 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4351 fold (build1 (NEGATE_EXPR, type,
4352 TREE_OPERAND (exp, 1))));
4353 goto plus_expr;
4354 }
4355 this_optab = sub_optab;
4356 goto binop;
4357
4358 case MULT_EXPR:
4359 preexpand_calls (exp);
4360 /* If first operand is constant, swap them.
4361 Thus the following special case checks need only
4362 check the second operand. */
4363 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4364 {
4365 register tree t1 = TREE_OPERAND (exp, 0);
4366 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4367 TREE_OPERAND (exp, 1) = t1;
4368 }
4369
4370 /* Attempt to return something suitable for generating an
4371 indexed address, for machines that support that. */
4372
4373 if (modifier == EXPAND_SUM && mode == Pmode
4374 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
 4375 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
4376 {
4377 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4378
4379 /* Apply distributive law if OP0 is x+c. */
4380 if (GET_CODE (op0) == PLUS
4381 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4382 return gen_rtx (PLUS, mode,
4383 gen_rtx (MULT, mode, XEXP (op0, 0),
4384 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4385 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4386 * INTVAL (XEXP (op0, 1))));
4387
4388 if (GET_CODE (op0) != REG)
 4389 op0 = force_operand (op0, NULL_RTX);
4390 if (GET_CODE (op0) != REG)
4391 op0 = copy_to_mode_reg (mode, op0);
4392
4393 return gen_rtx (MULT, mode, op0,
 4394 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
4395 }
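	  /* Illustrative sketch (not from the original source): with
	     OP0 = (plus X (const_int 4)) and a constant multiplier 3,
	     the distributive rewrite above returns
	     (plus (mult X 3) (const_int 12)), a form that fits directly
	     into base-plus-displacement addressing.  */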
4396
4397 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4398 subtarget = 0;
4399
4400 /* Check for multiplying things that have been extended
4401 from a narrower type. If this machine supports multiplying
4402 in that narrower type with a result in the desired type,
4403 do it that way, and avoid the explicit type-conversion. */
4404 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4405 && TREE_CODE (type) == INTEGER_TYPE
4406 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4407 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4408 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4409 && int_fits_type_p (TREE_OPERAND (exp, 1),
4410 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4411 /* Don't use a widening multiply if a shift will do. */
4412 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
 4413 > HOST_BITS_PER_WIDE_INT)
4414 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4415 ||
4416 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4417 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4418 ==
4419 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4420 /* If both operands are extended, they must either both
4421 be zero-extended or both be sign-extended. */
4422 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4423 ==
4424 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4425 {
4426 enum machine_mode innermode
4427 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4428 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4429 ? umul_widen_optab : smul_widen_optab);
4430 if (mode == GET_MODE_WIDER_MODE (innermode)
4431 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4432 {
4433 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
 4434 NULL_RTX, VOIDmode, 0);
 4435 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4436 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4437 VOIDmode, 0);
4438 else
4439 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
 4440 NULL_RTX, VOIDmode, 0);
4441 goto binop2;
4442 }
4443 }
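	  /* Illustrative sketch (not from the original source): for
	     (int) sh1 * (int) sh2 with 16-bit shorts and a 32-bit int,
	     both operands are NOP_EXPRs around HImode values, so when
	     the target provides an SImode-result widening multiply
	     (smul_widen_optab / umul_widen_optab) we multiply the
	     narrow operands directly and never emit the two explicit
	     extensions.  */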
4444 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
 4445 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4446 return expand_mult (mode, op0, op1, target, unsignedp);
4447
4448 case TRUNC_DIV_EXPR:
4449 case FLOOR_DIV_EXPR:
4450 case CEIL_DIV_EXPR:
4451 case ROUND_DIV_EXPR:
4452 case EXACT_DIV_EXPR:
4453 preexpand_calls (exp);
4454 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4455 subtarget = 0;
4456 /* Possible optimization: compute the dividend with EXPAND_SUM
4457 then if the divisor is constant can optimize the case
4458 where some terms of the dividend have coeffs divisible by it. */
4459 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
 4460 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4461 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4462
4463 case RDIV_EXPR:
4464 this_optab = flodiv_optab;
4465 goto binop;
4466
4467 case TRUNC_MOD_EXPR:
4468 case FLOOR_MOD_EXPR:
4469 case CEIL_MOD_EXPR:
4470 case ROUND_MOD_EXPR:
4471 preexpand_calls (exp);
4472 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4473 subtarget = 0;
4474 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
 4475 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4476 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4477
4478 case FIX_ROUND_EXPR:
4479 case FIX_FLOOR_EXPR:
4480 case FIX_CEIL_EXPR:
4481 abort (); /* Not used for C. */
4482
4483 case FIX_TRUNC_EXPR:
 4484 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4485 if (target == 0)
4486 target = gen_reg_rtx (mode);
4487 expand_fix (target, op0, unsignedp);
4488 return target;
4489
4490 case FLOAT_EXPR:
 4491 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
4492 if (target == 0)
4493 target = gen_reg_rtx (mode);
4494 /* expand_float can't figure out what to do if FROM has VOIDmode.
4495 So give it the correct mode. With -O, cse will optimize this. */
4496 if (GET_MODE (op0) == VOIDmode)
4497 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4498 op0);
4499 expand_float (target, op0,
4500 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4501 return target;
4502
4503 case NEGATE_EXPR:
4504 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4505 temp = expand_unop (mode, neg_optab, op0, target, 0);
4506 if (temp == 0)
4507 abort ();
4508 return temp;
4509
4510 case ABS_EXPR:
4511 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4512
4513 /* Handle complex values specially. */
4514 {
4515 enum machine_mode opmode
4516 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4517
4518 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4519 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4520 return expand_complex_abs (opmode, op0, target, unsignedp);
4521 }
4522
4523 /* Unsigned abs is simply the operand. Testing here means we don't
4524 risk generating incorrect code below. */
4525 if (TREE_UNSIGNED (type))
4526 return op0;
4527
4528 /* First try to do it with a special abs instruction. */
4529 temp = expand_unop (mode, abs_optab, op0, target, 0);
4530 if (temp != 0)
4531 return temp;
4532
4533 /* If this machine has expensive jumps, we can do integer absolute
4534 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4535 where W is the width of MODE. */
4536
4537 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4538 {
4539 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4540 size_int (GET_MODE_BITSIZE (mode) - 1),
 4541 NULL_RTX, 0);
4542
4543 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4544 OPTAB_LIB_WIDEN);
4545 if (temp != 0)
4546 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4547 OPTAB_LIB_WIDEN);
4548
4549 if (temp != 0)
4550 return temp;
4551 }
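	  /* Illustrative worked example (not from the original source),
	     for an 8-bit mode (W == 8) and x == -5 (0xfb): the
	     arithmetic shift by W-1 gives EXTENDED == 0xff (-1);
	     x ^ -1 == 4; and 4 - (-1) == 5 == |x|.  For x >= 0,
	     EXTENDED is 0 and both steps leave x unchanged, all without
	     a branch.  */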
4552
4553 /* If that does not win, use conditional jump and negate. */
4554 target = original_target;
4555 temp = gen_label_rtx ();
4556 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4557 || (GET_CODE (target) == REG
4558 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4559 target = gen_reg_rtx (mode);
4560 emit_move_insn (target, op0);
4561 emit_cmp_insn (target,
4562 expand_expr (convert (type, integer_zero_node),
4563 NULL_RTX, VOIDmode, 0),
4564 GE, NULL_RTX, mode, 0, 0);
4565 NO_DEFER_POP;
4566 emit_jump_insn (gen_bge (temp));
4567 op0 = expand_unop (mode, neg_optab, target, target, 0);
4568 if (op0 != target)
4569 emit_move_insn (target, op0);
4570 emit_label (temp);
4571 OK_DEFER_POP;
4572 return target;
4573
4574 case MAX_EXPR:
4575 case MIN_EXPR:
4576 target = original_target;
4577 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4578 || (GET_CODE (target) == REG
4579 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4580 target = gen_reg_rtx (mode);
 4581 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
4582 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4583
4584 /* First try to do it with a special MIN or MAX instruction.
4585 If that does not win, use a conditional jump to select the proper
4586 value. */
4587 this_optab = (TREE_UNSIGNED (type)
4588 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4589 : (code == MIN_EXPR ? smin_optab : smax_optab));
4590
4591 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4592 OPTAB_WIDEN);
4593 if (temp != 0)
4594 return temp;
4595
4596 if (target != op0)
4597 emit_move_insn (target, op0);
4598 op0 = gen_label_rtx ();
4599 /* If this mode is an integer too wide to compare properly,
4600 compare word by word. Rely on cse to optimize constant cases. */
4601 if (GET_MODE_CLASS (mode) == MODE_INT
4602 && !can_compare_p (mode))
 4603 {
4604 if (code == MAX_EXPR)
4605 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), target, op1, NULL, op0);
 4606 else
 4607 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type), op1, target, NULL, op0);
4608 emit_move_insn (target, op1);
4609 }
4610 else
4611 {
4612 if (code == MAX_EXPR)
4613 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4614 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4615 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
4616 else
4617 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
4618 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4619 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
4620 if (temp == const0_rtx)
4621 emit_move_insn (target, op1);
4622 else if (temp != const_true_rtx)
4623 {
4624 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4625 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4626 else
4627 abort ();
4628 emit_move_insn (target, op1);
4629 }
4630 }
4631 emit_label (op0);
4632 return target;
4633
 4634 /* ??? Can optimize when the operand of this is a bitwise operation,
4635 by using a different bitwise operation. */
4636 case BIT_NOT_EXPR:
4637 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4638 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4639 if (temp == 0)
4640 abort ();
4641 return temp;
4642
4643 case FFS_EXPR:
4644 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4645 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4646 if (temp == 0)
4647 abort ();
4648 return temp;
4649
 4650 /* ??? Can optimize bitwise operations with one arg constant.
4651 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4652 and (a bitwise1 b) bitwise2 b (etc)
4653 but that is probably not worth while. */
4654
 4655 /* BIT_AND_EXPR is for bitwise anding.
4656 TRUTH_AND_EXPR is for anding two boolean values
4657 when we want in all cases to compute both of them.
4658 In general it is fastest to do TRUTH_AND_EXPR by
4659 computing both operands as actual zero-or-1 values
4660 and then bitwise anding. In cases where there cannot
4661 be any side effects, better code would be made by
4662 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4663 but the question is how to recognize those cases. */
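/* Illustrative sketch (not from the original source): for a
   source-level "f () AND g ()" that must evaluate both sides, both
   calls are expanded unconditionally to 0-or-1 values and the results
   are bitwise-anded; TRUTH_ANDIF_EXPR (C's f () && g ()) would instead
   skip g () whenever f () yields 0.  */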
4664
4665 case TRUTH_AND_EXPR:
4666 case BIT_AND_EXPR:
4667 this_optab = and_optab;
4668 goto binop;
4669
 4670 /* See comment above about TRUTH_AND_EXPR; it applies here too. */
4671 case TRUTH_OR_EXPR:
4672 case BIT_IOR_EXPR:
4673 this_optab = ior_optab;
4674 goto binop;
4675
 4676 case TRUTH_XOR_EXPR:
4677 case BIT_XOR_EXPR:
4678 this_optab = xor_optab;
4679 goto binop;
4680
4681 case LSHIFT_EXPR:
4682 case RSHIFT_EXPR:
4683 case LROTATE_EXPR:
4684 case RROTATE_EXPR:
4685 preexpand_calls (exp);
4686 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4687 subtarget = 0;
4688 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4689 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4690 unsignedp);
4691
 4692 /* Could determine the answer when only additive constants differ.
4693 Also, the addition of one can be handled by changing the condition. */
4694 case LT_EXPR:
4695 case LE_EXPR:
4696 case GT_EXPR:
4697 case GE_EXPR:
4698 case EQ_EXPR:
4699 case NE_EXPR:
4700 preexpand_calls (exp);
4701 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4702 if (temp != 0)
4703 return temp;
4704 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4705 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4706 && original_target
4707 && GET_CODE (original_target) == REG
4708 && (GET_MODE (original_target)
4709 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4710 {
4711 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4712 if (temp != original_target)
4713 temp = copy_to_reg (temp);
4714 op1 = gen_label_rtx ();
 4715 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
4716 GET_MODE (temp), unsignedp, 0);
4717 emit_jump_insn (gen_beq (op1));
4718 emit_move_insn (temp, const1_rtx);
4719 emit_label (op1);
4720 return temp;
4721 }
4722 /* If no set-flag instruction, must generate a conditional
4723 store into a temporary variable. Drop through
4724 and handle this like && and ||. */
4725
4726 case TRUTH_ANDIF_EXPR:
4727 case TRUTH_ORIF_EXPR:
4728 if (target == 0 || ! safe_from_p (target, exp)
4729 /* Make sure we don't have a hard reg (such as function's return
4730 value) live across basic blocks, if not optimizing. */
4731 || (!optimize && GET_CODE (target) == REG
4732 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4733 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4734 emit_clr_insn (target);
4735 op1 = gen_label_rtx ();
4736 jumpifnot (exp, op1);
4737 emit_0_to_1_insn (target);
4738 emit_label (op1);
4739 return target;
4740
4741 case TRUTH_NOT_EXPR:
4742 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4743 /* The parser is careful to generate TRUTH_NOT_EXPR
4744 only with operands that are always zero or one. */
 4745 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
4746 target, 1, OPTAB_LIB_WIDEN);
4747 if (temp == 0)
4748 abort ();
4749 return temp;
4750
4751 case COMPOUND_EXPR:
4752 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4753 emit_queue ();
4754 return expand_expr (TREE_OPERAND (exp, 1),
4755 (ignore ? const0_rtx : target),
4756 VOIDmode, 0);
4757
4758 case COND_EXPR:
4759 {
4760 /* Note that COND_EXPRs whose type is a structure or union
4761 are required to be constructed to contain assignments of
4762 a temporary variable, so that we can evaluate them here
4763 for side effect only. If type is void, we must do likewise. */
4764
4765 /* If an arm of the branch requires a cleanup,
4766 only that cleanup is performed. */
4767
4768 tree singleton = 0;
4769 tree binary_op = 0, unary_op = 0;
4770 tree old_cleanups = cleanups_this_call;
4771 cleanups_this_call = 0;
4772
4773 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4774 convert it to our mode, if necessary. */
4775 if (integer_onep (TREE_OPERAND (exp, 1))
4776 && integer_zerop (TREE_OPERAND (exp, 2))
4777 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4778 {
4779 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4780 if (GET_MODE (op0) == mode)
4781 return op0;
4782 if (target == 0)
4783 target = gen_reg_rtx (mode);
4784 convert_move (target, op0, unsignedp);
4785 return target;
4786 }
4787
4788 /* If we are not to produce a result, we have no target. Otherwise,
4789 if a target was specified use it; it will not be used as an
4790 intermediate target unless it is safe. If no target, use a
4791 temporary. */
4792
4793 if (mode == VOIDmode || ignore)
4794 temp = 0;
4795 else if (original_target
4796 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4797 temp = original_target;
4798 else if (mode == BLKmode)
4799 {
4800 if (TYPE_SIZE (type) == 0
4801 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4802 abort ();
4803 temp = assign_stack_temp (BLKmode,
4804 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4805 + BITS_PER_UNIT - 1)
4806 / BITS_PER_UNIT, 0);
4807 }
4808 else
4809 temp = gen_reg_rtx (mode);
4810
4811 /* Check for X ? A + B : A. If we have this, we can copy
4812 A to the output and conditionally add B. Similarly for unary
4813 operations. Don't do this if X has side-effects because
4814 those side effects might affect A or B and the "?" operation is
4815 a sequence point in ANSI. (We test for side effects later.) */
4816
4817 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4818 && operand_equal_p (TREE_OPERAND (exp, 2),
4819 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4820 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4821 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4822 && operand_equal_p (TREE_OPERAND (exp, 1),
4823 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4824 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4825 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4826 && operand_equal_p (TREE_OPERAND (exp, 2),
4827 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4828 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4829 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4830 && operand_equal_p (TREE_OPERAND (exp, 1),
4831 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4832 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4833
4834 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4835 operation, do this as A + (X != 0). Similarly for other simple
4836 binary operators. */
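      /* Illustrative worked example (not from the original source):
	 for n = c ? a + 1 : a, SINGLETON is A and BINARY_OP is A + 1;
	 do_store_flag computes T = (C != 0) as a 0-or-1 value, and the
	 result is simply A + T, with no labels or jumps.  */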
4837 if (singleton && binary_op
4838 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4839 && (TREE_CODE (binary_op) == PLUS_EXPR
4840 || TREE_CODE (binary_op) == MINUS_EXPR
4841 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4842 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4843 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4844 && integer_onep (TREE_OPERAND (binary_op, 1))
4845 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4846 {
4847 rtx result;
4848 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4849 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4850 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4851 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4852 : and_optab);
4853
4854 /* If we had X ? A : A + 1, do this as A + (X == 0).
4855
4856 We have to invert the truth value here and then put it
4857 back later if do_store_flag fails. We cannot simply copy
4858 TREE_OPERAND (exp, 0) to another variable and modify that
4859 because invert_truthvalue can modify the tree pointed to
4860 by its argument. */
4861 if (singleton == TREE_OPERAND (exp, 1))
4862 TREE_OPERAND (exp, 0)
4863 = invert_truthvalue (TREE_OPERAND (exp, 0));
4864
4865 result = do_store_flag (TREE_OPERAND (exp, 0),
4866 (safe_from_p (temp, singleton)
4867 ? temp : NULL_RTX),
4868 mode, BRANCH_COST <= 1);
4869
4870 if (result)
4871 {
 4872 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
4873 return expand_binop (mode, boptab, op1, result, temp,
4874 unsignedp, OPTAB_LIB_WIDEN);
4875 }
4876 else if (singleton == TREE_OPERAND (exp, 1))
4877 TREE_OPERAND (exp, 0)
4878 = invert_truthvalue (TREE_OPERAND (exp, 0));
4879 }
4880
4881 NO_DEFER_POP;
4882 op0 = gen_label_rtx ();
4883
4884 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4885 {
4886 if (temp != 0)
4887 {
4888 /* If the target conflicts with the other operand of the
4889 binary op, we can't use it. Also, we can't use the target
4890 if it is a hard register, because evaluating the condition
4891 might clobber it. */
4892 if ((binary_op
4893 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4894 || (GET_CODE (temp) == REG
4895 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4896 temp = gen_reg_rtx (mode);
4897 store_expr (singleton, temp, 0);
4898 }
4899 else
4900 expand_expr (singleton,
4901 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
4902 if (cleanups_this_call)
4903 {
4904 sorry ("aggregate value in COND_EXPR");
4905 cleanups_this_call = 0;
4906 }
4907 if (singleton == TREE_OPERAND (exp, 1))
4908 jumpif (TREE_OPERAND (exp, 0), op0);
4909 else
4910 jumpifnot (TREE_OPERAND (exp, 0), op0);
4911
4912 if (binary_op && temp == 0)
4913 /* Just touch the other operand. */
4914 expand_expr (TREE_OPERAND (binary_op, 1),
 4915 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
4916 else if (binary_op)
4917 store_expr (build (TREE_CODE (binary_op), type,
4918 make_tree (type, temp),
4919 TREE_OPERAND (binary_op, 1)),
4920 temp, 0);
4921 else
4922 store_expr (build1 (TREE_CODE (unary_op), type,
4923 make_tree (type, temp)),
4924 temp, 0);
4925 op1 = op0;
4926 }
 4927 #if 0
4928 /* This is now done in jump.c and is better done there because it
4929 produces shorter register lifetimes. */
4930
4931 /* Check for both possibilities either constants or variables
4932 in registers (but not the same as the target!). If so, can
4933 save branches by assigning one, branching, and assigning the
4934 other. */
4935 else if (temp && GET_MODE (temp) != BLKmode
4936 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
4937 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
4938 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
4939 && DECL_RTL (TREE_OPERAND (exp, 1))
4940 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
4941 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
4942 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
4943 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
4944 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
4945 && DECL_RTL (TREE_OPERAND (exp, 2))
4946 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
4947 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
4948 {
4949 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4950 temp = gen_reg_rtx (mode);
4951 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4952 jumpifnot (TREE_OPERAND (exp, 0), op0);
4953 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4954 op1 = op0;
4955 }
 4956 #endif
4957 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
4958 comparison operator. If we have one of these cases, set the
4959 output to A, branch on A (cse will merge these two references),
4960 then set the output to FOO. */
4961 else if (temp
4962 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4963 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4964 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4965 TREE_OPERAND (exp, 1), 0)
4966 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4967 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
4968 {
4969 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4970 temp = gen_reg_rtx (mode);
4971 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4972 jumpif (TREE_OPERAND (exp, 0), op0);
4973 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4974 op1 = op0;
4975 }
4976 else if (temp
4977 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
4978 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
4979 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
4980 TREE_OPERAND (exp, 2), 0)
4981 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4982 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
4983 {
4984 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
4985 temp = gen_reg_rtx (mode);
4986 store_expr (TREE_OPERAND (exp, 2), temp, 0);
4987 jumpifnot (TREE_OPERAND (exp, 0), op0);
4988 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4989 op1 = op0;
4990 }
4991 else
4992 {
4993 op1 = gen_label_rtx ();
4994 jumpifnot (TREE_OPERAND (exp, 0), op0);
4995 if (temp != 0)
4996 store_expr (TREE_OPERAND (exp, 1), temp, 0);
4997 else
906c4e36
RK
4998 expand_expr (TREE_OPERAND (exp, 1),
4999 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5000 if (cleanups_this_call)
5001 {
5002 sorry ("aggregate value in COND_EXPR");
5003 cleanups_this_call = 0;
5004 }
5005
5006 emit_queue ();
5007 emit_jump_insn (gen_jump (op1));
5008 emit_barrier ();
5009 emit_label (op0);
5010 if (temp != 0)
5011 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5012 else
906c4e36
RK
5013 expand_expr (TREE_OPERAND (exp, 2),
5014 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5015 }
5016
5017 if (cleanups_this_call)
5018 {
5019 sorry ("aggregate value in COND_EXPR");
5020 cleanups_this_call = 0;
5021 }
5022
5023 emit_queue ();
5024 emit_label (op1);
5025 OK_DEFER_POP;
5026 cleanups_this_call = old_cleanups;
5027 return temp;
5028 }
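      /* Editorial illustration (hypothetical source, not part of this
	 file): for the "A op 0 ? A : FOO" pattern handled above, an
	 expression such as

		r = (a != 0 ? a : b);

	 is emitted roughly as

		r = a;
		if (a != 0) goto done;
		r = b;
	      done: ;

	 so that cse can merge the two references to A.  */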
5029
5030 case TARGET_EXPR:
5031 {
5032 /* Something needs to be initialized, but we didn't know
5033 where that thing was when building the tree. For example,
5034 it could be the return value of a function, or a parameter
5035	 to a function which is laid out in the stack, or a temporary
5036 variable which must be passed by reference.
5037
5038 We guarantee that the expression will either be constructed
5039 or copied into our original target. */
5040
5041 tree slot = TREE_OPERAND (exp, 0);
5c062816 5042 tree exp1;
bbf6f052
RK
5043
5044 if (TREE_CODE (slot) != VAR_DECL)
5045 abort ();
5046
5047 if (target == 0)
5048 {
5049 if (DECL_RTL (slot) != 0)
ac993f4f
MS
5050 {
5051 target = DECL_RTL (slot);
5c062816 5052	 /* We have already expanded the slot, so don't do
ac993f4f 5053	 it again. (mrs) */
5c062816
MS
5054 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5055 return target;
ac993f4f 5056 }
bbf6f052
RK
5057 else
5058 {
5059 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5060 /* All temp slots at this level must not conflict. */
5061 preserve_temp_slots (target);
5062 DECL_RTL (slot) = target;
5063 }
5064
5065#if 0
ac993f4f
MS
5066 /* I bet this needs to be done, and I bet that it needs to
5067	 be above, inside the else clause. The reason is
5068	 simple: how else is it going to get cleaned up? (mrs)
5069
5070	 The reason it probably did not work before, and was
5071	 commented out, is that this was re-expanding already
5072	 expanded target_exprs (target == 0 and DECL_RTL (slot)
5073	 != 0), also cleaning them up many times as well. :-( */
5074
bbf6f052
RK
5075 /* Since SLOT is not known to the called function
5076 to belong to its stack frame, we must build an explicit
5077 cleanup. This case occurs when we must build up a reference
5078 to pass the reference as an argument. In this case,
5079 it is very likely that such a reference need not be
5080 built here. */
5081
5082 if (TREE_OPERAND (exp, 2) == 0)
5083 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5084 if (TREE_OPERAND (exp, 2))
906c4e36
RK
5085 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5086 cleanups_this_call);
bbf6f052
RK
5087#endif
5088 }
5089 else
5090 {
5091 /* This case does occur, when expanding a parameter which
5092 needs to be constructed on the stack. The target
5093 is the actual stack address that we want to initialize.
5094 The function we call will perform the cleanup in this case. */
5095
5096 DECL_RTL (slot) = target;
5097 }
5098
5c062816
MS
5099 exp1 = TREE_OPERAND (exp, 1);
5100 /* Mark it as expanded. */
5101 TREE_OPERAND (exp, 1) = NULL_TREE;
5102
5103 return expand_expr (exp1, target, tmode, modifier);
bbf6f052
RK
5104 }
5105
5106 case INIT_EXPR:
5107 {
5108 tree lhs = TREE_OPERAND (exp, 0);
5109 tree rhs = TREE_OPERAND (exp, 1);
5110 tree noncopied_parts = 0;
5111 tree lhs_type = TREE_TYPE (lhs);
5112
5113 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5114 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5115 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5116 TYPE_NONCOPIED_PARTS (lhs_type));
5117 while (noncopied_parts != 0)
5118 {
5119 expand_assignment (TREE_VALUE (noncopied_parts),
5120 TREE_PURPOSE (noncopied_parts), 0, 0);
5121 noncopied_parts = TREE_CHAIN (noncopied_parts);
5122 }
5123 return temp;
5124 }
5125
5126 case MODIFY_EXPR:
5127 {
5128 /* If lhs is complex, expand calls in rhs before computing it.
5129 That's so we don't compute a pointer and save it over a call.
5130 If lhs is simple, compute it first so we can give it as a
5131 target if the rhs is just a call. This avoids an extra temp and copy
5132 and that prevents a partial-subsumption which makes bad code.
5133 Actually we could treat component_ref's of vars like vars. */
5134
5135 tree lhs = TREE_OPERAND (exp, 0);
5136 tree rhs = TREE_OPERAND (exp, 1);
5137 tree noncopied_parts = 0;
5138 tree lhs_type = TREE_TYPE (lhs);
5139
5140 temp = 0;
5141
5142 if (TREE_CODE (lhs) != VAR_DECL
5143 && TREE_CODE (lhs) != RESULT_DECL
5144 && TREE_CODE (lhs) != PARM_DECL)
5145 preexpand_calls (exp);
5146
5147	 /* Check for |= or &= of a bitfield of size 1 into another bitfield
5148	 of size 1. In this case (unless we need the result of the
5149	 assignment) we can do this more efficiently with a
5150 test followed by an assignment, if necessary.
5151
5152 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5153 things change so we do, this code should be enhanced to
5154 support it. */
5155 if (ignore
5156 && TREE_CODE (lhs) == COMPONENT_REF
5157 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5158 || TREE_CODE (rhs) == BIT_AND_EXPR)
5159 && TREE_OPERAND (rhs, 0) == lhs
5160 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5161 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5162 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5163 {
5164 rtx label = gen_label_rtx ();
5165
5166 do_jump (TREE_OPERAND (rhs, 1),
5167 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5168 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5169 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5170 (TREE_CODE (rhs) == BIT_IOR_EXPR
5171 ? integer_one_node
5172 : integer_zero_node)),
5173 0, 0);
e7c33f54 5174 do_pending_stack_adjust ();
bbf6f052
RK
5175 emit_label (label);
5176 return const0_rtx;
5177 }
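	  /* Editorial illustration (hypothetical one-bit fields, not from
	     this file): when the assignment's value is not needed, the
	     code above turns

		s.a |= s.b;	into	if (s.b) s.a = 1;
		s.a &= s.b;	into	if (! s.b) s.a = 0;

	     saving a read-modify-write of the destination bitfield.  */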
5178
5179 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5180 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5181 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5182 TYPE_NONCOPIED_PARTS (lhs_type));
5183
5184 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5185 while (noncopied_parts != 0)
5186 {
5187 expand_assignment (TREE_PURPOSE (noncopied_parts),
5188 TREE_VALUE (noncopied_parts), 0, 0);
5189 noncopied_parts = TREE_CHAIN (noncopied_parts);
5190 }
5191 return temp;
5192 }
5193
5194 case PREINCREMENT_EXPR:
5195 case PREDECREMENT_EXPR:
5196 return expand_increment (exp, 0);
5197
5198 case POSTINCREMENT_EXPR:
5199 case POSTDECREMENT_EXPR:
5200 /* Faster to treat as pre-increment if result is not used. */
5201 return expand_increment (exp, ! ignore);
5202
5203 case ADDR_EXPR:
5204 /* Are we taking the address of a nested function? */
5205 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5206 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5207 {
5208 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5209 op0 = force_operand (op0, target);
5210 }
5211 else
5212 {
906c4e36 5213 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
bbf6f052
RK
5214 (modifier == EXPAND_INITIALIZER
5215 ? modifier : EXPAND_CONST_ADDRESS));
896102d0
RK
5216
5217 /* We would like the object in memory. If it is a constant,
5218 we can have it be statically allocated into memory. For
5219 a non-constant (REG or SUBREG), we need to allocate some
5220 memory and store the value into it. */
5221
5222 if (CONSTANT_P (op0))
5223 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5224 op0);
5225
5226 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
5227 {
5228	 /* If this object is in a register, it must not
5229	 be BLKmode. */
5230 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
5231 enum machine_mode inner_mode = TYPE_MODE (inner_type);
5232 rtx memloc
5233 = assign_stack_temp (inner_mode,
5234 int_size_in_bytes (inner_type), 1);
5235
5236 emit_move_insn (memloc, op0);
5237 op0 = memloc;
5238 }
5239
bbf6f052
RK
5240 if (GET_CODE (op0) != MEM)
5241 abort ();
5242
5243 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5244 return XEXP (op0, 0);
5245 op0 = force_operand (XEXP (op0, 0), target);
5246 }
5247 if (flag_force_addr && GET_CODE (op0) != REG)
5248 return force_reg (Pmode, op0);
5249 return op0;
5250
5251 case ENTRY_VALUE_EXPR:
5252 abort ();
5253
7308a047
RS
5254 /* COMPLEX type for Extended Pascal & Fortran */
5255 case COMPLEX_EXPR:
5256 {
5257 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5258
5259 rtx prev;
5260
5261 /* Get the rtx code of the operands. */
5262 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5263 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5264
5265 if (! target)
5266 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5267
5268 prev = get_last_insn ();
5269
5270 /* Tell flow that the whole of the destination is being set. */
5271 if (GET_CODE (target) == REG)
5272 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5273
5274 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
5275 emit_move_insn (gen_realpart (mode, target), op0);
5276 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047
RS
5277
5278 /* Complex construction should appear as a single unit. */
5279 group_insns (prev);
5280
5281 return target;
5282 }
5283
5284 case REALPART_EXPR:
2d7050fd
RS
5285 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5286 return gen_realpart (mode, op0);
7308a047
RS
5287
5288 case IMAGPART_EXPR:
2d7050fd
RS
5289 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5290 return gen_imagpart (mode, op0);
7308a047
RS
5291
5292 case CONJ_EXPR:
5293 {
5294 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5295 rtx imag_t;
5296 rtx prev;
5297
5298 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5299
5300 if (! target)
5301 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5302
5303 prev = get_last_insn ();
5304
5305 /* Tell flow that the whole of the destination is being set. */
5306 if (GET_CODE (target) == REG)
5307 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5308
5309 /* Store the realpart and the negated imagpart to target. */
2d7050fd 5310 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
7308a047 5311
2d7050fd 5312 imag_t = gen_imagpart (mode, target);
7308a047 5313 temp = expand_unop (mode, neg_optab,
2d7050fd 5314 gen_imagpart (mode, op0), imag_t, 0);
7308a047
RS
5315 if (temp != imag_t)
5316 emit_move_insn (imag_t, temp);
5317
5318	 /* Conjugate should appear as a single unit. */
5319 group_insns (prev);
5320
5321 return target;
5322 }
5323
bbf6f052 5324 case ERROR_MARK:
66538193
RS
5325 op0 = CONST0_RTX (tmode);
5326 if (op0 != 0)
5327 return op0;
bbf6f052
RK
5328 return const0_rtx;
5329
5330 default:
5331 return (*lang_expand_expr) (exp, target, tmode, modifier);
5332 }
5333
5334 /* Here to do an ordinary binary operator, generating an instruction
5335 from the optab already placed in `this_optab'. */
5336 binop:
5337 preexpand_calls (exp);
5338 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5339 subtarget = 0;
5340 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 5341 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5342 binop2:
5343 temp = expand_binop (mode, this_optab, op0, op1, target,
5344 unsignedp, OPTAB_LIB_WIDEN);
5345 if (temp == 0)
5346 abort ();
5347 return temp;
5348}
5349\f
e87b4f3f
RS
5350/* Return the alignment in bits of EXP, a pointer valued expression.
5351 But don't return more than MAX_ALIGN no matter what.
bbf6f052
RK
5352 The alignment returned is, by default, the alignment of the thing that
5353 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5354
5355 Otherwise, look at the expression to see if we can do better, i.e., if the
5356 expression is actually pointing at an object whose alignment is tighter. */
5357
5358static int
5359get_pointer_alignment (exp, max_align)
5360 tree exp;
5361 unsigned max_align;
5362{
5363 unsigned align, inner;
5364
5365 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5366 return 0;
5367
5368 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5369 align = MIN (align, max_align);
5370
5371 while (1)
5372 {
5373 switch (TREE_CODE (exp))
5374 {
5375 case NOP_EXPR:
5376 case CONVERT_EXPR:
5377 case NON_LVALUE_EXPR:
5378 exp = TREE_OPERAND (exp, 0);
5379 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5380 return align;
5381 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5382 inner = MIN (inner, max_align);
5383 align = MAX (align, inner);
5384 break;
5385
5386 case PLUS_EXPR:
5387 /* If sum of pointer + int, restrict our maximum alignment to that
5388 imposed by the integer. If not, we can't do any better than
5389 ALIGN. */
5390 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5391 return align;
5392
e87b4f3f
RS
5393 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5394 & (max_align - 1))
5395 != 0)
bbf6f052
RK
5396 max_align >>= 1;
5397
5398 exp = TREE_OPERAND (exp, 0);
5399 break;
5400
5401 case ADDR_EXPR:
5402 /* See what we are pointing at and look at its alignment. */
5403 exp = TREE_OPERAND (exp, 0);
e7c33f54
RK
5404 if (TREE_CODE (exp) == FUNCTION_DECL)
5405 align = MAX (align, FUNCTION_BOUNDARY);
5406 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
bbf6f052
RK
5407 align = MAX (align, DECL_ALIGN (exp));
5408#ifdef CONSTANT_ALIGNMENT
5409 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5410 align = CONSTANT_ALIGNMENT (exp, align);
5411#endif
5412 return MIN (align, max_align);
5413
5414 default:
5415 return align;
5416 }
5417 }
5418}
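/* Editorial sketch (not part of the original file): the PLUS_EXPR case
   above keeps halving MAX_ALIGN until it divides the constant byte
   offset, expressed in bits.  The same arithmetic rendered standalone,
   under the same assumptions about BITS_PER_UNIT:  */
#if 0
static unsigned
narrowed_max_align (offset_bytes, max_align)
     unsigned offset_bytes;
     unsigned max_align;
{
  /* E.g., a byte offset of 2 with 8-bit units narrows a maximum
     alignment of 64 bits down to 16 bits.  */
  while (((offset_bytes * BITS_PER_UNIT) & (max_align - 1)) != 0)
    max_align >>= 1;
  return max_align;
}
#endif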
5419\f
5420/* Return the tree node and offset if a given argument corresponds to
5421 a string constant. */
5422
5423static tree
5424string_constant (arg, ptr_offset)
5425 tree arg;
5426 tree *ptr_offset;
5427{
5428 STRIP_NOPS (arg);
5429
5430 if (TREE_CODE (arg) == ADDR_EXPR
5431 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5432 {
5433 *ptr_offset = integer_zero_node;
5434 return TREE_OPERAND (arg, 0);
5435 }
5436 else if (TREE_CODE (arg) == PLUS_EXPR)
5437 {
5438 tree arg0 = TREE_OPERAND (arg, 0);
5439 tree arg1 = TREE_OPERAND (arg, 1);
5440
5441 STRIP_NOPS (arg0);
5442 STRIP_NOPS (arg1);
5443
5444 if (TREE_CODE (arg0) == ADDR_EXPR
5445 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5446 {
5447 *ptr_offset = arg1;
5448 return TREE_OPERAND (arg0, 0);
5449 }
5450 else if (TREE_CODE (arg1) == ADDR_EXPR
5451 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5452 {
5453 *ptr_offset = arg0;
5454 return TREE_OPERAND (arg1, 0);
5455 }
5456 }
5457
5458 return 0;
5459}
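/* Editorial example (not from the original source): an argument of the
   form "hello" + 2 -- a PLUS_EXPR whose first operand is the ADDR_EXPR
   of a STRING_CST -- yields the "hello" node with *PTR_OFFSET set to 2;
   a plain "hello" yields an offset of integer_zero_node.  */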
5460
5461/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5462 way, because it could contain a zero byte in the middle.
5463 TREE_STRING_LENGTH is the size of the character array, not the string.
5464
5465 Unfortunately, string_constant can't access the values of const char
5466	 arrays with initializers, so neither can we here. */
5467
5468static tree
5469c_strlen (src)
5470 tree src;
5471{
5472 tree offset_node;
5473 int offset, max;
5474 char *ptr;
5475
5476 src = string_constant (src, &offset_node);
5477 if (src == 0)
5478 return 0;
5479 max = TREE_STRING_LENGTH (src);
5480 ptr = TREE_STRING_POINTER (src);
5481 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5482 {
5483 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5484 compute the offset to the following null if we don't know where to
5485 start searching for it. */
5486 int i;
5487 for (i = 0; i < max; i++)
5488 if (ptr[i] == 0)
5489 return 0;
5490 /* We don't know the starting offset, but we do know that the string
5491 has no internal zero bytes. We can assume that the offset falls
5492 within the bounds of the string; otherwise, the programmer deserves
5493 what he gets. Subtract the offset from the length of the string,
5494 and return that. */
5495 /* This would perhaps not be valid if we were dealing with named
5496 arrays in addition to literal string constants. */
5497 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5498 }
5499
5500 /* We have a known offset into the string. Start searching there for
5501 a null character. */
5502 if (offset_node == 0)
5503 offset = 0;
5504 else
5505 {
5506 /* Did we get a long long offset? If so, punt. */
5507 if (TREE_INT_CST_HIGH (offset_node) != 0)
5508 return 0;
5509 offset = TREE_INT_CST_LOW (offset_node);
5510 }
5511 /* If the offset is known to be out of bounds, warn, and call strlen at
5512 runtime. */
5513 if (offset < 0 || offset > max)
5514 {
5515 warning ("offset outside bounds of constant string");
5516 return 0;
5517 }
5518 /* Use strlen to search for the first zero byte. Since any strings
5519 constructed with build_string will have nulls appended, we win even
5520 if we get handed something like (char[4])"abcd".
5521
5522 Since OFFSET is our starting index into the string, no further
5523 calculation is needed. */
5524 return size_int (strlen (ptr + offset));
5525}
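/* Editorial example (not from the original source): for
   strlen ("abcd" + 1) the offset is the constant 1, so the result folds
   to size_int (3).  For "foo\0bar" with a non-constant offset, the
   internal zero byte makes the length unknowable here, so 0 is returned
   and strlen gets called at run time.  */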
5526\f
5527/* Expand an expression EXP that calls a built-in function,
5528 with result going to TARGET if that's convenient
5529 (and in mode MODE if that's convenient).
5530 SUBTARGET may be used as the target for computing one of EXP's operands.
5531 IGNORE is nonzero if the value is to be ignored. */
5532
5533static rtx
5534expand_builtin (exp, target, subtarget, mode, ignore)
5535 tree exp;
5536 rtx target;
5537 rtx subtarget;
5538 enum machine_mode mode;
5539 int ignore;
5540{
5541 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5542 tree arglist = TREE_OPERAND (exp, 1);
5543 rtx op0;
60bac6ea 5544 rtx lab1, insns;
bbf6f052 5545 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1bbddf11 5546 optab builtin_optab;
bbf6f052
RK
5547
5548 switch (DECL_FUNCTION_CODE (fndecl))
5549 {
5550 case BUILT_IN_ABS:
5551 case BUILT_IN_LABS:
5552 case BUILT_IN_FABS:
5553 /* build_function_call changes these into ABS_EXPR. */
5554 abort ();
5555
1bbddf11
JVA
5556 case BUILT_IN_SIN:
5557 case BUILT_IN_COS:
e87b4f3f
RS
5558 case BUILT_IN_FSQRT:
5559 /* If not optimizing, call the library function. */
8c8a8e34 5560 if (! optimize)
e87b4f3f
RS
5561 break;
5562
5563 if (arglist == 0
19deaec9 5564 /* Arg could be wrong type if user redeclared this fcn wrong. */
e87b4f3f 5565 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
19deaec9 5566 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
e87b4f3f 5567
db0e6d01
RS
5568 /* Stabilize and compute the argument. */
5569 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5570 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5571 {
5572 exp = copy_node (exp);
5573 arglist = copy_node (arglist);
5574 TREE_OPERAND (exp, 1) = arglist;
5575 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5576 }
e87b4f3f 5577 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
e7c33f54
RK
5578
5579 /* Make a suitable register to place result in. */
5580 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5581
c1f7c223 5582 emit_queue ();
8c8a8e34 5583 start_sequence ();
e7c33f54 5584
1bbddf11
JVA
5585 switch (DECL_FUNCTION_CODE (fndecl))
5586 {
5587 case BUILT_IN_SIN:
5588 builtin_optab = sin_optab; break;
5589 case BUILT_IN_COS:
5590 builtin_optab = cos_optab; break;
5591 case BUILT_IN_FSQRT:
5592 builtin_optab = sqrt_optab; break;
5593 default:
5594 abort ();
5595 }
5596
5597 /* Compute into TARGET.
e87b4f3f
RS
5598 Set TARGET to wherever the result comes back. */
5599 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
1bbddf11 5600 builtin_optab, op0, target, 0);
e7c33f54
RK
5601
5602 /* If we were unable to expand via the builtin, stop the
5603 sequence (without outputting the insns) and break, causing
5604	 a call to the library function. */
e87b4f3f 5605 if (target == 0)
e7c33f54 5606 {
8c8a8e34 5607 end_sequence ();
e7c33f54
RK
5608 break;
5609 }
e87b4f3f 5610
60bac6ea
RS
5611 /* Check the results by default. But if flag_fast_math is turned on,
5612 then assume sqrt will always be called with valid arguments. */
5613
5614 if (! flag_fast_math)
5615 {
1bbddf11 5616 /* Don't define the builtin FP instructions
60bac6ea
RS
5617 if your machine is not IEEE. */
5618 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5619 abort ();
5620
5621 lab1 = gen_label_rtx ();
5622
5623 /* Test the result; if it is NaN, set errno=EDOM because
5624 the argument was not in the domain. */
5625 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5626 emit_jump_insn (gen_beq (lab1));
5627
5628#if TARGET_EDOM
5629 {
5630#ifdef GEN_ERRNO_RTX
5631 rtx errno_rtx = GEN_ERRNO_RTX;
5632#else
5633 rtx errno_rtx
5634 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5635#endif
5636
5637 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5638 }
5639#else
5640 /* We can't set errno=EDOM directly; let the library call do it.
5641 Pop the arguments right away in case the call gets deleted. */
5642 NO_DEFER_POP;
5643 expand_call (exp, target, 0);
5644 OK_DEFER_POP;
5645#endif
5646
5647 emit_label (lab1);
5648 }
e87b4f3f 5649
e7c33f54 5650 /* Output the entire sequence. */
8c8a8e34
JW
5651 insns = get_insns ();
5652 end_sequence ();
5653 emit_insns (insns);
e7c33f54
RK
5654
5655 return target;
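      /* Editorial illustration (not from the original source): with
	 flag_fast_math clear, the sequence built above behaves like

		t = sqrt (x);
		if (t == t)
		  goto ok;
		errno = EDOM;
	      ok: ;

	 relying on the IEEE rule that a NaN compares unequal to itself;
	 where TARGET_EDOM is not defined, the errno store becomes a call
	 to the library function instead.  */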
5656
0006469d
TW
5657	 /* __builtin_apply_args returns a block of memory allocated on
5658	 the stack into which are stored the arg pointer, structure
5659 value address, static chain, and all the registers that might
5660 possibly be used in performing a function call. The code is
5661 moved to the start of the function so the incoming values are
5662 saved. */
5663 case BUILT_IN_APPLY_ARGS:
5664 /* Don't do __builtin_apply_args more than once in a function.
5665 Save the result of the first call and reuse it. */
5666 if (apply_args_value != 0)
5667 return apply_args_value;
5668 {
5669 /* When this function is called, it means that registers must be
5670 saved on entry to this function. So we migrate the
5671 call to the first insn of this function. */
5672 rtx temp;
5673 rtx seq;
5674
5675 start_sequence ();
5676 temp = expand_builtin_apply_args ();
5677 seq = get_insns ();
5678 end_sequence ();
5679
5680 apply_args_value = temp;
5681
5682 /* Put the sequence after the NOTE that starts the function.
5683 If this is inside a SEQUENCE, make the outer-level insn
5684 chain current, so the code is placed at the start of the
5685 function. */
5686 push_topmost_sequence ();
5687 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5688 pop_topmost_sequence ();
5689 return temp;
5690 }
5691
5692 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
5693 FUNCTION with a copy of the parameters described by
5694 ARGUMENTS, and ARGSIZE. It returns a block of memory
5695	 allocated on the stack into which are stored all the registers
5696 that might possibly be used for returning the result of a
5697 function. ARGUMENTS is the value returned by
5698 __builtin_apply_args. ARGSIZE is the number of bytes of
5699 arguments that must be copied. ??? How should this value be
5700 computed? We'll also need a safe worst case value for varargs
5701 functions. */
5702 case BUILT_IN_APPLY:
5703 if (arglist == 0
5704 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5705 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
5706 || TREE_CHAIN (arglist) == 0
5707 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
5708 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
5709 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
5710 return const0_rtx;
5711 else
5712 {
5713 int i;
5714 tree t;
5715 rtx ops[3];
5716
5717 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
5718 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
5719
5720 return expand_builtin_apply (ops[0], ops[1], ops[2]);
5721 }
5722
5723 /* __builtin_return (RESULT) causes the function to return the
5724 value described by RESULT. RESULT is address of the block of
5725 memory returned by __builtin_apply. */
5726 case BUILT_IN_RETURN:
5727 if (arglist
5728 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5729 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
5730 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
5731 NULL_RTX, VOIDmode, 0));
5732 return const0_rtx;
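      /* Editorial sketch of intended use (hypothetical wrapper, not from
	 this file): the three builtins above combine to forward a call
	 and its return value,

		void *args = __builtin_apply_args ();
		void *result = __builtin_apply ((void (*) ()) fn, args, 64);
		__builtin_return (result);

	 where FN is the function being wrapped and 64 stands in for the
	 worst-case ARGSIZE questioned in the comment above.  */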
5733
bbf6f052
RK
5734 case BUILT_IN_SAVEREGS:
5735 /* Don't do __builtin_saveregs more than once in a function.
5736 Save the result of the first call and reuse it. */
5737 if (saveregs_value != 0)
5738 return saveregs_value;
5739 {
5740 /* When this function is called, it means that registers must be
5741 saved on entry to this function. So we migrate the
5742 call to the first insn of this function. */
5743 rtx temp;
5744 rtx seq;
5745 rtx valreg, saved_valreg;
5746
5747 /* Now really call the function. `expand_call' does not call
5748 expand_builtin, so there is no danger of infinite recursion here. */
5749 start_sequence ();
5750
5751#ifdef EXPAND_BUILTIN_SAVEREGS
5752 /* Do whatever the machine needs done in this case. */
5753 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5754#else
5755 /* The register where the function returns its value
5756 is likely to have something else in it, such as an argument.
5757 So preserve that register around the call. */
5758 if (value_mode != VOIDmode)
5759 {
5760 valreg = hard_libcall_value (value_mode);
5761 saved_valreg = gen_reg_rtx (value_mode);
5762 emit_move_insn (saved_valreg, valreg);
5763 }
5764
5765 /* Generate the call, putting the value in a pseudo. */
5766 temp = expand_call (exp, target, ignore);
5767
5768 if (value_mode != VOIDmode)
5769 emit_move_insn (valreg, saved_valreg);
5770#endif
5771
5772 seq = get_insns ();
5773 end_sequence ();
5774
5775 saveregs_value = temp;
5776
0006469d
TW
5777 /* Put the sequence after the NOTE that starts the function.
5778 If this is inside a SEQUENCE, make the outer-level insn
5779 chain current, so the code is placed at the start of the
5780 function. */
5781 push_topmost_sequence ();
bbf6f052 5782 emit_insns_before (seq, NEXT_INSN (get_insns ()));
0006469d 5783 pop_topmost_sequence ();
bbf6f052
RK
5784 return temp;
5785 }
5786
5787 /* __builtin_args_info (N) returns word N of the arg space info
5788 for the current function. The number and meanings of words
5789	 are controlled by the definition of CUMULATIVE_ARGS. */
5790 case BUILT_IN_ARGS_INFO:
5791 {
5792 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5793 int i;
5794 int *word_ptr = (int *) &current_function_args_info;
5795 tree type, elts, result;
5796
5797 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5798 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5799 __FILE__, __LINE__);
5800
5801 if (arglist != 0)
5802 {
5803 tree arg = TREE_VALUE (arglist);
5804 if (TREE_CODE (arg) != INTEGER_CST)
42b85a55 5805 error ("argument of `__builtin_args_info' must be constant");
bbf6f052
RK
5806 else
5807 {
5808 int wordnum = TREE_INT_CST_LOW (arg);
5809
42b85a55
RS
5810 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
5811 error ("argument of `__builtin_args_info' out of range");
bbf6f052 5812 else
906c4e36 5813 return GEN_INT (word_ptr[wordnum]);
bbf6f052
RK
5814 }
5815 }
5816 else
42b85a55 5817 error ("missing argument in `__builtin_args_info'");
bbf6f052
RK
5818
5819 return const0_rtx;
5820
5821#if 0
5822 for (i = 0; i < nwords; i++)
5823 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5824
5825 type = build_array_type (integer_type_node,
5826 build_index_type (build_int_2 (nwords, 0)));
5827 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5828 TREE_CONSTANT (result) = 1;
5829 TREE_STATIC (result) = 1;
5830 result = build (INDIRECT_REF, build_pointer_type (type), result);
5831 TREE_CONSTANT (result) = 1;
906c4e36 5832 return expand_expr (result, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5833#endif
5834 }
5835
5836 /* Return the address of the first anonymous stack arg. */
5837 case BUILT_IN_NEXT_ARG:
5838 {
5839 tree fntype = TREE_TYPE (current_function_decl);
5840 if (!(TYPE_ARG_TYPES (fntype) != 0
5841 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5842 != void_type_node)))
5843 {
5844 error ("`va_start' used in function with fixed args");
5845 return const0_rtx;
5846 }
5847 }
5848
5849 return expand_binop (Pmode, add_optab,
5850 current_function_internal_arg_pointer,
5851 current_function_arg_offset_rtx,
906c4e36 5852 NULL_RTX, 0, OPTAB_LIB_WIDEN);
bbf6f052
RK
5853
5854 case BUILT_IN_CLASSIFY_TYPE:
5855 if (arglist != 0)
5856 {
5857 tree type = TREE_TYPE (TREE_VALUE (arglist));
5858 enum tree_code code = TREE_CODE (type);
5859 if (code == VOID_TYPE)
906c4e36 5860 return GEN_INT (void_type_class);
bbf6f052 5861 if (code == INTEGER_TYPE)
906c4e36 5862 return GEN_INT (integer_type_class);
bbf6f052 5863 if (code == CHAR_TYPE)
906c4e36 5864 return GEN_INT (char_type_class);
bbf6f052 5865 if (code == ENUMERAL_TYPE)
906c4e36 5866 return GEN_INT (enumeral_type_class);
bbf6f052 5867 if (code == BOOLEAN_TYPE)
906c4e36 5868 return GEN_INT (boolean_type_class);
bbf6f052 5869 if (code == POINTER_TYPE)
906c4e36 5870 return GEN_INT (pointer_type_class);
bbf6f052 5871 if (code == REFERENCE_TYPE)
906c4e36 5872 return GEN_INT (reference_type_class);
bbf6f052 5873 if (code == OFFSET_TYPE)
906c4e36 5874 return GEN_INT (offset_type_class);
bbf6f052 5875 if (code == REAL_TYPE)
906c4e36 5876 return GEN_INT (real_type_class);
bbf6f052 5877 if (code == COMPLEX_TYPE)
906c4e36 5878 return GEN_INT (complex_type_class);
bbf6f052 5879 if (code == FUNCTION_TYPE)
906c4e36 5880 return GEN_INT (function_type_class);
bbf6f052 5881 if (code == METHOD_TYPE)
906c4e36 5882 return GEN_INT (method_type_class);
bbf6f052 5883 if (code == RECORD_TYPE)
906c4e36 5884 return GEN_INT (record_type_class);
e7f3c83f 5885 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
906c4e36 5886 return GEN_INT (union_type_class);
bbf6f052 5887 if (code == ARRAY_TYPE)
906c4e36 5888 return GEN_INT (array_type_class);
bbf6f052 5889 if (code == STRING_TYPE)
906c4e36 5890 return GEN_INT (string_type_class);
bbf6f052 5891 if (code == SET_TYPE)
906c4e36 5892 return GEN_INT (set_type_class);
bbf6f052 5893 if (code == FILE_TYPE)
906c4e36 5894 return GEN_INT (file_type_class);
bbf6f052 5895 if (code == LANG_TYPE)
906c4e36 5896 return GEN_INT (lang_type_class);
bbf6f052 5897 }
906c4e36 5898 return GEN_INT (no_type_class);
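      /* Editorial example (not from the original source): with the
	 numbering above, __builtin_classify_type (1.0) expands to the
	 constant real_type_class, and a missing or unrecognized argument
	 yields no_type_class.  */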
bbf6f052
RK
5899
5900 case BUILT_IN_CONSTANT_P:
5901 if (arglist == 0)
5902 return const0_rtx;
5903 else
cda0ec81 5904 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
bbf6f052
RK
5905 ? const1_rtx : const0_rtx);
5906
5907 case BUILT_IN_FRAME_ADDRESS:
5908 /* The argument must be a nonnegative integer constant.
5909 It counts the number of frames to scan up the stack.
5910 The value is the address of that frame. */
5911 case BUILT_IN_RETURN_ADDRESS:
5912 /* The argument must be a nonnegative integer constant.
5913 It counts the number of frames to scan up the stack.
5914 The value is the return address saved in that frame. */
5915 if (arglist == 0)
5916 /* Warning about missing arg was already issued. */
5917 return const0_rtx;
5918 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5919 {
42b85a55 5920 error ("invalid arg to `__builtin_return_address'");
bbf6f052
RK
5921 return const0_rtx;
5922 }
5923 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5924 {
42b85a55 5925 error ("invalid arg to `__builtin_return_address'");
bbf6f052
RK
5926 return const0_rtx;
5927 }
5928 else
5929 {
5930 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5931 rtx tem = frame_pointer_rtx;
5932 int i;
5933
46b68a37
JW
5934 /* Some machines need special handling before we can access arbitrary
5935 frames. For example, on the sparc, we must first flush all
5936 register windows to the stack. */
5937#ifdef SETUP_FRAME_ADDRESSES
5938 SETUP_FRAME_ADDRESSES ();
5939#endif
5940
5941 /* On the sparc, the return address is not in the frame, it is
5942 in a register. There is no way to access it off of the current
5943 frame pointer, but it can be accessed off the previous frame
5944 pointer by reading the value from the register window save
5945 area. */
5946#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
5947 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
5948 count--;
5949#endif
5950
bbf6f052
RK
5951 /* Scan back COUNT frames to the specified frame. */
5952 for (i = 0; i < count; i++)
5953 {
5954 /* Assume the dynamic chain pointer is in the word that
5955 the frame address points to, unless otherwise specified. */
5956#ifdef DYNAMIC_CHAIN_ADDRESS
5957 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5958#endif
5959 tem = memory_address (Pmode, tem);
5960 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5961 }
5962
5963 /* For __builtin_frame_address, return what we've got. */
5964 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5965 return tem;
5966
5967 /* For __builtin_return_address,
5968	 get the return address from that frame. */
5969#ifdef RETURN_ADDR_RTX
5970 return RETURN_ADDR_RTX (count, tem);
5971#else
5972 tem = memory_address (Pmode,
5973 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5974 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5975#endif
5976 }
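      /* Editorial example (not from the original source): in user code,
	 __builtin_return_address (0) runs the loop above zero times and
	 so reads the return address slot of the current frame.  */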
5977
5978 case BUILT_IN_ALLOCA:
5979 if (arglist == 0
5980 /* Arg could be non-integer if user redeclared this fcn wrong. */
5981 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5982 return const0_rtx;
5983 current_function_calls_alloca = 1;
5984 /* Compute the argument. */
906c4e36 5985 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5986
5987 /* Allocate the desired space. */
8c8a8e34 5988 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
bbf6f052
RK
5989
5990 /* Record the new stack level for nonlocal gotos. */
6dc42e49 5991 if (nonlocal_goto_handler_slot != 0)
906c4e36 5992 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
bbf6f052
RK
5993 return target;
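      /* Editorial example (not from the original source): a call such as

		char *buf = __builtin_alloca (n);

	 expands to the dynamic stack allocation above, with the new stack
	 level recorded so nonlocal gotos can restore it.  */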
5994
5995 case BUILT_IN_FFS:
5996 /* If not optimizing, call the library function. */
5997 if (!optimize)
5998 break;
5999
6000 if (arglist == 0
6001 /* Arg could be non-integer if user redeclared this fcn wrong. */
6002 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6003 return const0_rtx;
6004
6005 /* Compute the argument. */
6006 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6007 /* Compute ffs, into TARGET if possible.
6008 Set TARGET to wherever the result comes back. */
6009 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6010 ffs_optab, op0, target, 1);
6011 if (target == 0)
6012 abort ();
6013 return target;
6014
6015 case BUILT_IN_STRLEN:
6016 /* If not optimizing, call the library function. */
6017 if (!optimize)
6018 break;
6019
6020 if (arglist == 0
6021 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6022 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6023 return const0_rtx;
6024 else
6025 {
e7c33f54
RK
6026 tree src = TREE_VALUE (arglist);
6027 tree len = c_strlen (src);
bbf6f052 6028
e7c33f54
RK
6029 int align
6030 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6031
6032 rtx result, src_rtx, char_rtx;
6033 enum machine_mode insn_mode = value_mode, char_mode;
6034 enum insn_code icode;
6035
6036 /* If the length is known, just return it. */
6037 if (len != 0)
6038 return expand_expr (len, target, mode, 0);
6039
6040 /* If SRC is not a pointer type, don't do this operation inline. */
6041 if (align == 0)
6042 break;
6043
6044 /* Call a function if we can't compute strlen in the right mode. */
6045
6046 while (insn_mode != VOIDmode)
6047 {
6048 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6049 if (icode != CODE_FOR_nothing)
6050 break;
6051
6052 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6053 }
6054 if (insn_mode == VOIDmode)
bbf6f052 6055 break;
e7c33f54
RK
6056
6057 /* Make a place to write the result of the instruction. */
6058 result = target;
6059 if (! (result != 0
6060 && GET_CODE (result) == REG
6061 && GET_MODE (result) == insn_mode
6062 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6063 result = gen_reg_rtx (insn_mode);
6064
4d613828 6065 /* Make sure the operands are acceptable to the predicates. */
e7c33f54 6066
4d613828 6067 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
e7c33f54
RK
6068 result = gen_reg_rtx (insn_mode);
6069
6070 src_rtx = memory_address (BLKmode,
906c4e36 6071 expand_expr (src, NULL_RTX, Pmode,
e7c33f54 6072 EXPAND_NORMAL));
4d613828 6073 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
e7c33f54
RK
6074 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6075
6076 char_rtx = const0_rtx;
4d613828
RS
6077 char_mode = insn_operand_mode[(int)icode][2];
6078 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
e7c33f54
RK
6079 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6080
6081 emit_insn (GEN_FCN (icode) (result,
6082 gen_rtx (MEM, BLKmode, src_rtx),
906c4e36 6083 char_rtx, GEN_INT (align)));
e7c33f54
RK
6084
6085 /* Return the value in the proper mode for this function. */
6086 if (GET_MODE (result) == value_mode)
6087 return result;
6088 else if (target != 0)
6089 {
6090 convert_move (target, result, 0);
6091 return target;
6092 }
6093 else
6094 return convert_to_mode (value_mode, result, 0);
bbf6f052
RK
6095 }
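      /* Editorial example (not from the original source): when optimizing,
	 strlen ("hello") folds via c_strlen above to the constant 5, while
	 strlen (p) for an unknown P uses the machine's strlen pattern if
	 one exists, and otherwise falls back to the library call.  */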
6096
6097 case BUILT_IN_STRCPY:
6098 /* If not optimizing, call the library function. */
6099 if (!optimize)
6100 break;
6101
6102 if (arglist == 0
6103 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6104 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6105 || TREE_CHAIN (arglist) == 0
6106 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6107 return const0_rtx;
6108 else
6109 {
6110 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6111
6112 if (len == 0)
6113 break;
6114
6115 len = size_binop (PLUS_EXPR, len, integer_one_node);
6116
906c4e36 6117 chainon (arglist, build_tree_list (NULL_TREE, len));
bbf6f052
RK
6118 }
6119
6120 /* Drops in. */
6121 case BUILT_IN_MEMCPY:
6122 /* If not optimizing, call the library function. */
6123 if (!optimize)
6124 break;
6125
6126 if (arglist == 0
6127 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6128 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6129 || TREE_CHAIN (arglist) == 0
6130 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6131 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6132 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6133 return const0_rtx;
6134 else
6135 {
6136 tree dest = TREE_VALUE (arglist);
6137 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6138 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6139
6140 int src_align
6141 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6142 int dest_align
6143 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9937da1a 6144 rtx dest_rtx, dest_mem, src_mem;
bbf6f052
RK
6145
6146 /* If either SRC or DEST is not a pointer type, don't do
6147 this operation in-line. */
6148 if (src_align == 0 || dest_align == 0)
6149 {
6150 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6151 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6152 break;
6153 }
6154
906c4e36 6155 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
9937da1a
RS
6156 dest_mem = gen_rtx (MEM, BLKmode,
6157 memory_address (BLKmode, dest_rtx));
6158 src_mem = gen_rtx (MEM, BLKmode,
6159 memory_address (BLKmode,
6160 expand_expr (src, NULL_RTX,
6161 Pmode,
6162 EXPAND_NORMAL)));
bbf6f052
RK
6163
6164 /* Copy word part most expediently. */
9937da1a 6165 emit_block_move (dest_mem, src_mem,
906c4e36 6166 expand_expr (len, NULL_RTX, VOIDmode, 0),
bbf6f052
RK
6167 MIN (src_align, dest_align));
6168 return dest_rtx;
6169 }
6170
6171/* These comparison functions need an instruction that returns an actual
6172 index. An ordinary compare that just sets the condition codes
6173 is not enough. */
6174#ifdef HAVE_cmpstrsi
6175 case BUILT_IN_STRCMP:
6176 /* If not optimizing, call the library function. */
6177 if (!optimize)
6178 break;
6179
6180 if (arglist == 0
6181 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6182 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6183 || TREE_CHAIN (arglist) == 0
6184 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6185 return const0_rtx;
6186 else if (!HAVE_cmpstrsi)
6187 break;
6188 {
6189 tree arg1 = TREE_VALUE (arglist);
6190 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6191 tree offset;
6192 tree len, len2;
6193
6194 len = c_strlen (arg1);
6195 if (len)
6196 len = size_binop (PLUS_EXPR, integer_one_node, len);
6197 len2 = c_strlen (arg2);
6198 if (len2)
6199 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6200
6201 /* If we don't have a constant length for the first, use the length
6202 of the second, if we know it. We don't require a constant for
6203 this case; some cost analysis could be done if both are available
6204 but neither is constant. For now, assume they're equally cheap.
6205
6206 If both strings have constant lengths, use the smaller. This
6207	 could arise if optimization results in strcmp being called with
6208 two fixed strings, or if the code was machine-generated. We should
6209 add some code to the `memcmp' handler below to deal with such
6210 situations, someday. */
6211 if (!len || TREE_CODE (len) != INTEGER_CST)
6212 {
6213 if (len2)
6214 len = len2;
6215 else if (len == 0)
6216 break;
6217 }
6218 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6219 {
6220 if (tree_int_cst_lt (len2, len))
6221 len = len2;
6222 }
6223
906c4e36 6224 chainon (arglist, build_tree_list (NULL_TREE, len));
bbf6f052
RK
6225 }
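	  /* Editorial example (not from the original source): for
	     strcmp ("ab", "abcd") both lengths are constant, so the
	     smaller length plus one -- here 3 -- is appended as the
	     length argument before dropping into the memcmp code below.  */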
6226
6227 /* Drops in. */
6228 case BUILT_IN_MEMCMP:
6229 /* If not optimizing, call the library function. */
6230 if (!optimize)
6231 break;
6232
6233 if (arglist == 0
6234 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6235 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6236 || TREE_CHAIN (arglist) == 0
6237 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6238 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6239 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6240 return const0_rtx;
6241 else if (!HAVE_cmpstrsi)
6242 break;
6243 {
6244 tree arg1 = TREE_VALUE (arglist);
6245 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6246 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6247 rtx result;
6248
6249 int arg1_align
6250 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6251 int arg2_align
6252 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6253 enum machine_mode insn_mode
6254 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6255
6256 /* If we don't have POINTER_TYPE, call the function. */
6257 if (arg1_align == 0 || arg2_align == 0)
6258 {
6259 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6260 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6261 break;
6262 }
6263
6264 /* Make a place to write the result of the instruction. */
6265 result = target;
6266 if (! (result != 0
6267 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6268 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6269 result = gen_reg_rtx (insn_mode);
6270
6271 emit_insn (gen_cmpstrsi (result,
6272 gen_rtx (MEM, BLKmode,
906c4e36
RK
6273 expand_expr (arg1, NULL_RTX, Pmode,
6274 EXPAND_NORMAL)),
bbf6f052 6275 gen_rtx (MEM, BLKmode,
906c4e36
RK
6276 expand_expr (arg2, NULL_RTX, Pmode,
6277 EXPAND_NORMAL)),
6278 expand_expr (len, NULL_RTX, VOIDmode, 0),
6279 GEN_INT (MIN (arg1_align, arg2_align))));
bbf6f052
RK
6280
6281 /* Return the value in the proper mode for this function. */
6282 mode = TYPE_MODE (TREE_TYPE (exp));
6283 if (GET_MODE (result) == mode)
6284 return result;
6285 else if (target != 0)
6286 {
6287 convert_move (target, result, 0);
6288 return target;
6289 }
6290 else
6291 return convert_to_mode (mode, result, 0);
6292 }
6293#else
6294 case BUILT_IN_STRCMP:
6295 case BUILT_IN_MEMCMP:
6296 break;
6297#endif
6298
6299 default: /* just do library call, if unknown builtin */
42b85a55 6300 error ("built-in function `%s' not currently supported",
bbf6f052
RK
6301 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6302 }
6303
6304 /* The switch statement above can drop through to cause the function
6305 to be called normally. */
6306
6307 return expand_call (exp, target, ignore);
6308}
6309\f
0006469d
TW
6310/* Built-in functions to perform an untyped call and return. */
6311
6312/* For each register that may be used for calling a function, this
6313 gives a mode used to copy the register's value. VOIDmode indicates
6314 the register is not used for calling a function. If the machine
6315 has register windows, this gives only the outbound registers.
6316 INCOMING_REGNO gives the corresponding inbound register. */
6317static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
6318
6319/* For each register that may be used for returning values, this gives
6320 a mode used to copy the register's value. VOIDmode indicates the
6321 register is not used for returning values. If the machine has
6322 register windows, this gives only the outbound registers.
6323 INCOMING_REGNO gives the corresponding inbound register. */
6324static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
6325
6326/* Return the size required for the block returned by __builtin_apply_args,
6327 and initialize apply_args_mode. */
6328static int
6329apply_args_size ()
6330{
6331 static int size = -1;
6332 int align, regno;
6333 enum machine_mode mode;
6334
6335 /* The values computed by this function never change. */
6336 if (size < 0)
6337 {
6338 /* The first value is the incoming arg-pointer. */
6339 size = GET_MODE_SIZE (Pmode);
6340
6341 /* The second value is the structure value address unless this is
6342 passed as an "invisible" first argument. */
6343 if (struct_value_rtx)
6344 size += GET_MODE_SIZE (Pmode);
6345
6346 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6347 if (FUNCTION_ARG_REGNO_P (regno))
6348 {
6349 /* Search for the proper mode for copying this register's
6350 value. I'm not sure this is right, but it works so far. */
6351 enum machine_mode best_mode = VOIDmode;
6352
6353 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6354 mode != VOIDmode;
6355 mode = GET_MODE_WIDER_MODE (mode))
6356 if (HARD_REGNO_MODE_OK (regno, mode)
6357 && HARD_REGNO_NREGS (regno, mode) == 1)
6358 best_mode = mode;
6359
6360 if (best_mode == VOIDmode)
6361 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6362 mode != VOIDmode;
6363 mode = GET_MODE_WIDER_MODE (mode))
6364 if (HARD_REGNO_MODE_OK (regno, mode)
6365 && (mov_optab->handlers[(int) mode].insn_code
6366 != CODE_FOR_nothing))
6367 best_mode = mode;
6368
6369 mode = best_mode;
6370 if (mode == VOIDmode)
6371 abort ();
6372
6373 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6374 if (size % align != 0)
6375 size = CEIL (size, align) * align;
6376 size += GET_MODE_SIZE (mode);
6377 apply_args_mode[regno] = mode;
6378 }
6379 else
6380 apply_args_mode[regno] = VOIDmode;
6381 }
6382 return size;
6383}
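/* Editorial note (not from the original source): the rounding used above,
   size = CEIL (size, align) * align, bumps SIZE up to the next multiple
   of ALIGN.  E.g., a running size of 6 and a register slot needing 4-byte
   alignment round SIZE to 8 before that slot's GET_MODE_SIZE is added.  */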
6384
6385/* Return the size required for the block returned by __builtin_apply,
6386 and initialize apply_result_mode. */
6387static int
6388apply_result_size ()
6389{
6390 static int size = -1;
6391 int align, regno;
6392 enum machine_mode mode;
6393
6394 /* The values computed by this function never change. */
6395 if (size < 0)
6396 {
6397 size = 0;
6398
6399 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6400 if (FUNCTION_VALUE_REGNO_P (regno))
6401 {
6402 /* Search for the proper mode for copying this register's
6403 value. I'm not sure this is right, but it works so far. */
6404 enum machine_mode best_mode = VOIDmode;
6405
6406 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
6407 mode != TImode;
6408 mode = GET_MODE_WIDER_MODE (mode))
6409 if (HARD_REGNO_MODE_OK (regno, mode))
6410 best_mode = mode;
6411
6412 if (best_mode == VOIDmode)
6413 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
6414 mode != VOIDmode;
6415 mode = GET_MODE_WIDER_MODE (mode))
6416 if (HARD_REGNO_MODE_OK (regno, mode)
6417 && (mov_optab->handlers[(int) mode].insn_code
6418 != CODE_FOR_nothing))
6419 best_mode = mode;
6420
6421 mode = best_mode;
6422 if (mode == VOIDmode)
6423 abort ();
6424
6425 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6426 if (size % align != 0)
6427 size = CEIL (size, align) * align;
6428 size += GET_MODE_SIZE (mode);
6429 apply_result_mode[regno] = mode;
6430 }
6431 else
6432 apply_result_mode[regno] = VOIDmode;
6433
6434 /* Allow targets that use untyped_call and untyped_return to override
6435 the size so that machine-specific information can be stored here. */
6436#ifdef APPLY_RESULT_SIZE
6437 size = APPLY_RESULT_SIZE;
6438#endif
6439 }
6440 return size;
6441}
6442
6443#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
6444/* Create a vector describing the result block RESULT. If SAVEP is true,
6445 the result block is used to save the values; otherwise it is used to
6446 restore the values. */
6447static rtx
6448result_vector (savep, result)
6449 int savep;
6450 rtx result;
6451{
6452 int regno, size, align, nelts;
6453 enum machine_mode mode;
6454 rtx reg, mem;
6455 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
6456
6457 size = nelts = 0;
6458 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6459 if ((mode = apply_result_mode[regno]) != VOIDmode)
6460 {
6461 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6462 if (size % align != 0)
6463 size = CEIL (size, align) * align;
6464 reg = gen_rtx (REG, mode, savep ? INCOMING_REGNO (regno) : regno);
6465 mem = change_address (result, mode,
6466 plus_constant (XEXP (result, 0), size));
6467 savevec[nelts++] = (savep
6468 ? gen_rtx (SET, VOIDmode, mem, reg)
6469 : gen_rtx (SET, VOIDmode, reg, mem));
6470 size += GET_MODE_SIZE (mode);
6471 }
6472 return gen_rtx (PARALLEL, VOIDmode, gen_rtvec_v (nelts, savevec));
6473}
6474#endif /* HAVE_untyped_call or HAVE_untyped_return */
6475
6476
6477/* Save the state required to perform an untyped call with the same
6478 arguments as were passed to the current function. */
6479static rtx
6480expand_builtin_apply_args ()
6481{
6482 rtx registers;
6483 int size, align, regno;
6484 enum machine_mode mode;
6485
6486 /* Create a block where the arg-pointer, structure value address,
6487 and argument registers can be saved. */
6488 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
6489
6490 /* Walk past the arg-pointer and structure value address. */
6491 size = GET_MODE_SIZE (Pmode);
6492 if (struct_value_rtx)
6493 size += GET_MODE_SIZE (Pmode);
6494
6495 /* Save each register used in calling a function to the block. */
6496 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6497 if ((mode = apply_args_mode[regno]) != VOIDmode)
6498 {
6499 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6500 if (size % align != 0)
6501 size = CEIL (size, align) * align;
6502 emit_move_insn (change_address (registers, mode,
6503 plus_constant (XEXP (registers, 0),
6504 size)),
6505 gen_rtx (REG, mode, INCOMING_REGNO (regno)));
6506 size += GET_MODE_SIZE (mode);
6507 }
6508
6509 /* Save the arg pointer to the block. */
6510 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
6511 copy_to_reg (virtual_incoming_args_rtx));
6512 size = GET_MODE_SIZE (Pmode);
6513
6514 /* Save the structure value address unless this is passed as an
6515 "invisible" first argument. */
6516 if (struct_value_incoming_rtx)
6517 {
6518 emit_move_insn (change_address (registers, Pmode,
6519 plus_constant (XEXP (registers, 0),
6520 size)),
6521 copy_to_reg (struct_value_incoming_rtx));
6522 size += GET_MODE_SIZE (Pmode);
6523 }
6524
6525 /* Return the address of the block. */
6526 return copy_addr_to_reg (XEXP (registers, 0));
6527}
6528
6529/* Perform an untyped call and save the state required to perform an
6530 untyped return of whatever value was returned by the given function. */
6531static rtx
6532expand_builtin_apply (function, arguments, argsize)
6533 rtx function, arguments, argsize;
6534{
6535 int size, align, regno;
6536 enum machine_mode mode;
6537 rtx incoming_args, result, reg, dest, call_insn;
6538 rtx old_stack_level = 0;
6539 rtx use_insns = 0;
6540
6541 /* Create a block where the return registers can be saved. */
6542 result = assign_stack_local (BLKmode, apply_result_size (), -1);
6543
6544 /* ??? The argsize value should be adjusted here. */
6545
6546 /* Fetch the arg pointer from the ARGUMENTS block. */
6547 incoming_args = gen_reg_rtx (Pmode);
6548 emit_move_insn (incoming_args,
6549 gen_rtx (MEM, Pmode, arguments));
6550#ifndef STACK_GROWS_DOWNWARD
6551 incoming_args = expand_binop (Pmode, add_optab, incoming_args, argsize,
6552 incoming_args, 0, OPTAB_LIB_WIDEN);
6553#endif
6554
6555 /* Perform postincrements before actually calling the function. */
6556 emit_queue ();
6557
6558 /* Push a new argument block and copy the arguments. */
6559 do_pending_stack_adjust ();
6560 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
6561
6562 /* Push a block of memory onto the stack to store the memory arguments.
6563 Save the address in a register, and copy the memory arguments. ??? I
6564	 haven't figured out how the calling convention macros affect this,
6565 but it's likely that the source and/or destination addresses in
6566 the block copy will need updating in machine specific ways. */
6567 dest = copy_addr_to_reg (push_block (argsize, 0, 0));
6568 emit_block_move (gen_rtx (MEM, BLKmode, dest),
6569 gen_rtx (MEM, BLKmode, incoming_args),
6570 argsize,
6571 PARM_BOUNDARY / BITS_PER_UNIT);
6572
6573 /* Refer to the argument block. */
6574 apply_args_size ();
6575 arguments = gen_rtx (MEM, BLKmode, arguments);
6576
6577 /* Walk past the arg-pointer and structure value address. */
6578 size = GET_MODE_SIZE (Pmode);
6579 if (struct_value_rtx)
6580 size += GET_MODE_SIZE (Pmode);
6581
6582 /* Restore each of the registers previously saved. Make USE insns
6583 for each of these registers for use in making the call. */
6584 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6585 if ((mode = apply_args_mode[regno]) != VOIDmode)
6586 {
6587 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6588 if (size % align != 0)
6589 size = CEIL (size, align) * align;
6590 reg = gen_rtx (REG, mode, regno);
6591 emit_move_insn (reg,
6592 change_address (arguments, mode,
6593 plus_constant (XEXP (arguments, 0),
6594 size)));
6595
6596 push_to_sequence (use_insns);
6597 emit_insn (gen_rtx (USE, VOIDmode, reg));
6598 use_insns = get_insns ();
6599 end_sequence ();
6600 size += GET_MODE_SIZE (mode);
6601 }
6602
6603 /* Restore the structure value address unless this is passed as an
6604 "invisible" first argument. */
6605 size = GET_MODE_SIZE (Pmode);
6606 if (struct_value_rtx)
6607 {
6608 rtx value = gen_reg_rtx (Pmode);
6609 emit_move_insn (value,
6610 change_address (arguments, Pmode,
6611 plus_constant (XEXP (arguments, 0),
6612 size)));
6613 emit_move_insn (struct_value_rtx, value);
6614 if (GET_CODE (struct_value_rtx) == REG)
6615 {
6616 push_to_sequence (use_insns);
6617 emit_insn (gen_rtx (USE, VOIDmode, struct_value_rtx));
6618 use_insns = get_insns ();
6619 end_sequence ();
6620 }
6621 size += GET_MODE_SIZE (Pmode);
6622 }
6623
6624 /* All arguments and registers used for the call are set up by now! */
6625 function = prepare_call_address (function, NULL_TREE, &use_insns);
6626
6627	 /* Ensure the address is valid. A SYMBOL_REF is already valid, and we
6628	 don't want to load it into a register as an optimization, because
6629	 prepare_call_address already did that if it should be done. */
6630 if (GET_CODE (function) != SYMBOL_REF)
6631 function = memory_address (FUNCTION_MODE, function);
6632
6633 /* Generate the actual call instruction and save the return value. */
6634#ifdef HAVE_untyped_call
6635 if (HAVE_untyped_call)
6636 emit_call_insn (gen_untyped_call (gen_rtx (MEM, FUNCTION_MODE, function),
6637 result, result_vector (1, result)));
6638 else
6639#endif
6640#ifdef HAVE_call_value
6641 if (HAVE_call_value)
6642 {
6643 rtx valreg = 0;
6644
6645 /* Locate the unique return register. It is not possible to
6646 express a call that sets more than one return register using
6647 call_value; use untyped_call for that. In fact, untyped_call
6648 only needs to save the return registers in the given block. */
6649 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6650 if ((mode = apply_result_mode[regno]) != VOIDmode)
6651 {
6652 if (valreg)
6653 abort (); /* HAVE_untyped_call required. */
6654 valreg = gen_rtx (REG, mode, regno);
6655 }
6656
6657 emit_call_insn (gen_call_value (valreg,
6658 gen_rtx (MEM, FUNCTION_MODE, function),
6659 const0_rtx, NULL_RTX, const0_rtx));
6660
6661 emit_move_insn (change_address (result, GET_MODE (valreg),
6662 XEXP (result, 0)),
6663 valreg);
6664 }
6665 else
6666#endif
6667 abort ();
6668
6669 /* Find the CALL insn we just emitted and write the USE insns before it. */
6670 for (call_insn = get_last_insn ();
6671 call_insn && GET_CODE (call_insn) != CALL_INSN;
6672 call_insn = PREV_INSN (call_insn))
6673 ;
6674
6675 if (! call_insn)
6676 abort ();
6677
6678 /* Put the USE insns before the CALL. */
6679 emit_insns_before (use_insns, call_insn);
6680
6681 /* Restore the stack. */
6682 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
6683
6684 /* Return the address of the result block. */
6685 return copy_addr_to_reg (XEXP (result, 0));
6686}
6687
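/* A minimal usage sketch (illustrative only, not part of this file): at the
   source level, the machinery above is what backs GCC's __builtin_apply_args
   and __builtin_apply, and expand_builtin_return below backs
   __builtin_return.  The function name `target_fn' and the 64-byte
   argument-block size here are hypothetical placeholders.  */
#if 0
extern int target_fn ();

int
forwarder ()
{
  void *args = __builtin_apply_args ();	/* capture our incoming arguments */
  void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
  __builtin_return (result);		/* return whatever target_fn returned */
}
#endif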
6688/* Perform an untyped return. */
6689static void
6690expand_builtin_return (result)
6691 rtx result;
6692{
6693 int size, align, regno;
6694 enum machine_mode mode;
6695 rtx reg;
6696 rtx use_insns = 0;
6697
6698 apply_result_size ();
6699 result = gen_rtx (MEM, BLKmode, result);
6700
6701#ifdef HAVE_untyped_return
6702 if (HAVE_untyped_return)
6703 {
6704 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
6705 emit_barrier ();
6706 return;
6707 }
6708#endif
6709
6710 /* Restore the return value and note that each value is used. */
6711 size = 0;
6712 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
6713 if ((mode = apply_result_mode[regno]) != VOIDmode)
6714 {
6715 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
6716 if (size % align != 0)
6717 size = CEIL (size, align) * align;
6718 reg = gen_rtx (REG, mode, INCOMING_REGNO (regno));
6719 emit_move_insn (reg,
6720 change_address (result, mode,
6721 plus_constant (XEXP (result, 0),
6722 size)));
6723
6724 push_to_sequence (use_insns);
6725 emit_insn (gen_rtx (USE, VOIDmode, reg));
6726 use_insns = get_insns ();
6727 end_sequence ();
6728 size += GET_MODE_SIZE (mode);
6729 }
6730
6731 /* Put the USE insns before the return. */
6732 emit_insns (use_insns);
6733
6734 /* Return whatever values were restored by jumping directly to the end
6735 of the function. */
6736 expand_null_return ();
6737}
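/* A tiny self-contained model (illustrative sketch, not compiler code) of
   the offset rounding used by the two restore loops above: before each
   register image is fetched, SIZE is rounded up to the mode's alignment,
   exactly as CEIL (size, align) * align does.  */
#if 0
static int
round_up_to_align (size, align)
     int size, align;
{
  return ((size + align - 1) / align) * align;
}
/* round_up_to_align (6, 4) == 8; round_up_to_align (8, 4) == 8.  */
#endif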
6738\f
bbf6f052
RK
6739/* Expand code for a post- or pre- increment or decrement
6740 and return the RTX for the result.
6741 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6742
6743static rtx
6744expand_increment (exp, post)
6745 register tree exp;
6746 int post;
6747{
6748 register rtx op0, op1;
6749 register rtx temp, value;
6750 register tree incremented = TREE_OPERAND (exp, 0);
6751 optab this_optab = add_optab;
6752 int icode;
6753 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6754 int op0_is_copy = 0;
6755
6756 /* Stabilize any component ref that might need to be
6757 evaluated more than once below. */
ca300798
RS
6758 if (!post
6759 || TREE_CODE (incremented) == BIT_FIELD_REF
bbf6f052
RK
6760 || (TREE_CODE (incremented) == COMPONENT_REF
6761 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6762 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6763 incremented = stabilize_reference (incremented);
6764
6765 /* Compute the operands as RTX.
6766 Note whether OP0 is the actual lvalue or a copy of it:
94a58076 6767 I believe it is a copy iff it is a register or subreg
1499e0a8
RK
6768 and insns were generated in computing it. */
6769
bbf6f052 6770 temp = get_last_insn ();
906c4e36 6771 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
1499e0a8
RK
6772
6773 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6774 in place but instead must do sign- or zero-extension during assignment,
6775 so we copy it into a new register and let the code below use it as
6776 a copy.
6777
6778 Note that we can safely modify this SUBREG since it is known not to be
6779 shared (it was made by the expand_expr call above). */
6780
6781 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6782 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6783
94a58076
RS
6784 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6785 && temp != get_last_insn ());
906c4e36 6786 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6787
6788 /* Decide whether incrementing or decrementing. */
6789 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6790 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6791 this_optab = sub_optab;
6792
6793 /* If OP0 is not the actual lvalue, but rather a copy in a register,
ca300798
RS
6794 then we cannot just increment OP0. We must therefore contrive to
6795 increment the original value. Then, for postincrement, we can return
6796 OP0 since it is a copy of the old value. For preincrement, we want
6797 to always expand here, since this generates better or equivalent code. */
6798 if (!post || op0_is_copy)
bbf6f052
RK
6799 {
6800 /* This is the easiest way to increment the value wherever it is.
ca300798
RS
6801 Problems with multiple evaluation of INCREMENTED are prevented
6802 because either (1) it is a component_ref or preincrement,
bbf6f052
RK
6803 in which case it was stabilized above, or (2) it is an array_ref
6804 with constant index in an array in a register, which is
6805 safe to reevaluate. */
6806 tree newexp = build ((this_optab == add_optab
6807 ? PLUS_EXPR : MINUS_EXPR),
6808 TREE_TYPE (exp),
6809 incremented,
6810 TREE_OPERAND (exp, 1));
6811 temp = expand_assignment (incremented, newexp, ! post, 0);
6812 return post ? op0 : temp;
6813 }
6814
6815 /* Convert decrement by a constant into a negative increment. */
6816 if (this_optab == sub_optab
6817 && GET_CODE (op1) == CONST_INT)
6818 {
906c4e36 6819 op1 = GEN_INT (- INTVAL (op1));
bbf6f052
RK
6820 this_optab = add_optab;
6821 }
6822
6823 if (post)
6824 {
6825 /* We have a true reference to the value in OP0.
6826 If there is an insn to add or subtract in this mode, queue it. */
6827
6828#if 0 /* Turned off to avoid making extra insn for indexed memref. */
6829 op0 = stabilize (op0);
6830#endif
6831
6832 icode = (int) this_optab->handlers[(int) mode].insn_code;
6833 if (icode != (int) CODE_FOR_nothing
6834 /* Make sure that OP0 is valid for operands 0 and 1
6835 of the insn we want to queue. */
6836 && (*insn_operand_predicate[icode][0]) (op0, mode)
6837 && (*insn_operand_predicate[icode][1]) (op0, mode))
6838 {
6839 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6840 op1 = force_reg (mode, op1);
6841
6842 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6843 }
6844 }
6845
6846 /* Preincrement, or we can't increment with one simple insn. */
6847 if (post)
6848 /* Save a copy of the value before inc or dec, to return it later. */
6849 temp = value = copy_to_reg (op0);
6850 else
6851 /* Arrange to return the incremented value. */
6852 /* Copy the rtx because expand_binop will protect from the queue,
6853 and the results of that would be invalid for us to return
6854 if our caller does emit_queue before using our result. */
6855 temp = copy_rtx (value = op0);
6856
6857 /* Increment however we can. */
6858 op1 = expand_binop (mode, this_optab, value, op1, op0,
6859 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6860 /* Make sure the value is stored into OP0. */
6861 if (op1 != op0)
6862 emit_move_insn (op0, op1);
6863
6864 return temp;
6865}
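/* A source-level model (illustrative sketch only) of the two results
   computed above for an int lvalue: postincrement returns a copy of the
   old value, preincrement returns the updated value itself.  */
#if 0
static int
model_postinc (p)
     int *p;
{
  int old = *p;			/* temp = copy_to_reg (op0) */
  *p = *p + 1;			/* the add, possibly queued until emit_queue */
  return old;			/* postincrement yields the original value */
}

static int
model_preinc (p)
     int *p;
{
  *p = *p + 1;
  return *p;			/* arrange to return the incremented value */
}
#endif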
6866\f
6867/* Expand all function calls contained within EXP, innermost ones first.
6868 But don't look within expressions that have sequence points.
6869 For each CALL_EXPR, record the rtx for its value
6870 in the CALL_EXPR_RTL field. */
6871
6872static void
6873preexpand_calls (exp)
6874 tree exp;
6875{
6876 register int nops, i;
6877 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6878
6879 if (! do_preexpand_calls)
6880 return;
6881
6882 /* Only expressions and references can contain calls. */
6883
6884 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6885 return;
6886
6887 switch (TREE_CODE (exp))
6888 {
6889 case CALL_EXPR:
6890 /* Do nothing if already expanded. */
6891 if (CALL_EXPR_RTL (exp) != 0)
6892 return;
6893
6894 /* Do nothing to built-in functions. */
6895 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6896 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6897 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
906c4e36 6898 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
bbf6f052
RK
6899 return;
6900
6901 case COMPOUND_EXPR:
6902 case COND_EXPR:
6903 case TRUTH_ANDIF_EXPR:
6904 case TRUTH_ORIF_EXPR:
6905 /* If we find one of these, then we can be sure
6906 the adjust will be done for it (since it makes jumps).
6907 Do it now, so that if this is inside an argument
6908 of a function, we don't get the stack adjustment
6909 after some other args have already been pushed. */
6910 do_pending_stack_adjust ();
6911 return;
6912
6913 case BLOCK:
6914 case RTL_EXPR:
6915 case WITH_CLEANUP_EXPR:
6916 return;
6917
6918 case SAVE_EXPR:
6919 if (SAVE_EXPR_RTL (exp) != 0)
6920 return;
6921 }
6922
6923 nops = tree_code_length[(int) TREE_CODE (exp)];
6924 for (i = 0; i < nops; i++)
6925 if (TREE_OPERAND (exp, i) != 0)
6926 {
6927 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6928 if (type == 'e' || type == '<' || type == '1' || type == '2'
6929 || type == 'r')
6930 preexpand_calls (TREE_OPERAND (exp, i));
6931 }
6932}
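/* A loose model (illustrative sketch) of the walk above, over a toy tree
   type; the real code walks GCC trees through TREE_OPERAND and hands each
   CALL_EXPR to expand_call.  Operands are visited recursively, so the
   innermost calls are expanded before any enclosing expression that uses
   their values.  */
#if 0
struct toy_node { int is_call; struct toy_node *kid[2]; };

static void
toy_preexpand (n)
     struct toy_node *n;
{
  int i;
  if (n == 0)
    return;
  for (i = 0; i < 2; i++)
    toy_preexpand (n->kid[i]);	/* operands first: innermost calls win */
  if (n->is_call)
    ;				/* here the real code calls expand_call */
}
#endif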
6933\f
6934/* At the start of a function, record that we have no previously-pushed
6935 arguments waiting to be popped. */
6936
6937void
6938init_pending_stack_adjust ()
6939{
6940 pending_stack_adjust = 0;
6941}
6942
6943/* When exiting from function, if safe, clear out any pending stack adjust
6944 so the adjustment won't get done. */
6945
6946void
6947clear_pending_stack_adjust ()
6948{
6949#ifdef EXIT_IGNORE_STACK
6950 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
81feeecb 6951 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
bbf6f052
RK
6952 && ! flag_inline_functions)
6953 pending_stack_adjust = 0;
6954#endif
6955}
6956
6957/* Pop any previously-pushed arguments that have not been popped yet. */
6958
6959void
6960do_pending_stack_adjust ()
6961{
6962 if (inhibit_defer_pop == 0)
6963 {
6964 if (pending_stack_adjust != 0)
906c4e36 6965 adjust_stack (GEN_INT (pending_stack_adjust));
bbf6f052
RK
6966 pending_stack_adjust = 0;
6967 }
6968}
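/* A minimal model (illustrative sketch) of the deferral these routines
   implement: argument pops from consecutive calls are accumulated and
   either emitted as one combined stack adjustment or discarded entirely at
   function exit.  The `sp' parameter stands in for the real stack
   pointer.  */
#if 0
static int pending;			/* stands in for pending_stack_adjust */

static void
defer_pop (nbytes)
     int nbytes;
{
  pending += nbytes;			/* the call's pop is not emitted yet */
}

static void
flush_pending_pops (sp)
     int *sp;
{
  *sp += pending;			/* one adjust_stack for many calls */
  pending = 0;
}
#endif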
6969
6970/* Expand all cleanups up to OLD_CLEANUPS.
6971 Needed here, and also for language-dependent calls. */
6972
6973void
6974expand_cleanups_to (old_cleanups)
6975 tree old_cleanups;
6976{
6977 while (cleanups_this_call != old_cleanups)
6978 {
906c4e36 6979 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6980 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6981 }
6982}
6983\f
6984/* Expand conditional expressions. */
6985
6986/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6987 LABEL is an rtx of code CODE_LABEL, in this function and all the
6988 functions here. */
6989
6990void
6991jumpifnot (exp, label)
6992 tree exp;
6993 rtx label;
6994{
906c4e36 6995 do_jump (exp, label, NULL_RTX);
bbf6f052
RK
6996}
6997
6998/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
6999
7000void
7001jumpif (exp, label)
7002 tree exp;
7003 rtx label;
7004{
906c4e36 7005 do_jump (exp, NULL_RTX, label);
bbf6f052
RK
7006}
7007
7008/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
7009 the result is zero, or IF_TRUE_LABEL if the result is one.
7010 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
7011 meaning fall through in that case.
7012
e7c33f54
RK
7013 do_jump always does any pending stack adjust except when it does not
7014 actually perform a jump. An example where there is no jump
7015 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
7016
bbf6f052
RK
7017 This function is responsible for optimizing cases such as
7018 &&, || and comparison operators in EXP. */
7019
7020void
7021do_jump (exp, if_false_label, if_true_label)
7022 tree exp;
7023 rtx if_false_label, if_true_label;
7024{
7025 register enum tree_code code = TREE_CODE (exp);
7026 /* Some cases need to create a label to jump to
7027 in order to properly fall through.
7028 These cases set DROP_THROUGH_LABEL nonzero. */
7029 rtx drop_through_label = 0;
7030 rtx temp;
7031 rtx comparison = 0;
7032 int i;
7033 tree type;
7034
7035 emit_queue ();
7036
7037 switch (code)
7038 {
7039 case ERROR_MARK:
7040 break;
7041
7042 case INTEGER_CST:
7043 temp = integer_zerop (exp) ? if_false_label : if_true_label;
7044 if (temp)
7045 emit_jump (temp);
7046 break;
7047
7048#if 0
7049 /* This is not true with #pragma weak */
7050 case ADDR_EXPR:
7051 /* The address of something can never be zero. */
7052 if (if_true_label)
7053 emit_jump (if_true_label);
7054 break;
7055#endif
7056
7057 case NOP_EXPR:
7058 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
7059 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
7060 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
7061 goto normal;
7062 case CONVERT_EXPR:
7063 /* If we are narrowing the operand, we have to do the compare in the
7064 narrower mode. */
7065 if ((TYPE_PRECISION (TREE_TYPE (exp))
7066 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7067 goto normal;
7068 case NON_LVALUE_EXPR:
7069 case REFERENCE_EXPR:
7070 case ABS_EXPR:
7071 case NEGATE_EXPR:
7072 case LROTATE_EXPR:
7073 case RROTATE_EXPR:
7074 /* These cannot change zero->non-zero or vice versa. */
7075 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7076 break;
7077
7078#if 0
7079 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
7080 a test and can be longer if the test is eliminated. */
7081 case PLUS_EXPR:
7082 /* Reduce to minus. */
7083 exp = build (MINUS_EXPR, TREE_TYPE (exp),
7084 TREE_OPERAND (exp, 0),
7085 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
7086 TREE_OPERAND (exp, 1))));
7087 /* Process as MINUS. */
7088#endif
7089
7090 case MINUS_EXPR:
7091 /* Non-zero iff operands of minus differ. */
7092 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
7093 TREE_OPERAND (exp, 0),
7094 TREE_OPERAND (exp, 1)),
7095 NE, NE);
7096 break;
7097
7098 case BIT_AND_EXPR:
7099 /* If we are AND'ing with a small constant, do this comparison in the
7100 smallest type that fits. If the machine doesn't have comparisons
7101 that small, it will be converted back to the wider comparison.
7102 This helps if we are testing the sign bit of a narrower object.
7103 combine can't do this for us because it can't know whether a
7104 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
7105
08af8e09
RK
7106 if (! SLOW_BYTE_ACCESS
7107 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 7108 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
bbf6f052
RK
7109 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
7110 && (type = type_for_size (i + 1, 1)) != 0
08af8e09
RK
7111 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7112 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7113 != CODE_FOR_nothing))
bbf6f052
RK
7114 {
7115 do_jump (convert (type, exp), if_false_label, if_true_label);
7116 break;
7117 }
7118 goto normal;
7119
7120 case TRUTH_NOT_EXPR:
7121 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7122 break;
7123
7124 case TRUTH_ANDIF_EXPR:
7125 if (if_false_label == 0)
7126 if_false_label = drop_through_label = gen_label_rtx ();
906c4e36 7127 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
bbf6f052
RK
7128 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7129 break;
7130
7131 case TRUTH_ORIF_EXPR:
7132 if (if_true_label == 0)
7133 if_true_label = drop_through_label = gen_label_rtx ();
906c4e36 7134 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
bbf6f052
RK
7135 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7136 break;
7137
7138 case COMPOUND_EXPR:
7139 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7140 free_temp_slots ();
7141 emit_queue ();
e7c33f54 7142 do_pending_stack_adjust ();
bbf6f052
RK
7143 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7144 break;
7145
7146 case COMPONENT_REF:
7147 case BIT_FIELD_REF:
7148 case ARRAY_REF:
7149 {
7150 int bitsize, bitpos, unsignedp;
7151 enum machine_mode mode;
7152 tree type;
7bb0943f 7153 tree offset;
bbf6f052
RK
7154 int volatilep = 0;
7155
7156 /* Get description of this reference. We don't actually care
7157 about the underlying object here. */
7bb0943f
RS
7158 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7159 &mode, &unsignedp, &volatilep);
bbf6f052
RK
7160
7161 type = type_for_size (bitsize, unsignedp);
08af8e09
RK
7162 if (! SLOW_BYTE_ACCESS
7163 && type != 0 && bitsize >= 0
7164 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7165 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7166 != CODE_FOR_nothing))
bbf6f052
RK
7167 {
7168 do_jump (convert (type, exp), if_false_label, if_true_label);
7169 break;
7170 }
7171 goto normal;
7172 }
7173
7174 case COND_EXPR:
7175 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7176 if (integer_onep (TREE_OPERAND (exp, 1))
7177 && integer_zerop (TREE_OPERAND (exp, 2)))
7178 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7179
7180 else if (integer_zerop (TREE_OPERAND (exp, 1))
7181 && integer_onep (TREE_OPERAND (exp, 2)))
7182 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7183
7184 else
7185 {
7186 register rtx label1 = gen_label_rtx ();
7187 drop_through_label = gen_label_rtx ();
906c4e36 7188 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
bbf6f052
RK
7189 /* Now the THEN-expression. */
7190 do_jump (TREE_OPERAND (exp, 1),
7191 if_false_label ? if_false_label : drop_through_label,
7192 if_true_label ? if_true_label : drop_through_label);
e7c33f54
RK
7193 /* In case the do_jump just above never jumps. */
7194 do_pending_stack_adjust ();
bbf6f052
RK
7195 emit_label (label1);
7196 /* Now the ELSE-expression. */
7197 do_jump (TREE_OPERAND (exp, 2),
7198 if_false_label ? if_false_label : drop_through_label,
7199 if_true_label ? if_true_label : drop_through_label);
7200 }
7201 break;
7202
7203 case EQ_EXPR:
7204 if (integer_zerop (TREE_OPERAND (exp, 1)))
7205 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7206 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7207 == MODE_INT)
7208 &&
7209 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7210 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7211 else
7212 comparison = compare (exp, EQ, EQ);
7213 break;
7214
7215 case NE_EXPR:
7216 if (integer_zerop (TREE_OPERAND (exp, 1)))
7217 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7218 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7219 == MODE_INT)
7220 &&
7221 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7222 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7223 else
7224 comparison = compare (exp, NE, NE);
7225 break;
7226
7227 case LT_EXPR:
7228 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7229 == MODE_INT)
7230 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7231 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7232 else
7233 comparison = compare (exp, LT, LTU);
7234 break;
7235
7236 case LE_EXPR:
7237 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7238 == MODE_INT)
7239 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7240 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7241 else
7242 comparison = compare (exp, LE, LEU);
7243 break;
7244
7245 case GT_EXPR:
7246 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7247 == MODE_INT)
7248 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7249 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7250 else
7251 comparison = compare (exp, GT, GTU);
7252 break;
7253
7254 case GE_EXPR:
7255 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7256 == MODE_INT)
7257 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7258 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7259 else
7260 comparison = compare (exp, GE, GEU);
7261 break;
7262
7263 default:
7264 normal:
906c4e36 7265 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7266#if 0
7267 /* This is not needed any more and causes poor code since it causes
7268 comparisons and tests from non-SI objects to have different code
7269 sequences. */
7270 /* Copy to register to avoid generating bad insns by cse
7271 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7272 if (!cse_not_expected && GET_CODE (temp) == MEM)
7273 temp = copy_to_reg (temp);
7274#endif
7275 do_pending_stack_adjust ();
7276 if (GET_CODE (temp) == CONST_INT)
7277 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7278 else if (GET_CODE (temp) == LABEL_REF)
7279 comparison = const_true_rtx;
7280 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7281 && !can_compare_p (GET_MODE (temp)))
7282 /* Note swapping the labels gives us not-equal. */
7283 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7284 else if (GET_MODE (temp) != VOIDmode)
7285 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
cd1b4b44
RK
7286 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7287 GET_MODE (temp), NULL_RTX, 0);
bbf6f052
RK
7288 else
7289 abort ();
7290 }
7291
7292 /* Do any postincrements in the expression that was tested. */
7293 emit_queue ();
7294
7295 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7296 straight into a conditional jump instruction as the jump condition.
7297 Otherwise, all the work has been done already. */
7298
7299 if (comparison == const_true_rtx)
7300 {
7301 if (if_true_label)
7302 emit_jump (if_true_label);
7303 }
7304 else if (comparison == const0_rtx)
7305 {
7306 if (if_false_label)
7307 emit_jump (if_false_label);
7308 }
7309 else if (comparison)
7310 do_jump_for_compare (comparison, if_false_label, if_true_label);
7311
7312 free_temp_slots ();
7313
7314 if (drop_through_label)
e7c33f54
RK
7315 {
7316 /* If do_jump produces code that might be jumped around,
7317 do any stack adjusts from that code, before the place
7318 where control merges in. */
7319 do_pending_stack_adjust ();
7320 emit_label (drop_through_label);
7321 }
bbf6f052
RK
7322}
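/* A goto-level model (illustrative sketch) of the control flow the
   TRUTH_ANDIF_EXPR case above produces for `a && b': each operand jumps
   straight to the false label, so `b' is never evaluated once `a' is known
   to be zero.  TRUTH_ORIF_EXPR is the mirror image using the true
   label.  */
#if 0
static int
model_andif (a, b)
     int a, b;
{
  if (a == 0)
    goto if_false;	/* do_jump (TREE_OPERAND (exp, 0), if_false_label, 0) */
  if (b == 0)
    goto if_false;	/* do_jump (TREE_OPERAND (exp, 1), ...) */
  return 1;
 if_false:
  return 0;
}
#endif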
7323\f
7324/* Given a comparison expression EXP for values too wide to be compared
7325 with one insn, test the comparison and jump to the appropriate label.
7326 The code of EXP is ignored; we always test GT if SWAP is 0,
7327 and LT if SWAP is 1. */
7328
7329static void
7330do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7331 tree exp;
7332 int swap;
7333 rtx if_false_label, if_true_label;
7334{
906c4e36
RK
7335 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
7336 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7337 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7338 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7339 rtx drop_through_label = 0;
7340 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
7341 int i;
7342
7343 if (! if_true_label || ! if_false_label)
7344 drop_through_label = gen_label_rtx ();
7345 if (! if_true_label)
7346 if_true_label = drop_through_label;
7347 if (! if_false_label)
7348 if_false_label = drop_through_label;
7349
7350 /* Compare a word at a time, high order first. */
f81497d9
RS
7351 for (i = 0; i < nwords; i++)
7352 {
7353 rtx comp;
7354 rtx op0_word, op1_word;
7355
7356 if (WORDS_BIG_ENDIAN)
7357 {
7358 op0_word = operand_subword_force (op0, i, mode);
7359 op1_word = operand_subword_force (op1, i, mode);
7360 }
7361 else
7362 {
7363 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7364 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7365 }
7366
7367 /* All but high-order word must be compared as unsigned. */
7368 comp = compare_from_rtx (op0_word, op1_word,
7369 (unsignedp || i > 0) ? GTU : GT,
7370 unsignedp, word_mode, NULL_RTX, 0);
7371 if (comp == const_true_rtx)
7372 emit_jump (if_true_label);
7373 else if (comp != const0_rtx)
7374 do_jump_for_compare (comp, NULL_RTX, if_true_label);
7375
7376 /* Consider lower words only if these are equal. */
7377 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
7378 NULL_RTX, 0);
7379 if (comp == const_true_rtx)
7380 emit_jump (if_false_label);
7381 else if (comp != const0_rtx)
7382 do_jump_for_compare (comp, NULL_RTX, if_false_label);
7383 }
7384
7385 if (if_false_label)
7386 emit_jump (if_false_label);
7387 if (drop_through_label)
7388 emit_label (drop_through_label);
7389}
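/* A self-contained model (illustrative sketch) of the word-at-a-time loop
   above, for the unsigned case and a two-word value stored high-order word
   first: only the high-order word honors the signedness of the comparison,
   every lower word is compared unsigned, and lower words matter only when
   all higher words are equal.  */
#if 0
static int
model_gtu_2words (op0, op1)
     unsigned long op0[], op1[];	/* element 0 is the high-order word */
{
  int i;
  for (i = 0; i < 2; i++)
    {
      if (op0[i] > op1[i])
	return 1;			/* jump to if_true_label */
      if (op0[i] != op1[i])
	return 0;			/* jump to if_false_label */
    }
  return 0;				/* equal throughout: not greater */
}
#endif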
7390
7391/* Compare OP0 with OP1, word at a time, in mode MODE.
7392 UNSIGNEDP says to do unsigned comparison.
7393 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
7394
7395static void
7396do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
7397 enum machine_mode mode;
7398 int unsignedp;
7399 rtx op0, op1;
7400 rtx if_false_label, if_true_label;
7401{
7402 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7403 rtx drop_through_label = 0;
7404 int i;
7405
7406 if (! if_true_label || ! if_false_label)
7407 drop_through_label = gen_label_rtx ();
7408 if (! if_true_label)
7409 if_true_label = drop_through_label;
7410 if (! if_false_label)
7411 if_false_label = drop_through_label;
7412
7413 /* Compare a word at a time, high order first. */
bbf6f052
RK
7414 for (i = 0; i < nwords; i++)
7415 {
7416 rtx comp;
7417 rtx op0_word, op1_word;
7418
7419 if (WORDS_BIG_ENDIAN)
7420 {
7421 op0_word = operand_subword_force (op0, i, mode);
7422 op1_word = operand_subword_force (op1, i, mode);
7423 }
7424 else
7425 {
7426 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7427 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7428 }
7429
7430 /* All but high-order word must be compared as unsigned. */
7431 comp = compare_from_rtx (op0_word, op1_word,
7432 (unsignedp || i > 0) ? GTU : GT,
906c4e36 7433 unsignedp, word_mode, NULL_RTX, 0);
bbf6f052
RK
7434 if (comp == const_true_rtx)
7435 emit_jump (if_true_label);
7436 else if (comp != const0_rtx)
906c4e36 7437 do_jump_for_compare (comp, NULL_RTX, if_true_label);
bbf6f052
RK
7438
7439 /* Consider lower words only if these are equal. */
7440 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
906c4e36 7441 NULL_RTX, 0);
bbf6f052
RK
7442 if (comp == const_true_rtx)
7443 emit_jump (if_false_label);
7444 else if (comp != const0_rtx)
906c4e36 7445 do_jump_for_compare (comp, NULL_RTX, if_false_label);
bbf6f052
RK
7446 }
7447
7448 if (if_false_label)
7449 emit_jump (if_false_label);
7450 if (drop_through_label)
7451 emit_label (drop_through_label);
7452}
7453
7454/* Given an EQ_EXPR expression EXP for values too wide to be compared
7455 with one insn, test the comparison and jump to the appropriate label. */
7456
7457static void
7458do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7459 tree exp;
7460 rtx if_false_label, if_true_label;
7461{
906c4e36
RK
7462 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7463 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7464 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7465 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7466 int i;
7467 rtx drop_through_label = 0;
7468
7469 if (! if_false_label)
7470 drop_through_label = if_false_label = gen_label_rtx ();
7471
7472 for (i = 0; i < nwords; i++)
7473 {
7474 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7475 operand_subword_force (op1, i, mode),
cd1b4b44
RK
7476 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
7477 word_mode, NULL_RTX, 0);
bbf6f052
RK
7478 if (comp == const_true_rtx)
7479 emit_jump (if_false_label);
7480 else if (comp != const0_rtx)
906c4e36 7481 do_jump_for_compare (comp, if_false_label, NULL_RTX);
bbf6f052
RK
7482 }
7483
7484 if (if_true_label)
7485 emit_jump (if_true_label);
7486 if (drop_through_label)
7487 emit_label (drop_through_label);
7488}
7489\f
7490/* Jump according to whether OP0 is 0.
7491 We assume that OP0 has an integer mode that is too wide
7492 for the available compare insns. */
7493
7494static void
7495do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7496 rtx op0;
7497 rtx if_false_label, if_true_label;
7498{
7499 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7500 int i;
7501 rtx drop_through_label = 0;
7502
7503 if (! if_false_label)
7504 drop_through_label = if_false_label = gen_label_rtx ();
7505
7506 for (i = 0; i < nwords; i++)
7507 {
7508 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7509 GET_MODE (op0)),
cd1b4b44 7510 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
bbf6f052
RK
7511 if (comp == const_true_rtx)
7512 emit_jump (if_false_label);
7513 else if (comp != const0_rtx)
906c4e36 7514 do_jump_for_compare (comp, if_false_label, NULL_RTX);
bbf6f052
RK
7515 }
7516
7517 if (if_true_label)
7518 emit_jump (if_true_label);
7519 if (drop_through_label)
7520 emit_label (drop_through_label);
7521}
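/* The matching model (illustrative sketch) for the two equality routines
   above: two values are equal -- or OP0 is zero -- exactly when every word
   compares equal, and any mismatching word jumps straight to the false
   label.  */
#if 0
static int
model_eq_nwords (op0, op1, nwords)
     unsigned long *op0, *op1;
     int nwords;
{
  int i;
  for (i = 0; i < nwords; i++)
    if (op0[i] != op1[i])
      return 0;		/* do_jump_for_compare (comp, if_false_label, 0) */
  return 1;		/* fell through every word: values are equal */
}
#endif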
7522
7523/* Given a comparison expression in rtl form, output conditional branches to
7524 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
7525
7526static void
7527do_jump_for_compare (comparison, if_false_label, if_true_label)
7528 rtx comparison, if_false_label, if_true_label;
7529{
7530 if (if_true_label)
7531 {
7532 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7533 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
7534 else
7535 abort ();
7536
7537 if (if_false_label)
7538 emit_jump (if_false_label);
7539 }
7540 else if (if_false_label)
7541 {
7542 rtx insn;
7543 rtx prev = PREV_INSN (get_last_insn ());
7544 rtx branch = 0;
7545
7546 /* Output the branch with the opposite condition. Then try to invert
7547 what is generated. If more than one insn is a branch, or if the
7548 branch is not the last insn written, abort. If we can't invert
7549 the branch, make a true label, redirect this jump to that,
7550 emit a jump to the false label and define the true label. */
7551
7552 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7553 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
7554 else
7555 abort ();
7556
7557 /* Here we get the insn before what was just emitted.
7558 On some machines, emitting the branch can discard
7559 the previous compare insn and emit a replacement. */
7560 if (prev == 0)
7561 /* If there's only one preceding insn... */
7562 insn = get_insns ();
7563 else
7564 insn = NEXT_INSN (prev);
7565
7566 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
7567 if (GET_CODE (insn) == JUMP_INSN)
7568 {
7569 if (branch)
7570 abort ();
7571 branch = insn;
7572 }
7573
7574 if (branch != get_last_insn ())
7575 abort ();
7576
7577 if (! invert_jump (branch, if_false_label))
7578 {
7579 if_true_label = gen_label_rtx ();
7580 redirect_jump (branch, if_true_label);
7581 emit_jump (if_false_label);
7582 emit_label (if_true_label);
7583 }
7584 }
7585}
7586\f
7587/* Generate code for a comparison expression EXP
7588 (including code to compute the values to be compared)
7589 and set (CC0) according to the result.
7590 SIGNED_CODE should be the rtx operation for this comparison for
7591 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7592
7593 We force a stack adjustment unless there are currently
7594 things pushed on the stack that aren't yet used. */
7595
7596static rtx
7597compare (exp, signed_code, unsigned_code)
7598 register tree exp;
7599 enum rtx_code signed_code, unsigned_code;
7600{
906c4e36
RK
7601 register rtx op0
7602 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7603 register rtx op1
7604 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7605 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7606 register enum machine_mode mode = TYPE_MODE (type);
7607 int unsignedp = TREE_UNSIGNED (type);
7608 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
7609
7610 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7611 ((mode == BLKmode)
906c4e36 7612 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
bbf6f052
RK
7613 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7614}
7615
7616/* Like compare but expects the values to compare as two rtx's.
7617 The decision as to signed or unsigned comparison must be made by the caller.
7618
7619 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
7620 compared.
7621
7622 If ALIGN is non-zero, it is the alignment of this type; if zero, the
7623 size of MODE should be used. */
7624
7625rtx
7626compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
7627 register rtx op0, op1;
7628 enum rtx_code code;
7629 int unsignedp;
7630 enum machine_mode mode;
7631 rtx size;
7632 int align;
7633{
a7c5971a
RK
7634 rtx tem;
7635
bf743ac5
RK
7636 /* If one operand is constant, make it the second one. Only do this
7637 if the other operand is not constant as well. */
bbf6f052 7638
bf743ac5
RK
7639 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
7640 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
bbf6f052 7641 {
a7c5971a 7642 tem = op0;
bbf6f052
RK
7643 op0 = op1;
7644 op1 = tem;
7645 code = swap_condition (code);
7646 }
7647
7648 if (flag_force_mem)
7649 {
7650 op0 = force_not_mem (op0);
7651 op1 = force_not_mem (op1);
7652 }
7653
7654 do_pending_stack_adjust ();
7655
a7c5971a
RK
7656 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
7657 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
7658 return tem;
bbf6f052 7659
cd1b4b44
RK
7660#if 0
7661 /* There's no need to do this now that combine.c can eliminate lots of
7662 sign extensions. This can be less efficient in certain cases on other
7663 machines.
7664
bbf6f052
RK
7665 /* If this is a signed equality comparison, we can do it as an
7666 unsigned comparison since zero-extension is cheaper than sign
77fa0940
RK
7667 extension and comparisons with zero are done as unsigned. This is
7668 the case even on machines that can do fast sign extension, since
8008b228 7669 zero-extension is easier to combine with other operations than
77fa0940
RK
7670 sign-extension is. If we are comparing against a constant, we must
7671 convert it to what it would look like unsigned. */
bbf6f052 7672 if ((code == EQ || code == NE) && ! unsignedp
906c4e36 7673 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7674 {
7675 if (GET_CODE (op1) == CONST_INT
7676 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
906c4e36 7677 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
bbf6f052
RK
7678 unsignedp = 1;
7679 }
cd1b4b44 7680#endif
bbf6f052
RK
7681
7682 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
7683
7684 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7685}
7686\f
7687/* Generate code to calculate EXP using a store-flag instruction
e7c33f54
RK
7688 and return an rtx for the result. EXP is either a comparison
7689 or a TRUTH_NOT_EXPR whose operand is a comparison.
7690
bbf6f052
RK
7691 If TARGET is nonzero, store the result there if convenient.
7692
7693 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
7694 cheap.
7695
7696 Return zero if there is no suitable set-flag instruction
7697 available on this machine.
7698
7699 Once expand_expr has been called on the arguments of the comparison,
7700 we are committed to doing the store flag, since it is not safe to
7701 re-evaluate the expression. We emit the store-flag insn by calling
7702 emit_store_flag, but only expand the arguments if we have a reason
7703 to believe that emit_store_flag will be successful. If we think that
7704 it will, but it isn't, we have to simulate the store-flag with a
7705 set/jump/set sequence. */
7706
7707static rtx
7708do_store_flag (exp, target, mode, only_cheap)
7709 tree exp;
7710 rtx target;
7711 enum machine_mode mode;
7712 int only_cheap;
7713{
7714 enum rtx_code code;
e7c33f54 7715 tree arg0, arg1, type;
bbf6f052 7716 tree tem;
e7c33f54
RK
7717 enum machine_mode operand_mode;
7718 int invert = 0;
7719 int unsignedp;
bbf6f052
RK
7720 rtx op0, op1;
7721 enum insn_code icode;
7722 rtx subtarget = target;
7723 rtx result, label, pattern, jump_pat;
7724
e7c33f54
RK
7725 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7726 result at the end. We can't simply invert the test since it would
7727 have already been inverted if it were valid. This case occurs for
7728 some floating-point comparisons. */
7729
7730 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7731 invert = 1, exp = TREE_OPERAND (exp, 0);
7732
7733 arg0 = TREE_OPERAND (exp, 0);
7734 arg1 = TREE_OPERAND (exp, 1);
7735 type = TREE_TYPE (arg0);
7736 operand_mode = TYPE_MODE (type);
7737 unsignedp = TREE_UNSIGNED (type);
7738
bbf6f052
RK
7739 /* We won't bother with BLKmode store-flag operations because it would mean
7740 passing a lot of information to emit_store_flag. */
7741 if (operand_mode == BLKmode)
7742 return 0;
7743
d964285c
CH
7744 STRIP_NOPS (arg0);
7745 STRIP_NOPS (arg1);
bbf6f052
RK
7746
7747 /* Get the rtx comparison code to use. We know that EXP is a comparison
7748 operation of some type. Some comparisons against 1 and -1 can be
7749 converted to comparisons with zero. Do so here so that the tests
7750 below will be aware that we have a comparison with zero. These
7751 tests will not catch constants in the first operand, but constants
7752 are rarely passed as the first operand. */
7753
7754 switch (TREE_CODE (exp))
7755 {
7756 case EQ_EXPR:
7757 code = EQ;
7758 break;
7759 case NE_EXPR:
7760 code = NE;
7761 break;
7762 case LT_EXPR:
7763 if (integer_onep (arg1))
7764 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7765 else
7766 code = unsignedp ? LTU : LT;
7767 break;
7768 case LE_EXPR:
5bf6e3bd
RK
7769 if (! unsignedp && integer_all_onesp (arg1))
7770 arg1 = integer_zero_node, code = LT;
bbf6f052
RK
7771 else
7772 code = unsignedp ? LEU : LE;
7773 break;
7774 case GT_EXPR:
5bf6e3bd
RK
7775 if (! unsignedp && integer_all_onesp (arg1))
7776 arg1 = integer_zero_node, code = GE;
bbf6f052
RK
7777 else
7778 code = unsignedp ? GTU : GT;
7779 break;
7780 case GE_EXPR:
7781 if (integer_onep (arg1))
7782 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7783 else
7784 code = unsignedp ? GEU : GE;
7785 break;
7786 default:
7787 abort ();
7788 }
7789
7790 /* Put a constant second. */
7791 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7792 {
7793 tem = arg0; arg0 = arg1; arg1 = tem;
7794 code = swap_condition (code);
7795 }
7796
7797 /* If this is an equality or inequality test of a single bit, we can
7798 do this by shifting the bit being tested to the low-order bit and
7799 masking the result with the constant 1. If the condition was EQ,
7800 we xor it with 1. This does not require an scc insn and is faster
7801 than an scc insn even if we have it. */
7802
7803 if ((code == NE || code == EQ)
7804 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7805 && integer_pow2p (TREE_OPERAND (arg0, 1))
906c4e36 7806 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7807 {
7808 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
906c4e36 7809 NULL_RTX, VOIDmode, 0)));
bbf6f052
RK
7810
7811 if (subtarget == 0 || GET_CODE (subtarget) != REG
7812 || GET_MODE (subtarget) != operand_mode
7813 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
7814 subtarget = 0;
7815
7816 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
7817
7818 if (bitnum != 0)
7819 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7820 size_int (bitnum), target, 1);
7821
7822 if (GET_MODE (op0) != mode)
7823 op0 = convert_to_mode (mode, op0, 1);
7824
7825 if (bitnum != TYPE_PRECISION (type) - 1)
7826 op0 = expand_and (op0, const1_rtx, target);
7827
e7c33f54 7828 if ((code == EQ && ! invert) || (code == NE && invert))
bbf6f052
RK
7829 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7830 OPTAB_LIB_WIDEN);
7831
7832 return op0;
7833 }
7834
7835 /* Now see if we are likely to be able to do this. Return if not. */
7836 if (! can_compare_p (operand_mode))
7837 return 0;
7838 icode = setcc_gen_code[(int) code];
7839 if (icode == CODE_FOR_nothing
7840 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7841 {
7842 /* We can only do this if it is one of the special cases that
7843 can be handled without an scc insn. */
7844 if ((code == LT && integer_zerop (arg1))
7845 || (! only_cheap && code == GE && integer_zerop (arg1)))
7846 ;
7847 else if (BRANCH_COST >= 0
7848 && ! only_cheap && (code == NE || code == EQ)
7849 && TREE_CODE (type) != REAL_TYPE
7850 && ((abs_optab->handlers[(int) operand_mode].insn_code
7851 != CODE_FOR_nothing)
7852 || (ffs_optab->handlers[(int) operand_mode].insn_code
7853 != CODE_FOR_nothing)))
7854 ;
7855 else
7856 return 0;
7857 }
7858
7859 preexpand_calls (exp);
7860 if (subtarget == 0 || GET_CODE (subtarget) != REG
7861 || GET_MODE (subtarget) != operand_mode
7862 || ! safe_from_p (subtarget, arg1))
7863 subtarget = 0;
7864
7865 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
906c4e36 7866 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7867
7868 if (target == 0)
7869 target = gen_reg_rtx (mode);
7870
d39985fa
RK
7871 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
7872 because, if the emit_store_flag does anything it will succeed and
7873 OP0 and OP1 will not be used subsequently. */
7874
7875 result = emit_store_flag (target, code,
7876 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
7877 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
7878 operand_mode, unsignedp, 1);
bbf6f052
RK
7879
7880 if (result)
e7c33f54
RK
7881 {
7882 if (invert)
7883 result = expand_binop (mode, xor_optab, result, const1_rtx,
7884 result, 0, OPTAB_LIB_WIDEN);
7885 return result;
7886 }
bbf6f052
RK
7887
7888 /* If this failed, we have to do this with set/compare/jump/set code. */
7889 if (target == 0 || GET_CODE (target) != REG
7890 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7891 target = gen_reg_rtx (GET_MODE (target));
7892
e7c33f54 7893 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
906c4e36
RK
7894 result = compare_from_rtx (op0, op1, code, unsignedp,
7895 operand_mode, NULL_RTX, 0);
bbf6f052 7896 if (GET_CODE (result) == CONST_INT)
e7c33f54
RK
7897 return (((result == const0_rtx && ! invert)
7898 || (result != const0_rtx && invert))
7899 ? const0_rtx : const1_rtx);
bbf6f052
RK
7900
7901 label = gen_label_rtx ();
7902 if (bcc_gen_fctn[(int) code] == 0)
7903 abort ();
7904
7905 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
e7c33f54 7906 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
bbf6f052
RK
7907 emit_label (label);
7908
7909 return target;
7910}
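/* A source-level model (illustrative sketch) of the single-bit special
   case above: with arg1 == 0 and a power-of-two mask such as 8, the test
   `(x & 8) != 0' becomes a shift and a mask with no scc insn or branch,
   and the EQ form merely xors the result with 1.  */
#if 0
static int
model_bit_ne (x)
     unsigned int x;
{
  return (x >> 3) & 1;		/* bitnum = exact_log2 (8) = 3 */
}

static int
model_bit_eq (x)
     unsigned int x;
{
  return ((x >> 3) & 1) ^ 1;	/* xor with 1 when the code was EQ */
}
#endif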
7911\f
7912/* Generate a tablejump instruction (used for switch statements). */
7913
7914#ifdef HAVE_tablejump
7915
7916/* INDEX is the value being switched on, with the lowest value
7917 in the table already subtracted.
88d3b7f0 7918 MODE is its expected mode (needed if INDEX is constant).
bbf6f052
RK
7919 RANGE is the length of the jump table.
7920 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7921
7922 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7923 index value is out of range. */
7924
7925void
e87b4f3f 7926do_tablejump (index, mode, range, table_label, default_label)
bbf6f052 7927 rtx index, range, table_label, default_label;
e87b4f3f 7928 enum machine_mode mode;
bbf6f052
RK
7929{
7930 register rtx temp, vector;
7931
88d3b7f0
RS
7932 /* Do an unsigned comparison (in the proper mode) between the index
7933 expression and the value which represents the length of the range.
7934 Since we just finished subtracting the lower bound of the range
7935 from the index expression, this comparison allows us to simultaneously
7936 check that the original index expression value is both greater than
7937 or equal to the minimum value of the range and less than or equal to
7938 the maximum value of the range. */
e87b4f3f 7939
b4c65118 7940 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
bbf6f052 7941 emit_jump_insn (gen_bltu (default_label));
88d3b7f0
RS
7942
7943 /* If index is in range, it must fit in Pmode.
7944 Convert to Pmode so we can index with it. */
7945 if (mode != Pmode)
7946 index = convert_to_mode (Pmode, index, 1);
7947
bbf6f052
RK
7948 /* If flag_force_addr were to affect this address
7949 it could interfere with the tricky assumptions made
7950 about addresses that contain label-refs,
7951 which may be valid only very near the tablejump itself. */
7952 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
7953 GET_MODE_SIZE, because this indicates how large insns are. The other
7954 uses should all be Pmode, because they are addresses. This code
7955 could fail if addresses and insns are not the same size. */
7956 index = memory_address_noforce
7957 (CASE_VECTOR_MODE,
7958 gen_rtx (PLUS, Pmode,
7959 gen_rtx (MULT, Pmode, index,
906c4e36 7960 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
bbf6f052
RK
7961 gen_rtx (LABEL_REF, Pmode, table_label)));
7962 temp = gen_reg_rtx (CASE_VECTOR_MODE);
7963 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
7964 RTX_UNCHANGING_P (vector) = 1;
7965 convert_move (temp, vector, 0);
7966
7967 emit_jump_insn (gen_tablejump (temp, table_label));
7968
7969#ifndef CASE_VECTOR_PC_RELATIVE
7970 /* If we are generating PIC code or if the table is PC-relative, the
7971 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
7972 if (! flag_pic)
7973 emit_barrier ();
7974#endif
7975}
7976
7977#endif /* HAVE_tablejump */
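/* A self-contained model (illustrative sketch) of the dispatch sequence
   emitted above, written with the GNU computed-goto extension: one unsigned
   comparison against the table length catches every out-of-range index --
   including originally negative ones -- and the in-range path fetches a
   label from the table and jumps through it.  INDEX is assumed to have the
   case range's low bound already subtracted; the three cases here are
   made-up placeholders.  */
#if 0
static int
model_tablejump (index)
     unsigned int index;
{
  static void *table[] = { &&case0, &&case1, &&case2 };

  if (index > 2)		/* emit_cmp_insn (range, index, LTU, ...) */
    goto deflt;			/* gen_bltu (default_label) */
  goto *table[index];		/* gen_tablejump */
 case0: return 10;
 case1: return 11;
 case2: return 12;
 deflt: return -1;
}
#endif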