1/* Convert tree expression to rtl instructions, for GNU compiler.
2 Copyright (C) 1988, 1992 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21#include "config.h"
22#include "rtl.h"
23#include "tree.h"
24#include "flags.h"
25#include "function.h"
26#include "insn-flags.h"
27#include "insn-codes.h"
28#include "expr.h"
29#include "insn-config.h"
30#include "recog.h"
31#include "output.h"
32#include "gvarargs.h"
33#include "typeclass.h"
34
35#define CEIL(x,y) (((x) + (y) - 1) / (y))
36
37/* Decide whether a function's arguments should be processed
38 from first to last or from last to first.
39
40 They should if the stack and args grow in opposite directions, but
41 only if we have push insns. */
42
43#ifdef PUSH_ROUNDING
44
45#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
46#define PUSH_ARGS_REVERSED /* If it's last to first */
47#endif
48
49#endif
50
51#ifndef STACK_PUSH_CODE
52#ifdef STACK_GROWS_DOWNWARD
53#define STACK_PUSH_CODE PRE_DEC
54#else
55#define STACK_PUSH_CODE PRE_INC
56#endif
57#endif
58
59/* Like STACK_BOUNDARY but in units of bytes, not bits. */
60#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
61
62/* If this is nonzero, we do not bother generating VOLATILE
63 around volatile memory references, and we are willing to
64 output indirect addresses. If cse is to follow, we reject
65 indirect addresses so a useful potential cse is generated;
66 if it is used only once, instruction combination will produce
67 the same indirect address eventually. */
68int cse_not_expected;
69
70/* Nonzero to generate code for all the subroutines within an
71 expression before generating the upper levels of the expression.
72 Nowadays this is never zero. */
73int do_preexpand_calls = 1;
74
75/* Number of units that we should eventually pop off the stack.
76 These are the arguments to function calls that have already returned. */
77int pending_stack_adjust;
78
79/* Nonzero means stack pops must not be deferred, and deferred stack
80 pops must not be output. It is nonzero inside a function call,
81 inside a conditional expression, inside a statement expression,
82 and in other cases as well. */
83int inhibit_defer_pop;
84
85/* A list of all cleanups which belong to the arguments of
86 function calls being expanded by expand_call. */
87tree cleanups_this_call;
88
89/* Nonzero means __builtin_saveregs has already been done in this function.
90 The value is the pseudoreg containing the value __builtin_saveregs
91 returned. */
92static rtx saveregs_value;
93
94/* This structure is used by move_by_pieces to describe the move to
95 be performed. */
96
97struct move_by_pieces
98{
99 rtx to;
100 rtx to_addr;
101 int autinc_to;
102 int explicit_inc_to;
103 rtx from;
104 rtx from_addr;
105 int autinc_from;
106 int explicit_inc_from;
107 int len;
108 int offset;
109 int reverse;
110};
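/* EXPLICIT_INC_TO and EXPLICIT_INC_FROM are -1 when an explicit
   pre-decrement of the address is emitted, 1 when an explicit
   post-increment is emitted, and 0 when the address register is not
   adjusted by hand; see move_by_pieces and move_by_pieces_1 below.  */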
111
112static rtx enqueue_insn PROTO((rtx, rtx));
113static int queued_subexp_p PROTO((rtx));
114static void init_queue PROTO((void));
115static void move_by_pieces PROTO((rtx, rtx, int, int));
116static int move_by_pieces_ninsns PROTO((unsigned int, int));
117static void move_by_pieces_1 PROTO((rtx (*) (), enum machine_mode,
118 struct move_by_pieces *));
119static void group_insns PROTO((rtx));
120static void store_constructor PROTO((tree, rtx));
121static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
122 enum machine_mode, int, int, int));
123static tree save_noncopied_parts PROTO((tree, tree));
124static tree init_noncopied_parts PROTO((tree, tree));
125static int safe_from_p PROTO((rtx, tree));
126static int fixed_type_p PROTO((tree));
127static int get_pointer_alignment PROTO((tree, unsigned));
128static tree string_constant PROTO((tree, tree *));
129static tree c_strlen PROTO((tree));
130static rtx expand_builtin PROTO((tree, rtx, rtx, enum machine_mode, int));
131static rtx expand_increment PROTO((tree, int));
132static void preexpand_calls PROTO((tree));
133static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
134static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
135static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
136static void do_jump_for_compare PROTO((rtx, rtx, rtx));
137static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
138static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
139
140/* Record for each mode whether we can move a register directly to or
141 from an object of that mode in memory. If we can't, we won't try
142 to use that mode directly when accessing a field of that mode. */
143
144static char direct_load[NUM_MACHINE_MODES];
145static char direct_store[NUM_MACHINE_MODES];
146
147/* MOVE_RATIO is the number of move instructions that is better than
148 a block move. */
149
150#ifndef MOVE_RATIO
151#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
152#define MOVE_RATIO 2
153#else
154/* A value of around 6 would minimize code size; infinity would minimize
155 execution time. */
156#define MOVE_RATIO 15
157#endif
158#endif
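/* A worked example of how MOVE_RATIO is applied (illustrative, not from
   the original sources): with 4-byte words and MOVE_RATIO 15, a 16-byte
   aligned copy costs 4 SImode moves, so emit_block_move does it with
   move_by_pieces; a 256-byte copy would cost 64 moves and is left to a
   movstr pattern or a library call instead.  */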
159
160/* This array records the insn_code of insns to perform block moves. */
161static enum insn_code movstr_optab[NUM_MACHINE_MODES];
162
163/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
164
165#ifndef SLOW_UNALIGNED_ACCESS
166#define SLOW_UNALIGNED_ACCESS 0
167#endif
168\f
169/* This is run once per compilation to set up which modes can be used
170 directly in memory and to initialize the block move optab. */
171
172void
173init_expr_once ()
174{
175 rtx insn, pat;
176 enum machine_mode mode;
177 /* Try indexing by frame ptr and try by stack ptr.
178 It is known that on the Convex the stack ptr isn't a valid index.
179 With luck, one or the other is valid on any machine. */
180 rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
181 rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);
182
183 start_sequence ();
184 insn = emit_insn (gen_rtx (SET, 0, 0));
185 pat = PATTERN (insn);
186
187 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
188 mode = (enum machine_mode) ((int) mode + 1))
189 {
190 int regno;
191 rtx reg;
192 int num_clobbers;
193
194 direct_load[(int) mode] = direct_store[(int) mode] = 0;
195 PUT_MODE (mem, mode);
196 PUT_MODE (mem1, mode);
197
198 /* See if there is some register that can be used in this mode and
199 directly loaded or stored from memory. */
200
201 if (mode != VOIDmode && mode != BLKmode)
202 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
203 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
204 regno++)
205 {
206 if (! HARD_REGNO_MODE_OK (regno, mode))
207 continue;
208
209 reg = gen_rtx (REG, mode, regno);
210
211 SET_SRC (pat) = mem;
212 SET_DEST (pat) = reg;
213 if (recog (pat, insn, &num_clobbers) >= 0)
214 direct_load[(int) mode] = 1;
215
216 SET_SRC (pat) = mem1;
217 SET_DEST (pat) = reg;
218 if (recog (pat, insn, &num_clobbers) >= 0)
219 direct_load[(int) mode] = 1;
220
221 SET_SRC (pat) = reg;
222 SET_DEST (pat) = mem;
223 if (recog (pat, insn, &num_clobbers) >= 0)
224 direct_store[(int) mode] = 1;
225
226 SET_SRC (pat) = reg;
227 SET_DEST (pat) = mem1;
228 if (recog (pat, insn, &num_clobbers) >= 0)
229 direct_store[(int) mode] = 1;
230 }
231
232 movstr_optab[(int) mode] = CODE_FOR_nothing;
233 }
234
235 end_sequence ();
236
237#ifdef HAVE_movstrqi
238 if (HAVE_movstrqi)
239 movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
240#endif
241#ifdef HAVE_movstrhi
242 if (HAVE_movstrhi)
243 movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
244#endif
245#ifdef HAVE_movstrsi
246 if (HAVE_movstrsi)
247 movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
248#endif
249#ifdef HAVE_movstrdi
250 if (HAVE_movstrdi)
251 movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
252#endif
253#ifdef HAVE_movstrti
254 if (HAVE_movstrti)
255 movstr_optab[(int) TImode] = CODE_FOR_movstrti;
256#endif
257}
258
259/* This is run at the start of compiling a function. */
260
261void
262init_expr ()
263{
264 init_queue ();
265
266 pending_stack_adjust = 0;
267 inhibit_defer_pop = 0;
268 cleanups_this_call = 0;
269 saveregs_value = 0;
270 forced_labels = 0;
271}
272
273/* Save all variables describing the current status into the structure *P.
274 This is used before starting a nested function. */
275
276void
277save_expr_status (p)
278 struct function *p;
279{
280 /* Instead of saving the postincrement queue, empty it. */
281 emit_queue ();
282
283 p->pending_stack_adjust = pending_stack_adjust;
284 p->inhibit_defer_pop = inhibit_defer_pop;
285 p->cleanups_this_call = cleanups_this_call;
286 p->saveregs_value = saveregs_value;
287 p->forced_labels = forced_labels;
288
289 pending_stack_adjust = 0;
290 inhibit_defer_pop = 0;
291 cleanups_this_call = 0;
292 saveregs_value = 0;
293 forced_labels = 0;
294}
295
296/* Restore all variables describing the current status from the structure *P.
297 This is used after a nested function. */
298
299void
300restore_expr_status (p)
301 struct function *p;
302{
303 pending_stack_adjust = p->pending_stack_adjust;
304 inhibit_defer_pop = p->inhibit_defer_pop;
305 cleanups_this_call = p->cleanups_this_call;
306 saveregs_value = p->saveregs_value;
307 forced_labels = p->forced_labels;
308}
309\f
310/* Manage the queue of increment instructions to be output
311 for POSTINCREMENT_EXPR expressions, etc. */
312
313static rtx pending_chain;
314
315/* Queue up to increment (or change) VAR later. BODY says how:
316 BODY should be the same thing you would pass to emit_insn
317 to increment right away. It will go to emit_insn later on.
318
319 The value is a QUEUED expression to be used in place of VAR
320 where you want to guarantee the pre-incrementation value of VAR. */
321
322static rtx
323enqueue_insn (var, body)
324 rtx var, body;
325{
326 pending_chain = gen_rtx (QUEUED, GET_MODE (var),
327 var, NULL_RTX, NULL_RTX, body, pending_chain);
328 return pending_chain;
329}
330
331/* Use protect_from_queue to convert a QUEUED expression
332 into something that you can put immediately into an instruction.
333 If the queued incrementation has not happened yet,
334 protect_from_queue returns the variable itself.
335 If the incrementation has happened, protect_from_queue returns a temp
336 that contains a copy of the old value of the variable.
337
338 Any time an rtx which might possibly be a QUEUED is to be put
339 into an instruction, it must be passed through protect_from_queue first.
340 QUEUED expressions are not meaningful in instructions.
341
342 Do not pass a value through protect_from_queue and then hold
343 on to it for a while before putting it in an instruction!
344 If the queue is flushed in between, incorrect code will result. */
345
346rtx
347protect_from_queue (x, modify)
348 register rtx x;
349 int modify;
350{
351 register RTX_CODE code = GET_CODE (x);
352
353#if 0 /* A QUEUED can hang around after the queue is forced out. */
354 /* Shortcut for most common case. */
355 if (pending_chain == 0)
356 return x;
357#endif
358
359 if (code != QUEUED)
360 {
361 /* A special hack for read access to (MEM (QUEUED ...))
362 to facilitate use of autoincrement.
363 Make a copy of the contents of the memory location
364 rather than a copy of the address, but not
365 if the value is of mode BLKmode. */
366 if (code == MEM && GET_MODE (x) != BLKmode
367 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
368 {
369 register rtx y = XEXP (x, 0);
370 XEXP (x, 0) = QUEUED_VAR (y);
371 if (QUEUED_INSN (y))
372 {
373 register rtx temp = gen_reg_rtx (GET_MODE (x));
374 emit_insn_before (gen_move_insn (temp, x),
375 QUEUED_INSN (y));
376 return temp;
377 }
378 return x;
379 }
380 /* Otherwise, recursively protect the subexpressions of all
381 the kinds of rtx's that can contain a QUEUED. */
382 if (code == MEM)
383 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
384 else if (code == PLUS || code == MULT)
385 {
386 XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
387 XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
388 }
389 return x;
390 }
391 /* If the increment has not happened, use the variable itself. */
392 if (QUEUED_INSN (x) == 0)
393 return QUEUED_VAR (x);
394 /* If the increment has happened and a pre-increment copy exists,
395 use that copy. */
396 if (QUEUED_COPY (x) != 0)
397 return QUEUED_COPY (x);
398 /* The increment has happened but we haven't set up a pre-increment copy.
399 Set one up now, and use it. */
400 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
401 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
402 QUEUED_INSN (x));
403 return QUEUED_COPY (x);
404}
405
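/* A minimal sketch of the queueing protocol (illustrative; I_REG and the
   surrounding context are assumed, not taken from this file).  Expanding
   something like `a[i++]' queues the increment of `i', reads the
   pre-increment value through protect_from_queue, and flushes the queue
   when the containing expression is done.  */
#if 0
      rtx i_reg;		/* rtx holding `i' (assumed) */
      rtx queued = enqueue_insn (i_reg, gen_add2_insn (i_reg, const1_rtx));
      /* Uses of `i' before the flush go through protect_from_queue,
	 which yields the pre-increment value.  */
      rtx i_val = protect_from_queue (queued, 0);
      /* ... emit code that uses I_VAL ... */
      emit_queue ();
#endif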
406/* Return nonzero if X contains a QUEUED expression:
407 if it contains anything that will be altered by a queued increment.
408 We handle only combinations of MEM, PLUS, MINUS and MULT operators
409 since memory addresses generally contain only those. */
410
411static int
412queued_subexp_p (x)
413 rtx x;
414{
415 register enum rtx_code code = GET_CODE (x);
416 switch (code)
417 {
418 case QUEUED:
419 return 1;
420 case MEM:
421 return queued_subexp_p (XEXP (x, 0));
422 case MULT:
423 case PLUS:
424 case MINUS:
425 return queued_subexp_p (XEXP (x, 0))
426 || queued_subexp_p (XEXP (x, 1));
427 }
428 return 0;
429}
430
431/* Perform all the pending incrementations. */
432
433void
434emit_queue ()
435{
436 register rtx p;
437 while (p = pending_chain)
438 {
439 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
440 pending_chain = QUEUED_NEXT (p);
441 }
442}
443
444static void
445init_queue ()
446{
447 if (pending_chain)
448 abort ();
449}
450\f
451/* Copy data from FROM to TO, where the machine modes are not the same.
452 Both modes may be integer, or both may be floating.
453 UNSIGNEDP should be nonzero if FROM is an unsigned type.
454 This causes zero-extension instead of sign-extension. */
455
456void
457convert_move (to, from, unsignedp)
458 register rtx to, from;
459 int unsignedp;
460{
461 enum machine_mode to_mode = GET_MODE (to);
462 enum machine_mode from_mode = GET_MODE (from);
463 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
464 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
465 enum insn_code code;
466 rtx libcall;
467
468 /* rtx code for making an equivalent value. */
469 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
470
471 to = protect_from_queue (to, 1);
472 from = protect_from_queue (from, 0);
473
474 if (to_real != from_real)
475 abort ();
476
477 /* If FROM is a SUBREG that indicates that we have already done at least
478 the required extension, strip it. We don't handle such SUBREGs as
479 TO here. */
480
481 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
482 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
483 >= GET_MODE_SIZE (to_mode))
484 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
485 from = gen_lowpart (to_mode, from), from_mode = to_mode;
486
487 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
488 abort ();
489
490 if (to_mode == from_mode
491 || (from_mode == VOIDmode && CONSTANT_P (from)))
492 {
493 emit_move_insn (to, from);
494 return;
495 }
496
497 if (to_real)
498 {
499#ifdef HAVE_extendqfhf2
500 if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
501 {
502 emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
503 return;
504 }
505#endif
506#ifdef HAVE_extendqfsf2
507 if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
508 {
509 emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
510 return;
511 }
512#endif
513#ifdef HAVE_extendqfdf2
514 if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
515 {
516 emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
517 return;
518 }
519#endif
520#ifdef HAVE_extendqfxf2
521 if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
522 {
523 emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
524 return;
525 }
526#endif
527#ifdef HAVE_extendqftf2
528 if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
529 {
530 emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
531 return;
532 }
533#endif
534
535#ifdef HAVE_extendhfsf2
536 if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
537 {
538 emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
539 return;
540 }
541#endif
542#ifdef HAVE_extendhfdf2
543 if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
544 {
545 emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
546 return;
547 }
548#endif
549#ifdef HAVE_extendhfxf2
550 if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
551 {
552 emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
553 return;
554 }
555#endif
556#ifdef HAVE_extendhftf2
557 if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
558 {
559 emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
560 return;
561 }
562#endif
563
564#ifdef HAVE_extendsfdf2
565 if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
566 {
567 emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
568 return;
569 }
570#endif
571#ifdef HAVE_extendsfxf2
572 if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
573 {
574 emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
575 return;
576 }
577#endif
578#ifdef HAVE_extendsftf2
579 if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
580 {
581 emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
582 return;
583 }
584#endif
585#ifdef HAVE_extenddfxf2
586 if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
587 {
588 emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
589 return;
590 }
591#endif
592#ifdef HAVE_extenddftf2
593 if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
594 {
595 emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
596 return;
597 }
598#endif
599
600#ifdef HAVE_trunchfqf2
601 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
602 {
603 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
604 return;
605 }
606#endif
607#ifdef HAVE_truncsfqf2
608 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
609 {
610 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
611 return;
612 }
613#endif
614#ifdef HAVE_truncdfqf2
615 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
616 {
617 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
618 return;
619 }
620#endif
621#ifdef HAVE_truncxfqf2
622 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
623 {
624 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
625 return;
626 }
627#endif
628#ifdef HAVE_trunctfqf2
629 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
630 {
631 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
632 return;
633 }
634#endif
635#ifdef HAVE_truncsfhf2
636 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
637 {
638 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
639 return;
640 }
641#endif
642#ifdef HAVE_truncdfhf2
643 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
644 {
645 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
646 return;
647 }
648#endif
649#ifdef HAVE_truncxfhf2
650 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
651 {
652 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
653 return;
654 }
655#endif
656#ifdef HAVE_trunctfhf2
657 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
658 {
659 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
660 return;
661 }
662#endif
663#ifdef HAVE_truncdfsf2
664 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
665 {
666 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
667 return;
668 }
669#endif
670#ifdef HAVE_truncxfsf2
671 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
672 {
673 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
674 return;
675 }
676#endif
677#ifdef HAVE_trunctfsf2
678 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
679 {
680 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
681 return;
682 }
683#endif
684#ifdef HAVE_truncxfdf2
685 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
686 {
687 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
688 return;
689 }
690#endif
691#ifdef HAVE_trunctfdf2
692 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
693 {
694 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
695 return;
696 }
697#endif
698
699 libcall = (rtx) 0;
700 switch (from_mode)
701 {
702 case SFmode:
703 switch (to_mode)
704 {
705 case DFmode:
706 libcall = extendsfdf2_libfunc;
707 break;
708
709 case XFmode:
710 libcall = extendsfxf2_libfunc;
711 break;
712
713 case TFmode:
714 libcall = extendsftf2_libfunc;
715 break;
716 }
717 break;
718
719 case DFmode:
720 switch (to_mode)
721 {
722 case SFmode:
723 libcall = truncdfsf2_libfunc;
724 break;
725
726 case XFmode:
727 libcall = extenddfxf2_libfunc;
728 break;
729
730 case TFmode:
731 libcall = extenddftf2_libfunc;
732 break;
733 }
734 break;
735
736 case XFmode:
737 switch (to_mode)
738 {
739 case SFmode:
740 libcall = truncxfsf2_libfunc;
741 break;
742
743 case DFmode:
744 libcall = truncxfdf2_libfunc;
745 break;
746 }
747 break;
748
749 case TFmode:
750 switch (to_mode)
751 {
752 case SFmode:
753 libcall = trunctfsf2_libfunc;
754 break;
755
756 case DFmode:
757 libcall = trunctfdf2_libfunc;
758 break;
759 }
760 break;
761 }
762
763 if (libcall == (rtx) 0)
764 /* This conversion is not implemented yet. */
765 abort ();
766
767 emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
768 emit_move_insn (to, hard_libcall_value (to_mode));
769 return;
770 }
771
772 /* Now both modes are integers. */
773
774 /* Handle expanding beyond a word. */
775 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
776 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
777 {
778 rtx insns;
779 rtx lowpart;
780 rtx fill_value;
781 rtx lowfrom;
782 int i;
783 enum machine_mode lowpart_mode;
784 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
785
786 /* Try converting directly if the insn is supported. */
787 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
788 != CODE_FOR_nothing)
789 {
790 /* If FROM is a SUBREG, put it into a register. Do this
791 so that we always generate the same set of insns for
792 better cse'ing; if an intermediate assignment occurred,
793 we won't be doing the operation directly on the SUBREG. */
794 if (optimize > 0 && GET_CODE (from) == SUBREG)
795 from = force_reg (from_mode, from);
796 emit_unop_insn (code, to, from, equiv_code);
797 return;
798 }
799 /* Next, try converting via full word. */
800 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
801 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
802 != CODE_FOR_nothing))
803 {
804 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
805 emit_unop_insn (code, to,
806 gen_lowpart (word_mode, to), equiv_code);
807 return;
808 }
809
810 /* No special multiword conversion insn; do it by hand. */
811 start_sequence ();
812
813 /* Get a copy of FROM widened to a word, if necessary. */
814 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
815 lowpart_mode = word_mode;
816 else
817 lowpart_mode = from_mode;
818
819 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
820
821 lowpart = gen_lowpart (lowpart_mode, to);
822 emit_move_insn (lowpart, lowfrom);
823
824 /* Compute the value to put in each remaining word. */
825 if (unsignedp)
826 fill_value = const0_rtx;
827 else
828 {
829#ifdef HAVE_slt
830 if (HAVE_slt
831 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
832 && STORE_FLAG_VALUE == -1)
833 {
834 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
835 lowpart_mode, 0, 0);
836 fill_value = gen_reg_rtx (word_mode);
837 emit_insn (gen_slt (fill_value));
838 }
839 else
840#endif
841 {
842 fill_value
843 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
844 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
845 NULL_RTX, 0);
846 fill_value = convert_to_mode (word_mode, fill_value, 1);
847 }
848 }
849
850 /* Fill the remaining words. */
851 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
852 {
853 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
854 rtx subword = operand_subword (to, index, 1, to_mode);
855
856 if (subword == 0)
857 abort ();
858
859 if (fill_value != subword)
860 emit_move_insn (subword, fill_value);
861 }
862
863 insns = get_insns ();
864 end_sequence ();
865
866 emit_no_conflict_block (insns, to, from, NULL_RTX,
867 gen_rtx (equiv_code, to_mode, from));
868 return;
869 }
870
871 /* Truncating multi-word to a word or less. */
872 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
873 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
874 {
875 convert_move (to, gen_lowpart (word_mode, from), 0);
876 return;
877 }
878
879 /* Handle pointer conversion */ /* SPEE 900220 */
880 if (to_mode == PSImode)
881 {
882 if (from_mode != SImode)
883 from = convert_to_mode (SImode, from, unsignedp);
884
885#ifdef HAVE_truncsipsi
886 if (HAVE_truncsipsi)
887 {
888 emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
889 return;
890 }
891#endif /* HAVE_truncsipsi */
892 abort ();
893 }
894
895 if (from_mode == PSImode)
896 {
897 if (to_mode != SImode)
898 {
899 from = convert_to_mode (SImode, from, unsignedp);
900 from_mode = SImode;
901 }
902 else
903 {
904#ifdef HAVE_extendpsisi
905 if (HAVE_extendpsisi)
906 {
907 emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
908 return;
909 }
910#endif /* HAVE_extendpsisi */
911 abort ();
912 }
913 }
914
915 /* Now follow all the conversions between integers
916 no more than a word long. */
917
918 /* For truncation, usually we can just refer to FROM in a narrower mode. */
919 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
920 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
921 GET_MODE_BITSIZE (from_mode)))
922 {
923 if (!((GET_CODE (from) == MEM
924 && ! MEM_VOLATILE_P (from)
925 && direct_load[(int) to_mode]
926 && ! mode_dependent_address_p (XEXP (from, 0)))
927 || GET_CODE (from) == REG
928 || GET_CODE (from) == SUBREG))
929 from = force_reg (from_mode, from);
930 emit_move_insn (to, gen_lowpart (to_mode, from));
931 return;
932 }
933
934 /* Handle extension. */
935 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
936 {
937 /* Convert directly if that works. */
938 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
939 != CODE_FOR_nothing)
940 {
941 /* If FROM is a SUBREG, put it into a register. Do this
942 so that we always generate the same set of insns for
943 better cse'ing; if an intermediate assignment occurred,
944 we won't be doing the operation directly on the SUBREG. */
945 if (optimize > 0 && GET_CODE (from) == SUBREG)
946 from = force_reg (from_mode, from);
947 emit_unop_insn (code, to, from, equiv_code);
948 return;
949 }
950 else
951 {
952 enum machine_mode intermediate;
953
954 /* Search for a mode to convert via. */
955 for (intermediate = from_mode; intermediate != VOIDmode;
956 intermediate = GET_MODE_WIDER_MODE (intermediate))
957 if ((can_extend_p (to_mode, intermediate, unsignedp)
958 != CODE_FOR_nothing)
959 && (can_extend_p (intermediate, from_mode, unsignedp)
960 != CODE_FOR_nothing))
961 {
962 convert_move (to, convert_to_mode (intermediate, from,
963 unsignedp), unsignedp);
964 return;
965 }
966
967 /* No suitable intermediate mode. */
968 abort ();
969 }
970 }
971
972 /* Support special truncate insns for certain modes. */
973
974 if (from_mode == DImode && to_mode == SImode)
975 {
976#ifdef HAVE_truncdisi2
977 if (HAVE_truncdisi2)
978 {
979 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
980 return;
981 }
982#endif
983 convert_move (to, force_reg (from_mode, from), unsignedp);
984 return;
985 }
986
987 if (from_mode == DImode && to_mode == HImode)
988 {
989#ifdef HAVE_truncdihi2
990 if (HAVE_truncdihi2)
991 {
992 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
993 return;
994 }
995#endif
996 convert_move (to, force_reg (from_mode, from), unsignedp);
997 return;
998 }
999
1000 if (from_mode == DImode && to_mode == QImode)
1001 {
1002#ifdef HAVE_truncdiqi2
1003 if (HAVE_truncdiqi2)
1004 {
1005 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1006 return;
1007 }
1008#endif
1009 convert_move (to, force_reg (from_mode, from), unsignedp);
1010 return;
1011 }
1012
1013 if (from_mode == SImode && to_mode == HImode)
1014 {
1015#ifdef HAVE_truncsihi2
1016 if (HAVE_truncsihi2)
1017 {
1018 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1019 return;
1020 }
1021#endif
1022 convert_move (to, force_reg (from_mode, from), unsignedp);
1023 return;
1024 }
1025
1026 if (from_mode == SImode && to_mode == QImode)
1027 {
1028#ifdef HAVE_truncsiqi2
1029 if (HAVE_truncsiqi2)
1030 {
1031 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1032 return;
1033 }
1034#endif
1035 convert_move (to, force_reg (from_mode, from), unsignedp);
1036 return;
1037 }
1038
1039 if (from_mode == HImode && to_mode == QImode)
1040 {
1041#ifdef HAVE_trunchiqi2
1042 if (HAVE_trunchiqi2)
1043 {
1044 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1045 return;
1046 }
1047#endif
1048 convert_move (to, force_reg (from_mode, from), unsignedp);
1049 return;
1050 }
1051
1052 /* Handle truncation of volatile memrefs, and so on;
1053 the things that couldn't be truncated directly,
1054 and for which there was no special instruction. */
1055 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1056 {
1057 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1058 emit_move_insn (to, temp);
1059 return;
1060 }
1061
1062 /* Mode combination is not recognized. */
1063 abort ();
1064}
1065
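/* Example (a sketch, not from the original file): widening a signed
   HImode value into a fresh SImode register sign-extends; passing
   UNSIGNEDP nonzero would zero-extend instead.  */
#if 0
      rtx to = gen_reg_rtx (SImode);
      convert_move (to, from_hi, 0);	/* FROM_HI: an HImode rtx (assumed) */
#endif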
1066/* Return an rtx for a value that would result
1067 from converting X to mode MODE.
1068 Both X and MODE may be floating, or both integer.
1069 UNSIGNEDP is nonzero if X is an unsigned value.
1070 This can be done by referring to a part of X in place
1071 or by copying to a new temporary with conversion.
1072
1073 This function *must not* call protect_from_queue
1074 except when putting X into an insn (in which case convert_move does it). */
1075
1076rtx
1077convert_to_mode (mode, x, unsignedp)
1078 enum machine_mode mode;
1079 rtx x;
1080 int unsignedp;
1081{
1082 register rtx temp;
1083
1084 /* If FROM is a SUBREG that indicates that we have already done at least
1085 the required extension, strip it. */
1086
1087 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1088 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1089 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1090 x = gen_lowpart (mode, x);
1091
1092 if (mode == GET_MODE (x))
1093 return x;
1094
1095 /* There is one case that we must handle specially: If we are converting
1096 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
1097 we are to interpret the constant as unsigned, gen_lowpart will do
1098 the wrong thing if the constant appears negative. What we want to do is
1099 make the high-order word of the constant zero, not all ones. */
1100
1101 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
1102 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
1103 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
1104 return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);
1105
1106 /* We can do this with a gen_lowpart if both desired and current modes
1107 are integer, and this is either a constant integer, a register, or a
1108 non-volatile MEM. Except for the constant case, we must be narrowing
1109 the operand. */
1110
1111 if (GET_CODE (x) == CONST_INT
1112 || (GET_MODE_CLASS (mode) == MODE_INT
1113 && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
1114 && (GET_CODE (x) == CONST_DOUBLE
1115 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
1116 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x))
1117 && direct_load[(int) mode]
1118 || GET_CODE (x) == REG)))))
1119 return gen_lowpart (mode, x);
1120
1121 temp = gen_reg_rtx (mode);
1122 convert_move (temp, x, unsignedp);
1123 return temp;
1124}
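/* Sketch of the usual calling pattern (illustrative only): obtain X as
   an SImode value; a new pseudo and a convert_move are generated only
   when a plain gen_lowpart will not do.  */
#if 0
      rtx x_si = convert_to_mode (SImode, x, unsignedp);
#endif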
1125\f
1126/* Generate several move instructions to copy LEN bytes
1127 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1128 The caller must pass FROM and TO
1129 through protect_from_queue before calling.
1130 ALIGN (in bytes) is maximum alignment we can assume. */
1131
1132static void
1133move_by_pieces (to, from, len, align)
1134 rtx to, from;
1135 int len, align;
1136{
1137 struct move_by_pieces data;
1138 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
1139 int max_size = MOVE_MAX + 1;
1140
1141 data.offset = 0;
1142 data.to_addr = to_addr;
1143 data.from_addr = from_addr;
1144 data.to = to;
1145 data.from = from;
1146 data.autinc_to
1147 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1148 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1149 data.autinc_from
1150 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1151 || GET_CODE (from_addr) == POST_INC
1152 || GET_CODE (from_addr) == POST_DEC);
1153
1154 data.explicit_inc_from = 0;
1155 data.explicit_inc_to = 0;
1156 data.reverse
1157 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1158 if (data.reverse) data.offset = len;
1159 data.len = len;
1160
1161 /* If copying requires more than two move insns,
1162 copy addresses to registers (to make displacements shorter)
1163 and use post-increment if available. */
1164 if (!(data.autinc_from && data.autinc_to)
1165 && move_by_pieces_ninsns (len, align) > 2)
1166 {
1167#ifdef HAVE_PRE_DECREMENT
1168 if (data.reverse && ! data.autinc_from)
1169 {
1170 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1171 data.autinc_from = 1;
1172 data.explicit_inc_from = -1;
1173 }
1174#endif
1175#ifdef HAVE_POST_INCREMENT
1176 if (! data.autinc_from)
1177 {
1178 data.from_addr = copy_addr_to_reg (from_addr);
1179 data.autinc_from = 1;
1180 data.explicit_inc_from = 1;
1181 }
1182#endif
1183 if (!data.autinc_from && CONSTANT_P (from_addr))
1184 data.from_addr = copy_addr_to_reg (from_addr);
1185#ifdef HAVE_PRE_DECREMENT
1186 if (data.reverse && ! data.autinc_to)
1187 {
1188 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1189 data.autinc_to = 1;
1190 data.explicit_inc_to = -1;
1191 }
1192#endif
1193#ifdef HAVE_POST_INCREMENT
1194 if (! data.reverse && ! data.autinc_to)
1195 {
1196 data.to_addr = copy_addr_to_reg (to_addr);
1197 data.autinc_to = 1;
1198 data.explicit_inc_to = 1;
1199 }
1200#endif
1201 if (!data.autinc_to && CONSTANT_P (to_addr))
1202 data.to_addr = copy_addr_to_reg (to_addr);
1203 }
1204
1205 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1206 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1207 align = MOVE_MAX;
1208
1209 /* First move what we can in the largest integer mode, then go to
1210 successively smaller modes. */
1211
1212 while (max_size > 1)
1213 {
1214 enum machine_mode mode = VOIDmode, tmode;
1215 enum insn_code icode;
1216
1217 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1218 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1219 if (GET_MODE_SIZE (tmode) < max_size)
1220 mode = tmode;
1221
1222 if (mode == VOIDmode)
1223 break;
1224
1225 icode = mov_optab->handlers[(int) mode].insn_code;
1226 if (icode != CODE_FOR_nothing
1227 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1228 GET_MODE_SIZE (mode)))
1229 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1230
1231 max_size = GET_MODE_SIZE (mode);
1232 }
1233
1234 /* The code above should have handled everything. */
1235 if (data.len != 0)
1236 abort ();
1237}
1238
1239/* Return number of insns required to move L bytes by pieces.
1240 ALIGN (in bytes) is maximum alignment we can assume. */
1241
1242static int
1243move_by_pieces_ninsns (l, align)
1244 unsigned int l;
1245 int align;
1246{
1247 register int n_insns = 0;
1248 int max_size = MOVE_MAX + 1;
1249
1250 if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
1251 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1252 align = MOVE_MAX;
1253
1254 while (max_size > 1)
1255 {
1256 enum machine_mode mode = VOIDmode, tmode;
1257 enum insn_code icode;
1258
1259 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1260 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1261 if (GET_MODE_SIZE (tmode) < max_size)
1262 mode = tmode;
1263
1264 if (mode == VOIDmode)
1265 break;
1266
1267 icode = mov_optab->handlers[(int) mode].insn_code;
1268 if (icode != CODE_FOR_nothing
1269 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1270 GET_MODE_SIZE (mode)))
1271 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1272
1273 max_size = GET_MODE_SIZE (mode);
1274 }
1275
1276 return n_insns;
1277}
1278
1279/* Subroutine of move_by_pieces. Move as many bytes as appropriate
1280 with move instructions for mode MODE. GENFUN is the gen_... function
1281 to make a move insn for that mode. DATA has all the other info. */
1282
1283static void
1284move_by_pieces_1 (genfun, mode, data)
1285 rtx (*genfun) ();
1286 enum machine_mode mode;
1287 struct move_by_pieces *data;
1288{
1289 register int size = GET_MODE_SIZE (mode);
1290 register rtx to1, from1;
1291
1292 while (data->len >= size)
1293 {
1294 if (data->reverse) data->offset -= size;
1295
1296 to1 = (data->autinc_to
1297 ? gen_rtx (MEM, mode, data->to_addr)
1298 : change_address (data->to, mode,
1299 plus_constant (data->to_addr, data->offset)));
1300 from1 =
1301 (data->autinc_from
1302 ? gen_rtx (MEM, mode, data->from_addr)
1303 : change_address (data->from, mode,
1304 plus_constant (data->from_addr, data->offset)));
1305
1306#ifdef HAVE_PRE_DECREMENT
1307 if (data->explicit_inc_to < 0)
1308 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
1309 if (data->explicit_inc_from < 0)
1310 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
1311#endif
1312
1313 emit_insn ((*genfun) (to1, from1));
1314#ifdef HAVE_POST_INCREMENT
1315 if (data->explicit_inc_to > 0)
1316 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
1317 if (data->explicit_inc_from > 0)
1318 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
1319#endif
1320
1321 if (! data->reverse) data->offset += size;
1322
1323 data->len -= size;
1324 }
1325}
1326\f
1327/* Emit code to move a block Y to a block X.
1328 This may be done with string-move instructions,
1329 with multiple scalar move instructions, or with a library call.
1330
1331 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1332 with mode BLKmode.
1333 SIZE is an rtx that says how long they are.
1334 ALIGN is the maximum alignment we can assume they have,
1335 measured in bytes. */
1336
1337void
1338emit_block_move (x, y, size, align)
1339 rtx x, y;
1340 rtx size;
1341 int align;
1342{
1343 if (GET_MODE (x) != BLKmode)
1344 abort ();
1345
1346 if (GET_MODE (y) != BLKmode)
1347 abort ();
1348
1349 x = protect_from_queue (x, 1);
1350 y = protect_from_queue (y, 0);
1351 size = protect_from_queue (size, 0);
1352
1353 if (GET_CODE (x) != MEM)
1354 abort ();
1355 if (GET_CODE (y) != MEM)
1356 abort ();
1357 if (size == 0)
1358 abort ();
1359
1360 if (GET_CODE (size) == CONST_INT
906c4e36 1361 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
bbf6f052
RK
1362 move_by_pieces (x, y, INTVAL (size), align);
1363 else
1364 {
1365 /* Try the most limited insn first, because there's no point
1366 including more than one in the machine description unless
1367 the more limited one has some advantage. */
1368
1369 rtx opalign = GEN_INT (align);
1370 enum machine_mode mode;
1371
1372 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1373 mode = GET_MODE_WIDER_MODE (mode))
1374 {
1375 enum insn_code code = movstr_optab[(int) mode];
1376
1377 if (code != CODE_FOR_nothing
1378 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1379 here because if SIZE is less than the mode mask, as it is
1380 returned by the macro, it will definitely be less than the
1381 actual mode mask. */
1382 && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
1383 && (insn_operand_predicate[(int) code][0] == 0
1384 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1385 && (insn_operand_predicate[(int) code][1] == 0
1386 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1387 && (insn_operand_predicate[(int) code][3] == 0
1388 || (*insn_operand_predicate[(int) code][3]) (opalign,
1389 VOIDmode)))
1390 {
1391 rtx op2;
1392 rtx last = get_last_insn ();
1393 rtx pat;
1394
1395 op2 = convert_to_mode (mode, size, 1);
1396 if (insn_operand_predicate[(int) code][2] != 0
1397 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
1398 op2 = copy_to_mode_reg (mode, op2);
1399
1400 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1401 if (pat)
1402 {
1403 emit_insn (pat);
1404 return;
1405 }
1406 else
1407 delete_insns_since (last);
1408 }
1409 }
1410
1411#ifdef TARGET_MEM_FUNCTIONS
1412 emit_library_call (memcpy_libfunc, 0,
1413 VOIDmode, 3, XEXP (x, 0), Pmode,
1414 XEXP (y, 0), Pmode,
1415 convert_to_mode (Pmode, size, 1), Pmode);
1416#else
1417 emit_library_call (bcopy_libfunc, 0,
1418 VOIDmode, 3, XEXP (y, 0), Pmode,
1419 XEXP (x, 0), Pmode,
1420 convert_to_mode (Pmode, size, 1), Pmode);
1421#endif
1422 }
1423}
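/* Usage sketch (not in the original): copy a fixed-size block between
   two BLKmode MEMs, assuming at most word alignment.  DEST and SRC are
   hypothetical BLKmode MEM rtx's.  */
#if 0
      emit_block_move (dest, src, GEN_INT (64), UNITS_PER_WORD);
#endif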
1424\f
1425/* Copy all or part of a value X into registers starting at REGNO.
1426 The number of registers to be filled is NREGS. */
1427
1428void
1429move_block_to_reg (regno, x, nregs, mode)
1430 int regno;
1431 rtx x;
1432 int nregs;
1433 enum machine_mode mode;
1434{
1435 int i;
1436 rtx pat, last;
1437
1438 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1439 x = validize_mem (force_const_mem (mode, x));
1440
1441 /* See if the machine can do this with a load multiple insn. */
1442#ifdef HAVE_load_multiple
1443 last = get_last_insn ();
1444 pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
1445 GEN_INT (nregs));
1446 if (pat)
1447 {
1448 emit_insn (pat);
1449 return;
1450 }
1451 else
1452 delete_insns_since (last);
1453#endif
1454
1455 for (i = 0; i < nregs; i++)
1456 emit_move_insn (gen_rtx (REG, word_mode, regno + i),
1457 operand_subword_force (x, i, mode));
1458}
1459
1460/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1461 The number of registers to be filled is NREGS. */
1462
1463void
1464move_block_from_reg (regno, x, nregs)
1465 int regno;
1466 rtx x;
1467 int nregs;
1468{
1469 int i;
1470 rtx pat, last;
1471
1472 /* See if the machine can do this with a store multiple insn. */
1473#ifdef HAVE_store_multiple
1474 last = get_last_insn ();
1475 pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
1476 GEN_INT (nregs));
1477 if (pat)
1478 {
1479 emit_insn (pat);
1480 return;
1481 }
1482 else
1483 delete_insns_since (last);
1484#endif
1485
1486 for (i = 0; i < nregs; i++)
1487 {
1488 rtx tem = operand_subword (x, i, 1, BLKmode);
1489
1490 if (tem == 0)
1491 abort ();
1492
1493 emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
1494 }
1495}
1496
1497/* Mark NREGS consecutive regs, starting at REGNO, as being live now. */
1498
1499void
1500use_regs (regno, nregs)
1501 int regno;
1502 int nregs;
1503{
1504 int i;
1505
1506 for (i = 0; i < nregs; i++)
1507 emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
1508}
1509
1510/* Mark the instructions since PREV as a libcall block.
1511 Add REG_LIBCALL to PREV and add a REG_RETVAL to the most recent insn. */
1512
1513static void
1514group_insns (prev)
1515 rtx prev;
1516{
1517 rtx insn_first;
1518 rtx insn_last;
1519
1520 /* Find the instructions to mark */
1521 if (prev)
1522 insn_first = NEXT_INSN (prev);
1523 else
1524 insn_first = get_insns ();
1525
1526 insn_last = get_last_insn ();
1527
1528 REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
1529 REG_NOTES (insn_last));
1530
1531 REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
1532 REG_NOTES (insn_first));
1533}
1534\f
1535/* Write zeros through the storage of OBJECT.
1536 If OBJECT has BLKmode, SIZE is its length in bytes. */
1537
1538void
1539clear_storage (object, size)
1540 rtx object;
1541 int size;
1542{
1543 if (GET_MODE (object) == BLKmode)
1544 {
1545#ifdef TARGET_MEM_FUNCTIONS
1546 emit_library_call (memset_libfunc, 0,
1547 VOIDmode, 3,
1548 XEXP (object, 0), Pmode, const0_rtx, Pmode,
1549 GEN_INT (size), Pmode);
1550#else
1551 emit_library_call (bzero_libfunc, 0,
1552 VOIDmode, 2,
1553 XEXP (object, 0), Pmode,
1554 GEN_INT (size), Pmode);
1555#endif
1556 }
1557 else
1558 emit_move_insn (object, const0_rtx);
1559}
1560
1561/* Generate code to copy Y into X.
1562 Both Y and X must have the same mode, except that
1563 Y can be a constant with VOIDmode.
1564 This mode cannot be BLKmode; use emit_block_move for that.
1565
1566 Return the last instruction emitted. */
1567
1568rtx
1569emit_move_insn (x, y)
1570 rtx x, y;
1571{
1572 enum machine_mode mode = GET_MODE (x);
1573 enum machine_mode submode;
1574 enum mode_class class = GET_MODE_CLASS (mode);
1575 int i;
1576
1577 x = protect_from_queue (x, 1);
1578 y = protect_from_queue (y, 0);
1579
1580 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
1581 abort ();
1582
1583 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
1584 y = force_const_mem (mode, y);
1585
1586 /* If X or Y are memory references, verify that their addresses are valid
1587 for the machine. */
1588 if (GET_CODE (x) == MEM
1589 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
1590 && ! push_operand (x, GET_MODE (x)))
1591 || (flag_force_addr
1592 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
1593 x = change_address (x, VOIDmode, XEXP (x, 0));
1594
1595 if (GET_CODE (y) == MEM
1596 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
1597 || (flag_force_addr
1598 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
1599 y = change_address (y, VOIDmode, XEXP (y, 0));
1600
1601 if (mode == BLKmode)
1602 abort ();
1603
1604 return emit_move_insn_1 (x, y);
1605}
1606
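/* Sketch (illustrative only): move a small constant into a freshly
   allocated SImode pseudo register.  */
#if 0
      rtx reg = gen_reg_rtx (SImode);
      emit_move_insn (reg, GEN_INT (42));
#endif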
1607/* Low level part of emit_move_insn.
1608 Called just like emit_move_insn, but assumes X and Y
1609 are basically valid. */
1610
1611rtx
1612emit_move_insn_1 (x, y)
1613 rtx x, y;
1614{
1615 enum machine_mode mode = GET_MODE (x);
1616 enum machine_mode submode;
1617 enum mode_class class = GET_MODE_CLASS (mode);
1618 int i;
1619
1620 if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1621 submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
1622 (class == MODE_COMPLEX_INT
1623 ? MODE_INT : MODE_FLOAT),
1624 0);
1625
1626 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
1627 return
1628 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
1629
1630 /* Expand complex moves by moving real part and imag part, if possible. */
1631 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
1632 && submode != BLKmode
1633 && (mov_optab->handlers[(int) submode].insn_code
1634 != CODE_FOR_nothing))
1635 {
1636 /* Don't split destination if it is a stack push. */
1637 int stack = push_operand (x, GET_MODE (x));
1638 rtx prev = get_last_insn ();
1639
1640 /* Tell flow that the whole of the destination is being set. */
1641 if (GET_CODE (x) == REG)
1642 emit_insn (gen_rtx (CLOBBER, VOIDmode, x));
1643
1644 /* If this is a stack, push the highpart first, so it
1645 will be in the argument order.
1646
1647 In that case, change_address is used only to convert
1648 the mode, not to change the address. */
1649 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1650 ((stack ? change_address (x, submode, (rtx) 0)
1651 : gen_highpart (submode, x)),
1652 gen_highpart (submode, y)));
1653 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
1654 ((stack ? change_address (x, submode, (rtx) 0)
1655 : gen_lowpart (submode, x)),
1656 gen_lowpart (submode, y)));
1657
1658 group_insns (prev);
1659
1660 return get_last_insn ();
1661 }
1662
1663 /* This will handle any multi-word mode that lacks a move_insn pattern.
1664 However, you will get better code if you define such patterns,
1665 even if they must turn into multiple assembler instructions. */
1666 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
1667 {
1668 rtx last_insn = 0;
1669 rtx prev_insn = get_last_insn ();
1670
1671 for (i = 0;
1672 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
1673 i++)
1674 {
1675 rtx xpart = operand_subword (x, i, 1, mode);
1676 rtx ypart = operand_subword (y, i, 1, mode);
1677
1678 /* If we can't get a part of Y, put Y into memory if it is a
1679 constant. Otherwise, force it into a register. If we still
1680 can't get a part of Y, abort. */
1681 if (ypart == 0 && CONSTANT_P (y))
1682 {
1683 y = force_const_mem (mode, y);
1684 ypart = operand_subword (y, i, 1, mode);
1685 }
1686 else if (ypart == 0)
1687 ypart = operand_subword_force (y, i, mode);
1688
1689 if (xpart == 0 || ypart == 0)
1690 abort ();
1691
1692 last_insn = emit_move_insn (xpart, ypart);
1693 }
1694 /* Mark these insns as a libcall block. */
1695 group_insns (prev_insn);
1696
1697 return last_insn;
1698 }
1699 else
1700 abort ();
1701}
1702\f
1703/* Pushing data onto the stack. */
1704
1705/* Push a block of length SIZE (perhaps variable)
1706 and return an rtx to address the beginning of the block.
1707 Note that it is not possible for the value returned to be a QUEUED.
1708 The value may be virtual_outgoing_args_rtx.
1709
1710 EXTRA is the number of bytes of padding to push in addition to SIZE.
1711 BELOW nonzero means this padding comes at low addresses;
1712 otherwise, the padding comes at high addresses. */
1713
1714rtx
1715push_block (size, extra, below)
1716 rtx size;
1717 int extra, below;
1718{
1719 register rtx temp;
1720 if (CONSTANT_P (size))
1721 anti_adjust_stack (plus_constant (size, extra));
1722 else if (GET_CODE (size) == REG && extra == 0)
1723 anti_adjust_stack (size);
1724 else
1725 {
1726 rtx temp = copy_to_mode_reg (Pmode, size);
1727 if (extra != 0)
1728 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
1729 temp, 0, OPTAB_LIB_WIDEN);
1730 anti_adjust_stack (temp);
1731 }
1732
1733#ifdef STACK_GROWS_DOWNWARD
1734 temp = virtual_outgoing_args_rtx;
1735 if (extra != 0 && below)
1736 temp = plus_constant (temp, extra);
1737#else
1738 if (GET_CODE (size) == CONST_INT)
1739 temp = plus_constant (virtual_outgoing_args_rtx,
1740 - INTVAL (size) - (below ? 0 : extra));
1741 else if (extra != 0 && !below)
1742 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1743 negate_rtx (Pmode, plus_constant (size, extra)));
1744 else
1745 temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
1746 negate_rtx (Pmode, size));
1747#endif
1748
1749 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
1750}
1751
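/* Sketch (illustrative only): reserve 16 bytes of argument space plus
   4 bytes of padding at low addresses, returning the block's address.  */
#if 0
      rtx block_addr = push_block (GEN_INT (16), 4, 1);
#endif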
1752rtx
1753gen_push_operand ()
1754{
1755 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1756}
1757
1758/* Generate code to push X onto the stack, assuming it has mode MODE and
1759 type TYPE.
1760 MODE is redundant except when X is a CONST_INT (since they don't
1761 carry mode info).
1762 SIZE is an rtx for the size of data to be copied (in bytes),
1763 needed only if X is BLKmode.
1764
1765 ALIGN (in bytes) is maximum alignment we can assume.
1766
1767 If PARTIAL and REG are both nonzero, then copy that many of the first
1768 words of X into registers starting with REG, and push the rest of X.
1769 The amount of space pushed is decreased by PARTIAL words,
1770 rounded *down* to a multiple of PARM_BOUNDARY.
1771 REG must be a hard register in this case.
1772 If REG is zero but PARTIAL is not, take all other actions for an
1773 argument partially in registers, but do not actually load any
1774 registers.
1775
1776 EXTRA is the amount in bytes of extra space to leave next to this arg.
1777 This is ignored if an argument block has already been allocated.
1778
1779 On a machine that lacks real push insns, ARGS_ADDR is the address of
1780 the bottom of the argument block for this call. We use indexing off there
1781 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
1782 argument block has not been preallocated.
1783
1784 ARGS_SO_FAR is the size of args previously pushed for this call. */
1785
1786void
1787emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1788 args_addr, args_so_far)
1789 register rtx x;
1790 enum machine_mode mode;
1791 tree type;
1792 rtx size;
1793 int align;
1794 int partial;
1795 rtx reg;
1796 int extra;
1797 rtx args_addr;
1798 rtx args_so_far;
1799{
1800 rtx xinner;
1801 enum direction stack_direction
1802#ifdef STACK_GROWS_DOWNWARD
1803 = downward;
1804#else
1805 = upward;
1806#endif
1807
1808 /* Decide where to pad the argument: `downward' for below,
1809 `upward' for above, or `none' for don't pad it.
1810 Default is below for small data on big-endian machines; else above. */
1811 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1812
1813 /* Invert direction if stack is post-update. */
1814 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1815 if (where_pad != none)
1816 where_pad = (where_pad == downward ? upward : downward);
1817
1818 xinner = x = protect_from_queue (x, 0);
1819
1820 if (mode == BLKmode)
1821 {
1822 /* Copy a block into the stack, entirely or partially. */
1823
1824 register rtx temp;
1825 int used = partial * UNITS_PER_WORD;
1826 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1827 int skip;
1828
1829 if (size == 0)
1830 abort ();
1831
1832 used -= offset;
1833
1834 /* USED is now the # of bytes we need not copy to the stack
1835 because registers will take care of them. */
1836
1837 if (partial != 0)
1838 xinner = change_address (xinner, BLKmode,
1839 plus_constant (XEXP (xinner, 0), used));
1840
1841 /* If the partial register-part of the arg counts in its stack size,
1842 skip the part of stack space corresponding to the registers.
1843 Otherwise, start copying to the beginning of the stack space,
1844 by setting SKIP to 0. */
1845#ifndef REG_PARM_STACK_SPACE
1846 skip = 0;
1847#else
1848 skip = used;
1849#endif
1850
1851#ifdef PUSH_ROUNDING
1852 /* Do it with several push insns if that doesn't take lots of insns
1853 and if there is no difficulty with push insns that skip bytes
1854 on the stack for alignment purposes. */
1855 if (args_addr == 0
1856 && GET_CODE (size) == CONST_INT
1857 && skip == 0
1858 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1859 < MOVE_RATIO)
bbf6f052
RK
1860 /* Here we avoid the case of a structure whose weak alignment
1861 forces many pushes of a small amount of data,
1862 and such small pushes do rounding that causes trouble. */
e87b4f3f
RS
1863 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1864 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 1865 || PUSH_ROUNDING (align) == align)
bbf6f052
RK
1866 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1867 {
1868 /* Push padding now if padding above and stack grows down,
1869 or if padding below and stack grows up.
1870 But if space already allocated, this has already been done. */
1871 if (extra && args_addr == 0
1872 && where_pad != none && where_pad != stack_direction)
906c4e36 1873 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
1874
1875 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1876 INTVAL (size) - used, align);
1877 }
1878 else
1879#endif /* PUSH_ROUNDING */
1880 {
1881 /* Otherwise make space on the stack and copy the data
1882 to the address of that space. */
1883
1884 /* Deduct words put into registers from the size we must copy. */
1885 if (partial != 0)
1886 {
1887 if (GET_CODE (size) == CONST_INT)
906c4e36 1888 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
1889 else
1890 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
1891 GEN_INT (used), NULL_RTX, 0,
1892 OPTAB_LIB_WIDEN);
bbf6f052
RK
1893 }
1894
1895 /* Get the address of the stack space.
1896 In this case, we do not deal with EXTRA separately.
1897 A single stack adjust will do. */
1898 if (! args_addr)
1899 {
1900 temp = push_block (size, extra, where_pad == downward);
1901 extra = 0;
1902 }
1903 else if (GET_CODE (args_so_far) == CONST_INT)
1904 temp = memory_address (BLKmode,
1905 plus_constant (args_addr,
1906 skip + INTVAL (args_so_far)));
1907 else
1908 temp = memory_address (BLKmode,
1909 plus_constant (gen_rtx (PLUS, Pmode,
1910 args_addr, args_so_far),
1911 skip));
1912
1913 /* TEMP is the address of the block. Copy the data there. */
1914 if (GET_CODE (size) == CONST_INT
1915 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1916 < MOVE_RATIO))
1917 {
1918 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1919 INTVAL (size), align);
1920 goto ret;
1921 }
1922 /* Try the most limited insn first, because there's no point
1923 including more than one in the machine description unless
1924 the more limited one has some advantage. */
1925#ifdef HAVE_movstrqi
1926 if (HAVE_movstrqi
1927 && GET_CODE (size) == CONST_INT
1928 && ((unsigned) INTVAL (size)
1929 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1930 {
c841050e
RS
1931 rtx pat = gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1932 xinner, size, GEN_INT (align));
1933 if (pat != 0)
1934 {
1935 emit_insn (pat);
1936 goto ret;
1937 }
bbf6f052
RK
1938 }
1939#endif
1940#ifdef HAVE_movstrhi
1941 if (HAVE_movstrhi
1942 && GET_CODE (size) == CONST_INT
1943 && ((unsigned) INTVAL (size)
1944 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1945 {
c841050e
RS
1946 rtx pat = gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1947 xinner, size, GEN_INT (align));
1948 if (pat != 0)
1949 {
1950 emit_insn (pat);
1951 goto ret;
1952 }
bbf6f052
RK
1953 }
1954#endif
1955#ifdef HAVE_movstrsi
1956 if (HAVE_movstrsi)
1957 {
c841050e
RS
1958 rtx pat = gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1959 xinner, size, GEN_INT (align));
1960 if (pat != 0)
1961 {
1962 emit_insn (pat);
1963 goto ret;
1964 }
bbf6f052
RK
1965 }
1966#endif
1967#ifdef HAVE_movstrdi
1968 if (HAVE_movstrdi)
1969 {
c841050e
RS
1970 rtx pat = gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1971 xinner, size, GEN_INT (align));
1972 if (pat != 0)
1973 {
1974 emit_insn (pat);
1975 goto ret;
1976 }
bbf6f052
RK
1977 }
1978#endif
1979
1980#ifndef ACCUMULATE_OUTGOING_ARGS
1981 /* If the source is referenced relative to the stack pointer,
1982 copy it to another register to stabilize it. We do not need
1983 to do this if we know that we won't be changing sp. */
1984
1985 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1986 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1987 temp = copy_to_reg (temp);
1988#endif
1989
1990 /* Make inhibit_defer_pop nonzero around the library call
1991 to force it to pop the bcopy-arguments right away. */
1992 NO_DEFER_POP;
1993#ifdef TARGET_MEM_FUNCTIONS
d562e42e 1994 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
1995 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1996 size, Pmode);
1997#else
d562e42e 1998 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
1999 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
2000 size, Pmode);
2001#endif
2002 OK_DEFER_POP;
2003 }
2004 }
2005 else if (partial > 0)
2006 {
2007 /* Scalar partly in registers. */
2008
2009 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2010 int i;
2011 int not_stack;
2012 /* # words of start of argument
2013 that we must make space for but need not store. */
2014 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2015 int args_offset = INTVAL (args_so_far);
2016 int skip;
2017
2018 /* Push padding now if padding above and stack grows down,
2019 or if padding below and stack grows up.
2020 But if space already allocated, this has already been done. */
2021 if (extra && args_addr == 0
2022 && where_pad != none && where_pad != stack_direction)
906c4e36 2023 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2024
2025 /* If we make space by pushing it, we might as well push
2026 the real data. Otherwise, we can leave OFFSET nonzero
2027 and leave the space uninitialized. */
2028 if (args_addr == 0)
2029 offset = 0;
2030
2031 /* Now NOT_STACK gets the number of words that we don't need to
2032 allocate on the stack. */
2033 not_stack = partial - offset;
2034
2035 /* If the partial register-part of the arg counts in its stack size,
2036 skip the part of stack space corresponding to the registers.
2037 Otherwise, start copying to the beginning of the stack space,
2038 by setting SKIP to 0. */
2039#ifndef REG_PARM_STACK_SPACE
2040 skip = 0;
2041#else
2042 skip = not_stack;
2043#endif
2044
2045 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2046 x = validize_mem (force_const_mem (mode, x));
2047
2048 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2049 SUBREGs of such registers are not allowed. */
2050 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2051 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2052 x = copy_to_reg (x);
2053
2054 /* Loop over all the words allocated on the stack for this arg. */
2055 /* We can do it by words, because any scalar bigger than a word
 2056	 has a size that is a multiple of a word.  */
2057#ifndef PUSH_ARGS_REVERSED
2058 for (i = not_stack; i < size; i++)
2059#else
2060 for (i = size - 1; i >= not_stack; i--)
2061#endif
2062 if (i >= not_stack + offset)
2063 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
2064 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2065 0, args_addr,
2066 GEN_INT (args_offset + ((i - not_stack + skip)
bbf6f052
RK
2067 * UNITS_PER_WORD)));
2068 }
2069 else
2070 {
2071 rtx addr;
2072
2073 /* Push padding now if padding above and stack grows down,
2074 or if padding below and stack grows up.
2075 But if space already allocated, this has already been done. */
2076 if (extra && args_addr == 0
2077 && where_pad != none && where_pad != stack_direction)
906c4e36 2078 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2079
2080#ifdef PUSH_ROUNDING
2081 if (args_addr == 0)
2082 addr = gen_push_operand ();
2083 else
2084#endif
2085 if (GET_CODE (args_so_far) == CONST_INT)
2086 addr
2087 = memory_address (mode,
2088 plus_constant (args_addr, INTVAL (args_so_far)));
2089 else
2090 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2091 args_so_far));
2092
2093 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2094 }
2095
2096 ret:
2097 /* If part should go in registers, copy that part
2098 into the appropriate registers. Do this now, at the end,
2099 since mem-to-mem copies above may do function calls. */
cd048831 2100 if (partial > 0 && reg != 0)
bbf6f052
RK
2101 move_block_to_reg (REGNO (reg), x, partial, mode);
2102
2103 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 2104 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2105}
2106\f
2107/* Output a library call to function FUN (a SYMBOL_REF rtx)
2108 (emitting the queue unless NO_QUEUE is nonzero),
2109 for a value of mode OUTMODE,
2110 with NARGS different arguments, passed as alternating rtx values
2111 and machine_modes to convert them to.
2112 The rtx values should have been passed through protect_from_queue already.
2113
2114 NO_QUEUE will be true if and only if the library call is a `const' call
2115 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
d562e42e
JW
2116 to the variable is_const in expand_call.
2117
2118 NO_QUEUE must be true for const calls, because if it isn't, then
2119 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2120 and will be lost if the libcall sequence is optimized away.
2121
2122 NO_QUEUE must be false for non-const calls, because if it isn't, the
2123 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2124 optimized. For instance, the instruction scheduler may incorrectly
2125 move memory references across the non-const call. */
bbf6f052
RK
2126
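/* Illustrative only, not compiled: the memcpy call emitted elsewhere in
   this file is typical.  TEMP, SRC and SIZE are rtx values already valid
   as move operands, and NO_QUEUE is 0 since memcpy is not treated as a
   `const' call here:

	emit_library_call (memcpy_libfunc, 0,
			   VOIDmode, 3, temp, Pmode, src, Pmode,
			   size, Pmode);  */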
2127void
2128emit_library_call (va_alist)
2129 va_dcl
2130{
2131 va_list p;
4e7c9a22 2132 /* Total size in bytes of all the stack-parms scanned so far. */
bbf6f052 2133 struct args_size args_size;
4e7c9a22
RS
2134 /* Size of arguments before any adjustments (such as rounding). */
2135 struct args_size original_args_size;
bbf6f052
RK
2136 register int argnum;
2137 enum machine_mode outmode;
2138 int nargs;
2139 rtx fun;
2140 rtx orgfun;
2141 int inc;
2142 int count;
2143 rtx argblock = 0;
2144 CUMULATIVE_ARGS args_so_far;
2145 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2146 struct args_size offset; struct args_size size; };
2147 struct arg *argvec;
2148 int old_inhibit_defer_pop = inhibit_defer_pop;
2149 int no_queue = 0;
2150 rtx use_insns;
2151
2152 va_start (p);
2153 orgfun = fun = va_arg (p, rtx);
2154 no_queue = va_arg (p, int);
2155 outmode = va_arg (p, enum machine_mode);
2156 nargs = va_arg (p, int);
2157
2158 /* Copy all the libcall-arguments out of the varargs data
2159 and into a vector ARGVEC.
2160
2161 Compute how to pass each argument. We only support a very small subset
2162 of the full argument passing conventions to limit complexity here since
2163 library functions shouldn't have many args. */
2164
2165 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2166
8eef5f77 2167 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
bbf6f052
RK
2168
2169 args_size.constant = 0;
2170 args_size.var = 0;
2171
2172 for (count = 0; count < nargs; count++)
2173 {
2174 rtx val = va_arg (p, rtx);
2175 enum machine_mode mode = va_arg (p, enum machine_mode);
2176
2177 /* We cannot convert the arg value to the mode the library wants here;
2178 must do it earlier where we know the signedness of the arg. */
2179 if (mode == BLKmode
2180 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2181 abort ();
2182
2183 /* On some machines, there's no way to pass a float to a library fcn.
2184 Pass it as a double instead. */
2185#ifdef LIBGCC_NEEDS_DOUBLE
2186 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
803090c4 2187 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
bbf6f052
RK
2188#endif
2189
5d901c31
RS
2190 /* There's no need to call protect_from_queue, because
2191 either emit_move_insn or emit_push_insn will do that. */
2192
bbf6f052
RK
2193 /* Make sure it is a reasonable operand for a move or push insn. */
2194 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2195 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
906c4e36 2196 val = force_operand (val, NULL_RTX);
bbf6f052
RK
2197
2198 argvec[count].value = val;
2199 argvec[count].mode = mode;
2200
2201#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
906c4e36 2202 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
bbf6f052
RK
2203 abort ();
2204#endif
2205
906c4e36 2206 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
bbf6f052
RK
2207 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2208 abort ();
2209#ifdef FUNCTION_ARG_PARTIAL_NREGS
2210 argvec[count].partial
906c4e36 2211 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
bbf6f052
RK
2212#else
2213 argvec[count].partial = 0;
2214#endif
2215
906c4e36 2216 locate_and_pad_parm (mode, NULL_TREE,
bbf6f052 2217 argvec[count].reg && argvec[count].partial == 0,
906c4e36 2218 NULL_TREE, &args_size, &argvec[count].offset,
bbf6f052
RK
2219 &argvec[count].size);
2220
2221 if (argvec[count].size.var)
2222 abort ();
2223
2224#ifndef REG_PARM_STACK_SPACE
2225 if (argvec[count].partial)
2226 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2227#endif
2228
2229 if (argvec[count].reg == 0 || argvec[count].partial != 0
2230#ifdef REG_PARM_STACK_SPACE
2231 || 1
2232#endif
2233 )
2234 args_size.constant += argvec[count].size.constant;
2235
2236#ifdef ACCUMULATE_OUTGOING_ARGS
2237 /* If this arg is actually passed on the stack, it might be
2238 clobbering something we already put there (this library call might
2239 be inside the evaluation of an argument to a function whose call
2240 requires the stack). This will only occur when the library call
2241 has sufficient args to run out of argument registers. Abort in
2242 this case; if this ever occurs, code must be added to save and
2243 restore the arg slot. */
2244
2245 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2246 abort ();
2247#endif
2248
2249 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2250 }
2251 va_end (p);
2252
2253 /* If this machine requires an external definition for library
2254 functions, write one out. */
2255 assemble_external_libcall (fun);
2256
4e7c9a22 2257 original_args_size = args_size;
bbf6f052
RK
2258#ifdef STACK_BOUNDARY
2259 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2260 / STACK_BYTES) * STACK_BYTES);
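  /* E.g. with STACK_BYTES == 8, a 13-byte arg block becomes
     (13 + 7) / 8 * 8 == 16 bytes.  */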
2261#endif
2262
2263#ifdef REG_PARM_STACK_SPACE
2264 args_size.constant = MAX (args_size.constant,
428e0ca8 2265 REG_PARM_STACK_SPACE (NULL_TREE));
428e0ca8
JW
2266#ifndef OUTGOING_REG_PARM_STACK_SPACE
2267 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2268#endif
8207ec04
RK
2269#endif
2270
2271#ifdef ACCUMULATE_OUTGOING_ARGS
bbf6f052
RK
2272 if (args_size.constant > current_function_outgoing_args_size)
2273 current_function_outgoing_args_size = args_size.constant;
2274 args_size.constant = 0;
2275#endif
2276
2277#ifndef PUSH_ROUNDING
906c4e36 2278 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
bbf6f052
RK
2279#endif
2280
4e7c9a22
RS
2281#ifdef PUSH_ARGS_REVERSED
2282#ifdef STACK_BOUNDARY
2283 /* If we push args individually in reverse order, perform stack alignment
2284 before the first push (the last arg). */
2285 if (argblock == 0)
2286 anti_adjust_stack (GEN_INT (args_size.constant
2287 - original_args_size.constant));
2288#endif
2289#endif
2290
bbf6f052
RK
2291#ifdef PUSH_ARGS_REVERSED
2292 inc = -1;
2293 argnum = nargs - 1;
2294#else
2295 inc = 1;
2296 argnum = 0;
2297#endif
2298
2299 /* Push the args that need to be pushed. */
2300
2301 for (count = 0; count < nargs; count++, argnum += inc)
2302 {
2303 register enum machine_mode mode = argvec[argnum].mode;
2304 register rtx val = argvec[argnum].value;
2305 rtx reg = argvec[argnum].reg;
2306 int partial = argvec[argnum].partial;
2307
2308 if (! (reg != 0 && partial == 0))
906c4e36
RK
2309 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
 2310			argblock, GEN_INT (argvec[argnum].offset.constant));
bbf6f052
RK
2311 NO_DEFER_POP;
2312 }
2313
4e7c9a22
RS
2314#ifndef PUSH_ARGS_REVERSED
2315#ifdef STACK_BOUNDARY
2316 /* If we pushed args in forward order, perform stack alignment
2317 after pushing the last arg. */
2318 if (argblock == 0)
2319 anti_adjust_stack (GEN_INT (args_size.constant
2320 - original_args_size.constant));
2321#endif
2322#endif
2323
bbf6f052
RK
2324#ifdef PUSH_ARGS_REVERSED
2325 argnum = nargs - 1;
2326#else
2327 argnum = 0;
2328#endif
2329
2330 /* Now load any reg parms into their regs. */
2331
2332 for (count = 0; count < nargs; count++, argnum += inc)
2333 {
2334 register enum machine_mode mode = argvec[argnum].mode;
2335 register rtx val = argvec[argnum].value;
2336 rtx reg = argvec[argnum].reg;
2337 int partial = argvec[argnum].partial;
2338
2339 if (reg != 0 && partial == 0)
2340 emit_move_insn (reg, val);
2341 NO_DEFER_POP;
2342 }
2343
2344 /* For version 1.37, try deleting this entirely. */
2345 if (! no_queue)
2346 emit_queue ();
2347
2348 /* Any regs containing parms remain in use through the call. */
2349 start_sequence ();
2350 for (count = 0; count < nargs; count++)
2351 if (argvec[count].reg != 0)
2352 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2353
2354 use_insns = get_insns ();
2355 end_sequence ();
2356
906c4e36 2357 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
bbf6f052
RK
2358
2359 /* Don't allow popping to be deferred, since then
2360 cse'ing of library calls could delete a call and leave the pop. */
2361 NO_DEFER_POP;
2362
2363 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2364 will set inhibit_defer_pop to that value. */
2365
2366 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2367 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
906c4e36 2368 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
bbf6f052
RK
2369 old_inhibit_defer_pop + 1, use_insns, no_queue);
2370
2371 /* Now restore inhibit_defer_pop to its actual original value. */
2372 OK_DEFER_POP;
2373}
2374\f
42b85a55 2375/* Like emit_library_call except that an extra argument, VALUE,
e010155c
RS
2376 comes second and says where to store the result.
2377 (If VALUE is zero, the result comes in the function value register.) */
42b85a55
RS
2378
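/* Illustrative only, not compiled: storing the SImode result of a
   hypothetical `const' libcall FUN into TARGET (pass 0 for TARGET to
   leave the result in the function value register):

	emit_library_call_value (fun, target, 1, SImode, 2,
				 op0, SImode, op1, SImode);  */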
2379void
2380emit_library_call_value (va_alist)
2381 va_dcl
2382{
2383 va_list p;
4e7c9a22 2384 /* Total size in bytes of all the stack-parms scanned so far. */
42b85a55 2385 struct args_size args_size;
4e7c9a22
RS
2386 /* Size of arguments before any adjustments (such as rounding). */
2387 struct args_size original_args_size;
42b85a55
RS
2388 register int argnum;
2389 enum machine_mode outmode;
2390 int nargs;
2391 rtx fun;
2392 rtx orgfun;
2393 int inc;
2394 int count;
2395 rtx argblock = 0;
2396 CUMULATIVE_ARGS args_so_far;
2397 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2398 struct args_size offset; struct args_size size; };
2399 struct arg *argvec;
2400 int old_inhibit_defer_pop = inhibit_defer_pop;
2401 int no_queue = 0;
2402 rtx use_insns;
2403 rtx value;
2404 rtx mem_value = 0;
2405
2406 va_start (p);
2407 orgfun = fun = va_arg (p, rtx);
2408 value = va_arg (p, rtx);
2409 no_queue = va_arg (p, int);
2410 outmode = va_arg (p, enum machine_mode);
2411 nargs = va_arg (p, int);
2412
2413 /* If this kind of value comes back in memory,
2414 decide where in memory it should come back. */
2415 if (RETURN_IN_MEMORY (type_for_mode (outmode, 0)))
2416 {
2417 if (GET_CODE (value) == MEM)
2418 mem_value = value;
2419 else
2420 mem_value = assign_stack_temp (outmode, GET_MODE_SIZE (outmode), 0);
2421 }
2422
2423 /* ??? Unfinished: must pass the memory address as an argument. */
2424
2425 /* Copy all the libcall-arguments out of the varargs data
2426 and into a vector ARGVEC.
2427
2428 Compute how to pass each argument. We only support a very small subset
2429 of the full argument passing conventions to limit complexity here since
2430 library functions shouldn't have many args. */
2431
e010155c 2432 argvec = (struct arg *) alloca ((nargs + 1) * sizeof (struct arg));
42b85a55
RS
2433
2434 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2435
2436 args_size.constant = 0;
2437 args_size.var = 0;
2438
e010155c
RS
2439 count = 0;
2440
2441 /* If there's a structure value address to be passed,
2442 either pass it in the special place, or pass it as an extra argument. */
2443 if (mem_value)
2444 {
2445 rtx addr = XEXP (mem_value, 0);
2446
2447 if (! struct_value_rtx)
2448 {
2449 nargs++;
2450
2451 /* Make sure it is a reasonable operand for a move or push insn. */
2452 if (GET_CODE (addr) != REG && GET_CODE (addr) != MEM
2453 && ! (CONSTANT_P (addr) && LEGITIMATE_CONSTANT_P (addr)))
2454 addr = force_operand (addr, NULL_RTX);
2455
2456 argvec[count].value = addr;
2457 argvec[count].mode = outmode;
2458 argvec[count].partial = 0;
2459
2460 argvec[count].reg = FUNCTION_ARG (args_so_far, outmode, NULL_TREE, 1);
2461#ifdef FUNCTION_ARG_PARTIAL_NREGS
2462 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, outmode, NULL_TREE, 1))
2463 abort ();
2464#endif
2465
2466 locate_and_pad_parm (outmode, NULL_TREE,
2467 argvec[count].reg && argvec[count].partial == 0,
2468 NULL_TREE, &args_size, &argvec[count].offset,
2469 &argvec[count].size);
2470
2471
2472 if (argvec[count].reg == 0 || argvec[count].partial != 0
2473#ifdef REG_PARM_STACK_SPACE
2474 || 1
2475#endif
2476 )
2477 args_size.constant += argvec[count].size.constant;
2478
2479 FUNCTION_ARG_ADVANCE (args_so_far, outmode, (tree)0, 1);
2480 }
2481 }
2482
2483 for (; count < nargs; count++)
42b85a55
RS
2484 {
2485 rtx val = va_arg (p, rtx);
2486 enum machine_mode mode = va_arg (p, enum machine_mode);
2487
2488 /* We cannot convert the arg value to the mode the library wants here;
2489 must do it earlier where we know the signedness of the arg. */
2490 if (mode == BLKmode
2491 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2492 abort ();
2493
2494 /* On some machines, there's no way to pass a float to a library fcn.
2495 Pass it as a double instead. */
2496#ifdef LIBGCC_NEEDS_DOUBLE
2497 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2498 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2499#endif
2500
2501 /* There's no need to call protect_from_queue, because
2502 either emit_move_insn or emit_push_insn will do that. */
2503
2504 /* Make sure it is a reasonable operand for a move or push insn. */
2505 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2506 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2507 val = force_operand (val, NULL_RTX);
2508
2509 argvec[count].value = val;
2510 argvec[count].mode = mode;
2511
2512#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2513 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2514 abort ();
2515#endif
2516
2517 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2518 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2519 abort ();
2520#ifdef FUNCTION_ARG_PARTIAL_NREGS
2521 argvec[count].partial
2522 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2523#else
2524 argvec[count].partial = 0;
2525#endif
2526
2527 locate_and_pad_parm (mode, NULL_TREE,
2528 argvec[count].reg && argvec[count].partial == 0,
2529 NULL_TREE, &args_size, &argvec[count].offset,
2530 &argvec[count].size);
2531
2532 if (argvec[count].size.var)
2533 abort ();
2534
2535#ifndef REG_PARM_STACK_SPACE
2536 if (argvec[count].partial)
2537 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2538#endif
2539
2540 if (argvec[count].reg == 0 || argvec[count].partial != 0
2541#ifdef REG_PARM_STACK_SPACE
2542 || 1
2543#endif
2544 )
2545 args_size.constant += argvec[count].size.constant;
2546
2547#ifdef ACCUMULATE_OUTGOING_ARGS
2548 /* If this arg is actually passed on the stack, it might be
2549 clobbering something we already put there (this library call might
2550 be inside the evaluation of an argument to a function whose call
2551 requires the stack). This will only occur when the library call
2552 has sufficient args to run out of argument registers. Abort in
2553 this case; if this ever occurs, code must be added to save and
2554 restore the arg slot. */
2555
2556 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2557 abort ();
2558#endif
2559
2560 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2561 }
2562 va_end (p);
2563
2564 /* If this machine requires an external definition for library
2565 functions, write one out. */
2566 assemble_external_libcall (fun);
2567
4e7c9a22 2568 original_args_size = args_size;
42b85a55
RS
2569#ifdef STACK_BOUNDARY
2570 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2571 / STACK_BYTES) * STACK_BYTES);
2572#endif
2573
2574#ifdef REG_PARM_STACK_SPACE
2575 args_size.constant = MAX (args_size.constant,
2576 REG_PARM_STACK_SPACE (NULL_TREE));
2577#ifndef OUTGOING_REG_PARM_STACK_SPACE
2578 args_size.constant -= REG_PARM_STACK_SPACE (NULL_TREE);
2579#endif
2580#endif
2581
2582#ifdef ACCUMULATE_OUTGOING_ARGS
2583 if (args_size.constant > current_function_outgoing_args_size)
2584 current_function_outgoing_args_size = args_size.constant;
2585 args_size.constant = 0;
2586#endif
2587
2588#ifndef PUSH_ROUNDING
2589 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2590#endif
2591
4e7c9a22
RS
2592#ifdef PUSH_ARGS_REVERSED
2593#ifdef STACK_BOUNDARY
2594 /* If we push args individually in reverse order, perform stack alignment
2595 before the first push (the last arg). */
2596 if (argblock == 0)
2597 anti_adjust_stack (GEN_INT (args_size.constant
2598 - original_args_size.constant));
2599#endif
2600#endif
2601
42b85a55
RS
2602#ifdef PUSH_ARGS_REVERSED
2603 inc = -1;
2604 argnum = nargs - 1;
2605#else
2606 inc = 1;
2607 argnum = 0;
2608#endif
2609
2610 /* Push the args that need to be pushed. */
2611
2612 for (count = 0; count < nargs; count++, argnum += inc)
2613 {
2614 register enum machine_mode mode = argvec[argnum].mode;
2615 register rtx val = argvec[argnum].value;
2616 rtx reg = argvec[argnum].reg;
2617 int partial = argvec[argnum].partial;
2618
2619 if (! (reg != 0 && partial == 0))
2620 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
 2621			argblock, GEN_INT (argvec[argnum].offset.constant));
2622 NO_DEFER_POP;
2623 }
2624
4e7c9a22
RS
2625#ifndef PUSH_ARGS_REVERSED
2626#ifdef STACK_BOUNDARY
2627 /* If we pushed args in forward order, perform stack alignment
2628 after pushing the last arg. */
2629 if (argblock == 0)
2630 anti_adjust_stack (GEN_INT (args_size.constant
2631 - original_args_size.constant));
2632#endif
2633#endif
2634
42b85a55
RS
2635#ifdef PUSH_ARGS_REVERSED
2636 argnum = nargs - 1;
2637#else
2638 argnum = 0;
2639#endif
2640
2641 /* Now load any reg parms into their regs. */
2642
e010155c 2643 if (mem_value != 0 && struct_value_rtx != 0)
d6af3b06 2644 emit_move_insn (struct_value_rtx, XEXP (mem_value, 0));
e010155c 2645
42b85a55
RS
2646 for (count = 0; count < nargs; count++, argnum += inc)
2647 {
2648 register enum machine_mode mode = argvec[argnum].mode;
2649 register rtx val = argvec[argnum].value;
2650 rtx reg = argvec[argnum].reg;
2651 int partial = argvec[argnum].partial;
2652
2653 if (reg != 0 && partial == 0)
2654 emit_move_insn (reg, val);
2655 NO_DEFER_POP;
2656 }
2657
e010155c 2658#if 0
42b85a55
RS
2659 /* For version 1.37, try deleting this entirely. */
2660 if (! no_queue)
2661 emit_queue ();
e010155c 2662#endif
42b85a55
RS
2663
2664 /* Any regs containing parms remain in use through the call. */
2665 start_sequence ();
2666 for (count = 0; count < nargs; count++)
2667 if (argvec[count].reg != 0)
2668 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2669
2670 use_insns = get_insns ();
2671 end_sequence ();
2672
2673 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2674
2675 /* Don't allow popping to be deferred, since then
2676 cse'ing of library calls could delete a call and leave the pop. */
2677 NO_DEFER_POP;
2678
2679 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2680 will set inhibit_defer_pop to that value. */
2681
2682 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2683 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2684 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2685 old_inhibit_defer_pop + 1, use_insns, no_queue);
2686
2687 /* Now restore inhibit_defer_pop to its actual original value. */
2688 OK_DEFER_POP;
2689
2690 /* Copy the value to the right place. */
e010155c 2691 if (outmode != VOIDmode)
42b85a55 2692 {
e010155c
RS
2693 if (mem_value)
2694 {
2695 if (value == 0)
2696 value = hard_libcall_value (outmode);
2697 if (value != mem_value)
2698 emit_move_insn (value, mem_value);
2699 }
2700 else if (value != 0)
2701 emit_move_insn (value, hard_libcall_value (outmode));
42b85a55 2702 }
42b85a55
RS
2703}
2704\f
bbf6f052
RK
2705/* Expand an assignment that stores the value of FROM into TO.
2706 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2707 (This may contain a QUEUED rtx.)
2708 Otherwise, the returned value is not meaningful.
2709
2710 SUGGEST_REG is no longer actually used.
2711 It used to mean, copy the value through a register
2712 and return that register, if that is possible.
2713 But now we do this if WANT_VALUE.
2714
2715 If the value stored is a constant, we return the constant. */
2716
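/* Illustrative only, not compiled: a front end expanding the statement
   `x = y' (where X_TREE and Y_TREE are hypothetical _DECL nodes) would
   discard the value:

	expand_assignment (x_tree, y_tree, 0, 0);  */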
2717rtx
2718expand_assignment (to, from, want_value, suggest_reg)
2719 tree to, from;
2720 int want_value;
2721 int suggest_reg;
2722{
2723 register rtx to_rtx = 0;
2724 rtx result;
2725
2726 /* Don't crash if the lhs of the assignment was erroneous. */
2727
2728 if (TREE_CODE (to) == ERROR_MARK)
906c4e36 2729 return expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
2730
2731 /* Assignment of a structure component needs special treatment
2732 if the structure component's rtx is not simply a MEM.
2733 Assignment of an array element at a constant index
2734 has the same problem. */
2735
2736 if (TREE_CODE (to) == COMPONENT_REF
2737 || TREE_CODE (to) == BIT_FIELD_REF
2738 || (TREE_CODE (to) == ARRAY_REF
2739 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2740 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2741 {
2742 enum machine_mode mode1;
2743 int bitsize;
2744 int bitpos;
7bb0943f 2745 tree offset;
bbf6f052
RK
2746 int unsignedp;
2747 int volatilep = 0;
7bb0943f 2748 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
bbf6f052
RK
2749 &mode1, &unsignedp, &volatilep);
2750
2751 /* If we are going to use store_bit_field and extract_bit_field,
2752 make sure to_rtx will be safe for multiple use. */
2753
2754 if (mode1 == VOIDmode && want_value)
2755 tem = stabilize_reference (tem);
2756
906c4e36 2757 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
2758 if (offset != 0)
2759 {
906c4e36 2760 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
2761
2762 if (GET_CODE (to_rtx) != MEM)
2763 abort ();
2764 to_rtx = change_address (to_rtx, VOIDmode,
2765 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2766 force_reg (Pmode, offset_rtx)));
2767 }
bbf6f052
RK
2768 if (volatilep)
2769 {
2770 if (GET_CODE (to_rtx) == MEM)
2771 MEM_VOLATILE_P (to_rtx) = 1;
2772#if 0 /* This was turned off because, when a field is volatile
2773 in an object which is not volatile, the object may be in a register,
2774 and then we would abort over here. */
2775 else
2776 abort ();
2777#endif
2778 }
2779
2780 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2781 (want_value
2782 /* Spurious cast makes HPUX compiler happy. */
2783 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2784 : VOIDmode),
2785 unsignedp,
2786 /* Required alignment of containing datum. */
2787 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2788 int_size_in_bytes (TREE_TYPE (tem)));
2789 preserve_temp_slots (result);
2790 free_temp_slots ();
2791
2792 return result;
2793 }
2794
2795 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2796 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2797
2798 if (to_rtx == 0)
906c4e36 2799 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
bbf6f052 2800
86d38d25
RS
2801 /* Don't move directly into a return register. */
2802 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
2803 {
2804 rtx temp = expand_expr (from, 0, VOIDmode, 0);
2805 emit_move_insn (to_rtx, temp);
2806 preserve_temp_slots (to_rtx);
2807 free_temp_slots ();
2808 return to_rtx;
2809 }
2810
bbf6f052
RK
2811 /* In case we are returning the contents of an object which overlaps
2812 the place the value is being stored, use a safe function when copying
2813 a value through a pointer into a structure value return block. */
2814 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2815 && current_function_returns_struct
2816 && !current_function_returns_pcc_struct)
2817 {
906c4e36 2818 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
2819 rtx size = expr_size (from);
2820
2821#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2822 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
2823 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2824 XEXP (from_rtx, 0), Pmode,
2825 size, Pmode);
2826#else
d562e42e 2827 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
2828 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2829 XEXP (to_rtx, 0), Pmode,
2830 size, Pmode);
2831#endif
2832
2833 preserve_temp_slots (to_rtx);
2834 free_temp_slots ();
2835 return to_rtx;
2836 }
2837
2838 /* Compute FROM and store the value in the rtx we got. */
2839
2840 result = store_expr (from, to_rtx, want_value);
2841 preserve_temp_slots (result);
2842 free_temp_slots ();
2843 return result;
2844}
2845
2846/* Generate code for computing expression EXP,
2847 and storing the value into TARGET.
2848 Returns TARGET or an equivalent value.
2849 TARGET may contain a QUEUED rtx.
2850
2851 If SUGGEST_REG is nonzero, copy the value through a register
2852 and return that register, if that is possible.
2853
2854 If the value stored is a constant, we return the constant. */
2855
2856rtx
2857store_expr (exp, target, suggest_reg)
2858 register tree exp;
2859 register rtx target;
2860 int suggest_reg;
2861{
2862 register rtx temp;
2863 int dont_return_target = 0;
2864
2865 if (TREE_CODE (exp) == COMPOUND_EXPR)
2866 {
2867 /* Perform first part of compound expression, then assign from second
2868 part. */
2869 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2870 emit_queue ();
2871 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2872 }
2873 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2874 {
 2875      /* For a conditional expression, get a safe form of the target.  Then
2876 test the condition, doing the appropriate assignment on either
2877 side. This avoids the creation of unnecessary temporaries.
2878 For non-BLKmode, it is more efficient not to do this. */
2879
2880 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2881
2882 emit_queue ();
2883 target = protect_from_queue (target, 1);
2884
2885 NO_DEFER_POP;
2886 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2887 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2888 emit_queue ();
2889 emit_jump_insn (gen_jump (lab2));
2890 emit_barrier ();
2891 emit_label (lab1);
2892 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2893 emit_queue ();
2894 emit_label (lab2);
2895 OK_DEFER_POP;
2896 return target;
2897 }
2898 else if (suggest_reg && GET_CODE (target) == MEM
2899 && GET_MODE (target) != BLKmode)
2900 /* If target is in memory and caller wants value in a register instead,
2901 arrange that. Pass TARGET as target for expand_expr so that,
2902 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2903 We know expand_expr will not use the target in that case. */
2904 {
906c4e36 2905 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
bbf6f052
RK
2906 GET_MODE (target), 0);
2907 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2908 temp = copy_to_reg (temp);
2909 dont_return_target = 1;
2910 }
2911 else if (queued_subexp_p (target))
2912 /* If target contains a postincrement, it is not safe
2913 to use as the returned value. It would access the wrong
2914 place by the time the queued increment gets output.
2915 So copy the value through a temporary and use that temp
2916 as the result. */
2917 {
2918 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2919 {
2920 /* Expand EXP into a new pseudo. */
2921 temp = gen_reg_rtx (GET_MODE (target));
2922 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2923 }
2924 else
906c4e36 2925 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
bbf6f052
RK
2926 dont_return_target = 1;
2927 }
1499e0a8
RK
2928 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
 2929    /* If this is a scalar in a register that is stored in a wider mode
2930 than the declared mode, compute the result into its declared mode
2931 and then convert to the wider mode. Our value is the computed
2932 expression. */
2933 {
2934 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2935 convert_move (SUBREG_REG (target), temp,
2936 SUBREG_PROMOTED_UNSIGNED_P (target));
2937 return temp;
2938 }
bbf6f052
RK
2939 else
2940 {
2941 temp = expand_expr (exp, target, GET_MODE (target), 0);
2942 /* DO return TARGET if it's a specified hardware register.
2943 expand_return relies on this. */
2944 if (!(target && GET_CODE (target) == REG
2945 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2946 && CONSTANT_P (temp))
2947 dont_return_target = 1;
2948 }
2949
2950 /* If value was not generated in the target, store it there.
 2951     Convert the value to TARGET's type first if necessary.  */
2952
2953 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2954 {
2955 target = protect_from_queue (target, 1);
2956 if (GET_MODE (temp) != GET_MODE (target)
2957 && GET_MODE (temp) != VOIDmode)
2958 {
2959 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2960 if (dont_return_target)
2961 {
2962 /* In this case, we will return TEMP,
2963 so make sure it has the proper mode.
2964 But don't forget to store the value into TARGET. */
2965 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2966 emit_move_insn (target, temp);
2967 }
2968 else
2969 convert_move (target, temp, unsignedp);
2970 }
2971
2972 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2973 {
2974 /* Handle copying a string constant into an array.
2975 The string constant may be shorter than the array.
2976 So copy just the string's actual length, and clear the rest. */
2977 rtx size;
2978
e87b4f3f
RS
2979 /* Get the size of the data type of the string,
2980 which is actually the size of the target. */
2981 size = expr_size (exp);
2982 if (GET_CODE (size) == CONST_INT
2983 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2984 emit_block_move (target, temp, size,
2985 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2986 else
bbf6f052 2987 {
e87b4f3f
RS
2988 /* Compute the size of the data to copy from the string. */
2989 tree copy_size
2990 = fold (build (MIN_EXPR, sizetype,
2991 size_binop (CEIL_DIV_EXPR,
2992 TYPE_SIZE (TREE_TYPE (exp)),
2993 size_int (BITS_PER_UNIT)),
2994 convert (sizetype,
2995 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
906c4e36
RK
2996 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2997 VOIDmode, 0);
e87b4f3f
RS
2998 rtx label = 0;
2999
3000 /* Copy that much. */
3001 emit_block_move (target, temp, copy_size_rtx,
3002 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3003
3004 /* Figure out how much is left in TARGET
3005 that we have to clear. */
3006 if (GET_CODE (copy_size_rtx) == CONST_INT)
3007 {
3008 temp = plus_constant (XEXP (target, 0),
3009 TREE_STRING_LENGTH (exp));
3010 size = plus_constant (size,
3011 - TREE_STRING_LENGTH (exp));
3012 }
3013 else
3014 {
3015 enum machine_mode size_mode = Pmode;
3016
3017 temp = force_reg (Pmode, XEXP (target, 0));
3018 temp = expand_binop (size_mode, add_optab, temp,
906c4e36
RK
3019 copy_size_rtx, NULL_RTX, 0,
3020 OPTAB_LIB_WIDEN);
e87b4f3f
RS
3021
3022 size = expand_binop (size_mode, sub_optab, size,
906c4e36
RK
3023 copy_size_rtx, NULL_RTX, 0,
3024 OPTAB_LIB_WIDEN);
e87b4f3f 3025
906c4e36 3026 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
e87b4f3f
RS
3027 GET_MODE (size), 0, 0);
3028 label = gen_label_rtx ();
3029 emit_jump_insn (gen_blt (label));
3030 }
3031
3032 if (size != const0_rtx)
3033 {
bbf6f052 3034#ifdef TARGET_MEM_FUNCTIONS
d562e42e 3035 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
e87b4f3f 3036 temp, Pmode, const0_rtx, Pmode, size, Pmode);
bbf6f052 3037#else
d562e42e 3038 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
e87b4f3f 3039 temp, Pmode, size, Pmode);
bbf6f052 3040#endif
e87b4f3f
RS
3041 }
3042 if (label)
3043 emit_label (label);
bbf6f052
RK
3044 }
3045 }
3046 else if (GET_MODE (temp) == BLKmode)
3047 emit_block_move (target, temp, expr_size (exp),
3048 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3049 else
3050 emit_move_insn (target, temp);
3051 }
3052 if (dont_return_target)
3053 return temp;
3054 return target;
3055}
3056\f
3057/* Store the value of constructor EXP into the rtx TARGET.
3058 TARGET is either a REG or a MEM. */
3059
3060static void
3061store_constructor (exp, target)
3062 tree exp;
3063 rtx target;
3064{
4af3895e
JVA
3065 tree type = TREE_TYPE (exp);
3066
bbf6f052
RK
3067 /* We know our target cannot conflict, since safe_from_p has been called. */
3068#if 0
3069 /* Don't try copying piece by piece into a hard register
3070 since that is vulnerable to being clobbered by EXP.
3071 Instead, construct in a pseudo register and then copy it all. */
3072 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3073 {
3074 rtx temp = gen_reg_rtx (GET_MODE (target));
3075 store_constructor (exp, temp);
3076 emit_move_insn (target, temp);
3077 return;
3078 }
3079#endif
3080
4af3895e 3081 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
bbf6f052
RK
3082 {
3083 register tree elt;
3084
4af3895e
JVA
3085 /* Inform later passes that the whole union value is dead. */
3086 if (TREE_CODE (type) == UNION_TYPE)
bbf6f052 3087 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
4af3895e
JVA
3088
3089 /* If we are building a static constructor into a register,
3090 set the initial value as zero so we can fold the value into
3091 a constant. */
3092 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
3093 emit_move_insn (target, const0_rtx);
3094
bbf6f052
RK
3095 /* If the constructor has fewer fields than the structure,
3096 clear the whole structure first. */
3097 else if (list_length (CONSTRUCTOR_ELTS (exp))
4af3895e
JVA
3098 != list_length (TYPE_FIELDS (type)))
3099 clear_storage (target, int_size_in_bytes (type));
bbf6f052
RK
3100 else
3101 /* Inform later passes that the old value is dead. */
3102 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3103
3104 /* Store each element of the constructor into
3105 the corresponding field of TARGET. */
3106
3107 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3108 {
3109 register tree field = TREE_PURPOSE (elt);
3110 register enum machine_mode mode;
3111 int bitsize;
3112 int bitpos;
3113 int unsignedp;
3114
f32fd778
RS
3115 /* Just ignore missing fields.
3116 We cleared the whole structure, above,
3117 if any fields are missing. */
3118 if (field == 0)
3119 continue;
3120
bbf6f052
RK
3121 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3122 unsignedp = TREE_UNSIGNED (field);
3123 mode = DECL_MODE (field);
3124 if (DECL_BIT_FIELD (field))
3125 mode = VOIDmode;
3126
3127 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
3128 /* ??? This case remains to be written. */
3129 abort ();
3130
3131 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
3132
3133 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
3134 /* The alignment of TARGET is
3135 at least what its type requires. */
3136 VOIDmode, 0,
4af3895e
JVA
3137 TYPE_ALIGN (type) / BITS_PER_UNIT,
3138 int_size_in_bytes (type));
bbf6f052
RK
3139 }
3140 }
4af3895e 3141 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
3142 {
3143 register tree elt;
3144 register int i;
4af3895e 3145 tree domain = TYPE_DOMAIN (type);
906c4e36
RK
3146 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3147 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 3148 tree elttype = TREE_TYPE (type);
bbf6f052
RK
3149
3150 /* If the constructor has fewer fields than the structure,
4af3895e
JVA
 3151	 clear the whole structure first.  Similarly if this is a
 3152	 static constructor of a non-BLKmode object.  */
bbf6f052 3153
4af3895e
JVA
3154 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
3155 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
bbf6f052
RK
3156 clear_storage (target, maxelt - minelt + 1);
3157 else
3158 /* Inform later passes that the old value is dead. */
3159 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
3160
3161 /* Store each element of the constructor into
3162 the corresponding element of TARGET, determined
3163 by counting the elements. */
3164 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3165 elt;
3166 elt = TREE_CHAIN (elt), i++)
3167 {
3168 register enum machine_mode mode;
3169 int bitsize;
3170 int bitpos;
3171 int unsignedp;
3172
3173 mode = TYPE_MODE (elttype);
3174 bitsize = GET_MODE_BITSIZE (mode);
3175 unsignedp = TREE_UNSIGNED (elttype);
3176
3177 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3178
3179 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
3180 /* The alignment of TARGET is
3181 at least what its type requires. */
3182 VOIDmode, 0,
4af3895e
JVA
3183 TYPE_ALIGN (type) / BITS_PER_UNIT,
3184 int_size_in_bytes (type));
bbf6f052
RK
3185 }
3186 }
3187
3188 else
3189 abort ();
3190}
3191
3192/* Store the value of EXP (an expression tree)
3193 into a subfield of TARGET which has mode MODE and occupies
3194 BITSIZE bits, starting BITPOS bits from the start of TARGET.
3195 If MODE is VOIDmode, it means that we are storing into a bit-field.
3196
3197 If VALUE_MODE is VOIDmode, return nothing in particular.
3198 UNSIGNEDP is not used in this case.
3199
3200 Otherwise, return an rtx for the value stored. This rtx
3201 has mode VALUE_MODE if that is convenient to do.
3202 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
3203
3204 ALIGN is the alignment that TARGET is known to have, measured in bytes.
3205 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
3206
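/* Illustrative only, not compiled: store_constructor above fills one
   field of an aggregate this way, discarding the value:

	store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
		     VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
		     int_size_in_bytes (type));  */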
3207static rtx
3208store_field (target, bitsize, bitpos, mode, exp, value_mode,
3209 unsignedp, align, total_size)
3210 rtx target;
3211 int bitsize, bitpos;
3212 enum machine_mode mode;
3213 tree exp;
3214 enum machine_mode value_mode;
3215 int unsignedp;
3216 int align;
3217 int total_size;
3218{
906c4e36 3219 HOST_WIDE_INT width_mask = 0;
bbf6f052 3220
906c4e36
RK
3221 if (bitsize < HOST_BITS_PER_WIDE_INT)
3222 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
3223
3224 /* If we are storing into an unaligned field of an aligned union that is
3225 in a register, we may have the mode of TARGET being an integer mode but
3226 MODE == BLKmode. In that case, get an aligned object whose size and
3227 alignment are the same as TARGET and store TARGET into it (we can avoid
3228 the store if the field being stored is the entire width of TARGET). Then
3229 call ourselves recursively to store the field into a BLKmode version of
3230 that object. Finally, load from the object into TARGET. This is not
3231 very efficient in general, but should only be slightly more expensive
3232 than the otherwise-required unaligned accesses. Perhaps this can be
3233 cleaned up later. */
3234
3235 if (mode == BLKmode
3236 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
3237 {
3238 rtx object = assign_stack_temp (GET_MODE (target),
3239 GET_MODE_SIZE (GET_MODE (target)), 0);
3240 rtx blk_object = copy_rtx (object);
3241
3242 PUT_MODE (blk_object, BLKmode);
3243
3244 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
3245 emit_move_insn (object, target);
3246
3247 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
3248 align, total_size);
3249
3250 emit_move_insn (target, object);
3251
3252 return target;
3253 }
3254
3255 /* If the structure is in a register or if the component
3256 is a bit field, we cannot use addressing to access it.
3257 Use bit-field techniques or SUBREG to store in it. */
3258
4fa52007
RK
3259 if (mode == VOIDmode
3260 || (mode != BLKmode && ! direct_store[(int) mode])
3261 || GET_CODE (target) == REG
bbf6f052
RK
3262 || GET_CODE (target) == SUBREG)
3263 {
906c4e36 3264 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
3265 /* Store the value in the bitfield. */
3266 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
3267 if (value_mode != VOIDmode)
3268 {
3269 /* The caller wants an rtx for the value. */
3270 /* If possible, avoid refetching from the bitfield itself. */
3271 if (width_mask != 0
3272 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 3273 {
9074de27 3274 tree count;
5c4d7cfb 3275 enum machine_mode tmode;
86a2c12a 3276
5c4d7cfb
RS
3277 if (unsignedp)
3278 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
3279 tmode = GET_MODE (temp);
86a2c12a
RS
3280 if (tmode == VOIDmode)
3281 tmode = value_mode;
5c4d7cfb
RS
3282 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
3283 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
3284 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
3285 }
bbf6f052 3286 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
3287 NULL_RTX, value_mode, 0, align,
3288 total_size);
bbf6f052
RK
3289 }
3290 return const0_rtx;
3291 }
3292 else
3293 {
3294 rtx addr = XEXP (target, 0);
3295 rtx to_rtx;
3296
3297 /* If a value is wanted, it must be the lhs;
3298 so make the address stable for multiple use. */
3299
3300 if (value_mode != VOIDmode && GET_CODE (addr) != REG
3301 && ! CONSTANT_ADDRESS_P (addr)
3302 /* A frame-pointer reference is already stable. */
3303 && ! (GET_CODE (addr) == PLUS
3304 && GET_CODE (XEXP (addr, 1)) == CONST_INT
3305 && (XEXP (addr, 0) == virtual_incoming_args_rtx
3306 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
3307 addr = copy_to_reg (addr);
3308
3309 /* Now build a reference to just the desired component. */
3310
3311 to_rtx = change_address (target, mode,
3312 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
3313 MEM_IN_STRUCT_P (to_rtx) = 1;
3314
3315 return store_expr (exp, to_rtx, value_mode != VOIDmode);
3316 }
3317}
3318\f
3319/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
3320 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
3321 ARRAY_REFs at constant positions and find the ultimate containing object,
3322 which we return.
3323
3324 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
3325 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
3326 If the position of the field is variable, we store a tree
3327 giving the variable offset (in units) in *POFFSET.
3328 This offset is in addition to the bit position.
3329 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
3330
3331 If any of the extraction expressions is volatile,
3332 we store 1 in *PVOLATILEP. Otherwise we don't change that.
3333
3334 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
3335 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
3336 is redundant.
3337
3338 If the field describes a variable-sized object, *PMODE is set to
3339 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
3340 this case, but the address of the object can be found. */
bbf6f052
RK
3341
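/* Illustrative only, not compiled: expand_assignment above makes the
   canonical call and then expands the containing object it returns:

	tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep);
	to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);  */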
3342tree
4969d05d
RK
3343get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
3344 punsignedp, pvolatilep)
bbf6f052
RK
3345 tree exp;
3346 int *pbitsize;
3347 int *pbitpos;
7bb0943f 3348 tree *poffset;
bbf6f052
RK
3349 enum machine_mode *pmode;
3350 int *punsignedp;
3351 int *pvolatilep;
3352{
3353 tree size_tree = 0;
3354 enum machine_mode mode = VOIDmode;
7bb0943f 3355 tree offset = 0;
bbf6f052
RK
3356
3357 if (TREE_CODE (exp) == COMPONENT_REF)
3358 {
3359 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
3360 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
3361 mode = DECL_MODE (TREE_OPERAND (exp, 1));
3362 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
3363 }
3364 else if (TREE_CODE (exp) == BIT_FIELD_REF)
3365 {
3366 size_tree = TREE_OPERAND (exp, 1);
3367 *punsignedp = TREE_UNSIGNED (exp);
3368 }
3369 else
3370 {
3371 mode = TYPE_MODE (TREE_TYPE (exp));
3372 *pbitsize = GET_MODE_BITSIZE (mode);
3373 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3374 }
3375
3376 if (size_tree)
3377 {
3378 if (TREE_CODE (size_tree) != INTEGER_CST)
e7c33f54
RK
3379 mode = BLKmode, *pbitsize = -1;
3380 else
3381 *pbitsize = TREE_INT_CST_LOW (size_tree);
bbf6f052
RK
3382 }
3383
3384 /* Compute cumulative bit-offset for nested component-refs and array-refs,
3385 and find the ultimate containing object. */
3386
3387 *pbitpos = 0;
3388
3389 while (1)
3390 {
7bb0943f 3391 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 3392 {
7bb0943f
RS
3393 tree pos = (TREE_CODE (exp) == COMPONENT_REF
3394 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
3395 : TREE_OPERAND (exp, 2));
bbf6f052 3396
7bb0943f
RS
3397 if (TREE_CODE (pos) == PLUS_EXPR)
3398 {
3399 tree constant, var;
3400 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
3401 {
3402 constant = TREE_OPERAND (pos, 0);
3403 var = TREE_OPERAND (pos, 1);
3404 }
3405 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3406 {
3407 constant = TREE_OPERAND (pos, 1);
3408 var = TREE_OPERAND (pos, 0);
3409 }
3410 else
3411 abort ();
3412 *pbitpos += TREE_INT_CST_LOW (constant);
3413 if (offset)
3414 offset = size_binop (PLUS_EXPR, offset,
3415 size_binop (FLOOR_DIV_EXPR, var,
3416 size_int (BITS_PER_UNIT)));
3417 else
3418 offset = size_binop (FLOOR_DIV_EXPR, var,
3419 size_int (BITS_PER_UNIT));
3420 }
3421 else if (TREE_CODE (pos) == INTEGER_CST)
3422 *pbitpos += TREE_INT_CST_LOW (pos);
3423 else
3424 {
3425 /* Assume here that the offset is a multiple of a unit.
3426 If not, there should be an explicitly added constant. */
3427 if (offset)
3428 offset = size_binop (PLUS_EXPR, offset,
3429 size_binop (FLOOR_DIV_EXPR, pos,
3430 size_int (BITS_PER_UNIT)));
3431 else
3432 offset = size_binop (FLOOR_DIV_EXPR, pos,
3433 size_int (BITS_PER_UNIT));
3434 }
bbf6f052 3435 }
bbf6f052 3436
bbf6f052
RK
3437 else if (TREE_CODE (exp) == ARRAY_REF
3438 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3439 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
3440 {
3441 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
3442 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
bbf6f052
RK
3443 }
3444 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3445 && ! ((TREE_CODE (exp) == NOP_EXPR
3446 || TREE_CODE (exp) == CONVERT_EXPR)
3447 && (TYPE_MODE (TREE_TYPE (exp))
3448 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3449 break;
7bb0943f
RS
3450
3451 /* If any reference in the chain is volatile, the effect is volatile. */
3452 if (TREE_THIS_VOLATILE (exp))
3453 *pvolatilep = 1;
bbf6f052
RK
3454 exp = TREE_OPERAND (exp, 0);
3455 }
3456
3457 /* If this was a bit-field, see if there is a mode that allows direct
3458 access in case EXP is in memory. */
3459 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
3460 {
3461 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3462 if (mode == BLKmode)
3463 mode = VOIDmode;
3464 }
3465
3466 *pmode = mode;
7bb0943f
RS
3467 *poffset = offset;
3468#if 0
3469 /* We aren't finished fixing the callers to really handle nonzero offset. */
3470 if (offset != 0)
3471 abort ();
3472#endif
bbf6f052
RK
3473
3474 return exp;
3475}
3476\f
3477/* Given an rtx VALUE that may contain additions and multiplications,
3478 return an equivalent value that just refers to a register or memory.
3479 This is done by generating instructions to perform the arithmetic
c45a13a6
RK
3480 and returning a pseudo-register containing the value.
3481
3482 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
3483
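/* Illustrative only, not compiled: emit_library_call above uses it to
   legitimize an argument that is not already a register, memory, or
   legitimate constant:

	if (GET_CODE (val) != REG && GET_CODE (val) != MEM
	    && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
	  val = force_operand (val, NULL_RTX);  */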
3484rtx
3485force_operand (value, target)
3486 rtx value, target;
3487{
3488 register optab binoptab = 0;
3489 /* Use a temporary to force order of execution of calls to
3490 `force_operand'. */
3491 rtx tmp;
3492 register rtx op2;
3493 /* Use subtarget as the target for operand 0 of a binary operation. */
3494 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3495
3496 if (GET_CODE (value) == PLUS)
3497 binoptab = add_optab;
3498 else if (GET_CODE (value) == MINUS)
3499 binoptab = sub_optab;
3500 else if (GET_CODE (value) == MULT)
3501 {
3502 op2 = XEXP (value, 1);
3503 if (!CONSTANT_P (op2)
3504 && !(GET_CODE (op2) == REG && op2 != subtarget))
3505 subtarget = 0;
3506 tmp = force_operand (XEXP (value, 0), subtarget);
3507 return expand_mult (GET_MODE (value), tmp,
906c4e36 3508 force_operand (op2, NULL_RTX),
bbf6f052
RK
3509 target, 0);
3510 }
3511
3512 if (binoptab)
3513 {
3514 op2 = XEXP (value, 1);
3515 if (!CONSTANT_P (op2)
3516 && !(GET_CODE (op2) == REG && op2 != subtarget))
3517 subtarget = 0;
3518 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3519 {
3520 binoptab = add_optab;
3521 op2 = negate_rtx (GET_MODE (value), op2);
3522 }
3523
3524 /* Check for an addition with OP2 a constant integer and our first
3525 operand a PLUS of a virtual register and something else. In that
3526 case, we want to emit the sum of the virtual register and the
3527 constant first and then add the other value. This allows virtual
3528 register instantiation to simply modify the constant rather than
3529 creating another one around this addition. */
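 /* Sketch (illustrative, not from the original sources): given
 (plus (plus (reg virtual-stack-vars) (reg 70)) (const_int 4)),
 we emit virtual-stack-vars + 4 first, so instantiation can fold
 the 4 into its offset, and only then add (reg 70). */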
3530 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3531 && GET_CODE (XEXP (value, 0)) == PLUS
3532 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3533 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3534 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3535 {
3536 rtx temp = expand_binop (GET_MODE (value), binoptab,
3537 XEXP (XEXP (value, 0), 0), op2,
3538 subtarget, 0, OPTAB_LIB_WIDEN);
3539 return expand_binop (GET_MODE (value), binoptab, temp,
3540 force_operand (XEXP (XEXP (value, 0), 1), 0),
3541 target, 0, OPTAB_LIB_WIDEN);
3542 }
3543
3544 tmp = force_operand (XEXP (value, 0), subtarget);
3545 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 3546 force_operand (op2, NULL_RTX),
bbf6f052 3547 target, 0, OPTAB_LIB_WIDEN);
8008b228 3548 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
3549 because the only operations we are expanding here are signed ones. */
3550 }
3551 return value;
3552}
3553\f
3554/* Subroutine of expand_expr:
3555 save the non-copied parts (LIST) of an expr (LHS), and return a list
3556 which can restore these values to their previous values,
3557 should something modify their storage. */
3558
3559static tree
3560save_noncopied_parts (lhs, list)
3561 tree lhs;
3562 tree list;
3563{
3564 tree tail;
3565 tree parts = 0;
3566
3567 for (tail = list; tail; tail = TREE_CHAIN (tail))
3568 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3569 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3570 else
3571 {
3572 tree part = TREE_VALUE (tail);
3573 tree part_type = TREE_TYPE (part);
906c4e36 3574 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
3575 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3576 int_size_in_bytes (part_type), 0);
3577 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 3578 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 3579 parts = tree_cons (to_be_saved,
906c4e36
RK
3580 build (RTL_EXPR, part_type, NULL_TREE,
3581 (tree) target),
bbf6f052
RK
3582 parts);
3583 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3584 }
3585 return parts;
3586}
3587
3588/* Subroutine of expand_expr:
3589 record the non-copied parts (LIST) of an expr (LHS), and return a list
3590 which specifies the initial values of these parts. */
3591
3592static tree
3593init_noncopied_parts (lhs, list)
3594 tree lhs;
3595 tree list;
3596{
3597 tree tail;
3598 tree parts = 0;
3599
3600 for (tail = list; tail; tail = TREE_CHAIN (tail))
3601 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3602 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3603 else
3604 {
3605 tree part = TREE_VALUE (tail);
3606 tree part_type = TREE_TYPE (part);
906c4e36 3607 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
3608 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3609 }
3610 return parts;
3611}
3612
3613/* Subroutine of expand_expr: return nonzero iff there is no way that
3614 EXP can reference X, which is being modified. */
3615
3616static int
3617safe_from_p (x, exp)
3618 rtx x;
3619 tree exp;
3620{
3621 rtx exp_rtl = 0;
3622 int i, nops;
3623
3624 if (x == 0)
3625 return 1;
3626
3627 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3628 find the underlying pseudo. */
3629 if (GET_CODE (x) == SUBREG)
3630 {
3631 x = SUBREG_REG (x);
3632 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3633 return 0;
3634 }
3635
3636 /* If X is a location in the outgoing argument area, it is always safe. */
3637 if (GET_CODE (x) == MEM
3638 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3639 || (GET_CODE (XEXP (x, 0)) == PLUS
3640 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3641 return 1;
3642
3643 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3644 {
3645 case 'd':
3646 exp_rtl = DECL_RTL (exp);
3647 break;
3648
3649 case 'c':
3650 return 1;
3651
3652 case 'x':
3653 if (TREE_CODE (exp) == TREE_LIST)
f32fd778
RS
3654 return ((TREE_VALUE (exp) == 0
3655 || safe_from_p (x, TREE_VALUE (exp)))
bbf6f052
RK
3656 && (TREE_CHAIN (exp) == 0
3657 || safe_from_p (x, TREE_CHAIN (exp))));
3658 else
3659 return 0;
3660
3661 case '1':
3662 return safe_from_p (x, TREE_OPERAND (exp, 0));
3663
3664 case '2':
3665 case '<':
3666 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3667 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3668
3669 case 'e':
3670 case 'r':
3671 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3672 the expression. If it is set, we conflict iff we are that rtx or
3673 both are in memory. Otherwise, we check all operands of the
3674 expression recursively. */
3675
3676 switch (TREE_CODE (exp))
3677 {
3678 case ADDR_EXPR:
3679 return staticp (TREE_OPERAND (exp, 0));
3680
3681 case INDIRECT_REF:
3682 if (GET_CODE (x) == MEM)
3683 return 0;
3684 break;
3685
3686 case CALL_EXPR:
3687 exp_rtl = CALL_EXPR_RTL (exp);
3688 if (exp_rtl == 0)
3689 {
3690 /* Assume that the call will clobber all hard registers and
3691 all of memory. */
3692 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3693 || GET_CODE (x) == MEM)
3694 return 0;
3695 }
3696
3697 break;
3698
3699 case RTL_EXPR:
3700 exp_rtl = RTL_EXPR_RTL (exp);
3701 if (exp_rtl == 0)
3702 /* We don't know what this can modify. */
3703 return 0;
3704
3705 break;
3706
3707 case WITH_CLEANUP_EXPR:
3708 exp_rtl = RTL_EXPR_RTL (exp);
3709 break;
3710
3711 case SAVE_EXPR:
3712 exp_rtl = SAVE_EXPR_RTL (exp);
3713 break;
3714
8129842c
RS
3715 case BIND_EXPR:
3716 /* The only operand we look at is operand 1. The rest aren't
3717 part of the expression. */
3718 return safe_from_p (x, TREE_OPERAND (exp, 1));
3719
bbf6f052
RK
3720 case METHOD_CALL_EXPR:
 3721 /* This takes an rtx argument, but shouldn't appear here. */
3722 abort ();
3723 }
3724
3725 /* If we have an rtx, we do not need to scan our operands. */
3726 if (exp_rtl)
3727 break;
3728
3729 nops = tree_code_length[(int) TREE_CODE (exp)];
3730 for (i = 0; i < nops; i++)
3731 if (TREE_OPERAND (exp, i) != 0
3732 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3733 return 0;
3734 }
3735
3736 /* If we have an rtl, find any enclosed object. Then see if we conflict
3737 with it. */
3738 if (exp_rtl)
3739 {
3740 if (GET_CODE (exp_rtl) == SUBREG)
3741 {
3742 exp_rtl = SUBREG_REG (exp_rtl);
3743 if (GET_CODE (exp_rtl) == REG
3744 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3745 return 0;
3746 }
3747
3748 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3749 are memory and EXP is not readonly. */
3750 return ! (rtx_equal_p (x, exp_rtl)
3751 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3752 && ! TREE_READONLY (exp)));
3753 }
3754
3755 /* If we reach here, it is safe. */
3756 return 1;
3757}
3758
3759/* Subroutine of expand_expr: return nonzero iff EXP is an
3760 expression whose type is statically determinable. */
3761
3762static int
3763fixed_type_p (exp)
3764 tree exp;
3765{
3766 if (TREE_CODE (exp) == PARM_DECL
3767 || TREE_CODE (exp) == VAR_DECL
3768 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3769 || TREE_CODE (exp) == COMPONENT_REF
3770 || TREE_CODE (exp) == ARRAY_REF)
3771 return 1;
3772 return 0;
3773}
3774\f
3775/* expand_expr: generate code for computing expression EXP.
3776 An rtx for the computed value is returned. The value is never null.
3777 In the case of a void EXP, const0_rtx is returned.
3778
3779 The value may be stored in TARGET if TARGET is nonzero.
3780 TARGET is just a suggestion; callers must assume that
3781 the rtx returned may not be the same as TARGET.
3782
3783 If TARGET is CONST0_RTX, it means that the value will be ignored.
3784
3785 If TMODE is not VOIDmode, it suggests generating the
3786 result in mode TMODE. But this is done only when convenient.
 3787 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3788 TMODE is just a suggestion; callers must assume that
3789 the rtx returned may not have mode TMODE.
3790
3791 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3792 with a constant address even if that address is not normally legitimate.
3793 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3794
3795 If MODIFIER is EXPAND_SUM then when EXP is an addition
3796 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3797 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3798 products as above, or REG or MEM, or constant.
3799 Ordinarily in such cases we would output mul or add instructions
3800 and then return a pseudo reg containing the sum.
3801
3802 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3803 it also marks a label as absolutely required (it can't be dead).
26fcb35a 3804 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6dc42e49 3805 This is used for outputting expressions used in initializers. */
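 /* A typical call for a caller with no preferred target or mode looks
 like this (illustrative only; the plain 0 modifier is assumed here
 to be EXPAND_NORMAL):

 rtx val = expand_expr (exp, NULL_RTX, VOIDmode, 0); */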
bbf6f052
RK
3806
3807rtx
3808expand_expr (exp, target, tmode, modifier)
3809 register tree exp;
3810 rtx target;
3811 enum machine_mode tmode;
3812 enum expand_modifier modifier;
3813{
3814 register rtx op0, op1, temp;
3815 tree type = TREE_TYPE (exp);
3816 int unsignedp = TREE_UNSIGNED (type);
3817 register enum machine_mode mode = TYPE_MODE (type);
3818 register enum tree_code code = TREE_CODE (exp);
3819 optab this_optab;
3820 /* Use subtarget as the target for operand 0 of a binary operation. */
3821 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3822 rtx original_target = target;
3823 int ignore = target == const0_rtx;
3824 tree context;
3825
3826 /* Don't use hard regs as subtargets, because the combiner
3827 can only handle pseudo regs. */
3828 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3829 subtarget = 0;
3830 /* Avoid subtargets inside loops,
3831 since they hide some invariant expressions. */
3832 if (preserve_subexpressions_p ())
3833 subtarget = 0;
3834
3835 if (ignore) target = 0, original_target = 0;
3836
3837 /* If will do cse, generate all results into pseudo registers
3838 since 1) that allows cse to find more things
3839 and 2) otherwise cse could produce an insn the machine
3840 cannot support. */
3841
3842 if (! cse_not_expected && mode != BLKmode && target
3843 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3844 target = subtarget;
3845
3846 /* Ensure we reference a volatile object even if value is ignored. */
3847 if (ignore && TREE_THIS_VOLATILE (exp)
3e3f5658 3848 && TREE_CODE (exp) != FUNCTION_DECL
bbf6f052
RK
3849 && mode != VOIDmode && mode != BLKmode)
3850 {
3851 target = gen_reg_rtx (mode);
3852 temp = expand_expr (exp, target, VOIDmode, modifier);
3853 if (temp != target)
3854 emit_move_insn (target, temp);
3855 return target;
3856 }
3857
3858 switch (code)
3859 {
3860 case LABEL_DECL:
b552441b
RS
3861 {
3862 tree function = decl_function_context (exp);
3863 /* Handle using a label in a containing function. */
3864 if (function != current_function_decl && function != 0)
3865 {
3866 struct function *p = find_function_data (function);
3867 /* Allocate in the memory associated with the function
3868 that the label is in. */
3869 push_obstacks (p->function_obstack,
3870 p->function_maybepermanent_obstack);
3871
3872 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3873 label_rtx (exp), p->forced_labels);
3874 pop_obstacks ();
3875 }
3876 else if (modifier == EXPAND_INITIALIZER)
3877 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3878 label_rtx (exp), forced_labels);
26fcb35a 3879 temp = gen_rtx (MEM, FUNCTION_MODE,
b552441b 3880 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
26fcb35a
RS
3881 if (function != current_function_decl && function != 0)
3882 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3883 return temp;
b552441b 3884 }
bbf6f052
RK
3885
3886 case PARM_DECL:
3887 if (DECL_RTL (exp) == 0)
3888 {
3889 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 3890 return CONST0_RTX (mode);
bbf6f052
RK
3891 }
3892
3893 case FUNCTION_DECL:
3894 case VAR_DECL:
3895 case RESULT_DECL:
3896 if (DECL_RTL (exp) == 0)
3897 abort ();
 3898 /* Ensure the variable is marked as used
 3899 even if it doesn't go through a parser. */
3900 TREE_USED (exp) = 1;
3901 /* Handle variables inherited from containing functions. */
3902 context = decl_function_context (exp);
3903
3904 /* We treat inline_function_decl as an alias for the current function
3905 because that is the inline function whose vars, types, etc.
3906 are being merged into the current function.
3907 See expand_inline_function. */
3908 if (context != 0 && context != current_function_decl
3909 && context != inline_function_decl
3910 /* If var is static, we don't need a static chain to access it. */
3911 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3912 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3913 {
3914 rtx addr;
3915
3916 /* Mark as non-local and addressable. */
81feeecb 3917 DECL_NONLOCAL (exp) = 1;
bbf6f052
RK
3918 mark_addressable (exp);
3919 if (GET_CODE (DECL_RTL (exp)) != MEM)
3920 abort ();
3921 addr = XEXP (DECL_RTL (exp), 0);
3922 if (GET_CODE (addr) == MEM)
3923 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3924 else
3925 addr = fix_lexical_addr (addr, exp);
3926 return change_address (DECL_RTL (exp), mode, addr);
3927 }
4af3895e 3928
bbf6f052
RK
3929 /* This is the case of an array whose size is to be determined
3930 from its initializer, while the initializer is still being parsed.
3931 See expand_decl. */
3932 if (GET_CODE (DECL_RTL (exp)) == MEM
3933 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3934 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3935 XEXP (DECL_RTL (exp), 0));
3936 if (GET_CODE (DECL_RTL (exp)) == MEM
3937 && modifier != EXPAND_CONST_ADDRESS
3938 && modifier != EXPAND_SUM
3939 && modifier != EXPAND_INITIALIZER)
3940 {
3941 /* DECL_RTL probably contains a constant address.
3942 On RISC machines where a constant address isn't valid,
3943 make some insns to get that address into a register. */
3944 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3945 || (flag_force_addr
3946 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3947 return change_address (DECL_RTL (exp), VOIDmode,
3948 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3949 }
1499e0a8
RK
3950
3951 /* If the mode of DECL_RTL does not match that of the decl, it
3952 must be a promoted value. We return a SUBREG of the wanted mode,
3953 but mark it so that we know that it was already extended. */
3954
3955 if (GET_CODE (DECL_RTL (exp)) == REG
3956 && GET_MODE (DECL_RTL (exp)) != mode)
3957 {
3958 enum machine_mode decl_mode = DECL_MODE (exp);
3959
3960 /* Get the signedness used for this variable. Ensure we get the
3961 same mode we got when the variable was declared. */
3962
3963 PROMOTE_MODE (decl_mode, unsignedp, type);
3964
3965 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3966 abort ();
3967
3968 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3969 SUBREG_PROMOTED_VAR_P (temp) = 1;
3970 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3971 return temp;
3972 }
3973
bbf6f052
RK
3974 return DECL_RTL (exp);
3975
3976 case INTEGER_CST:
3977 return immed_double_const (TREE_INT_CST_LOW (exp),
3978 TREE_INT_CST_HIGH (exp),
3979 mode);
3980
3981 case CONST_DECL:
3982 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3983
3984 case REAL_CST:
3985 /* If optimized, generate immediate CONST_DOUBLE
3986 which will be turned into memory by reload if necessary.
3987
3988 We used to force a register so that loop.c could see it. But
3989 this does not allow gen_* patterns to perform optimizations with
3990 the constants. It also produces two insns in cases like "x = 1.0;".
3991 On most machines, floating-point constants are not permitted in
3992 many insns, so we'd end up copying it to a register in any case.
3993
3994 Now, we do the copying in expand_binop, if appropriate. */
3995 return immed_real_const (exp);
3996
3997 case COMPLEX_CST:
3998 case STRING_CST:
3999 if (! TREE_CST_RTL (exp))
4000 output_constant_def (exp);
4001
4002 /* TREE_CST_RTL probably contains a constant address.
4003 On RISC machines where a constant address isn't valid,
4004 make some insns to get that address into a register. */
4005 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
4006 && modifier != EXPAND_CONST_ADDRESS
4007 && modifier != EXPAND_INITIALIZER
4008 && modifier != EXPAND_SUM
4009 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
4010 return change_address (TREE_CST_RTL (exp), VOIDmode,
4011 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
4012 return TREE_CST_RTL (exp);
4013
4014 case SAVE_EXPR:
4015 context = decl_function_context (exp);
4016 /* We treat inline_function_decl as an alias for the current function
4017 because that is the inline function whose vars, types, etc.
4018 are being merged into the current function.
4019 See expand_inline_function. */
4020 if (context == current_function_decl || context == inline_function_decl)
4021 context = 0;
4022
4023 /* If this is non-local, handle it. */
4024 if (context)
4025 {
4026 temp = SAVE_EXPR_RTL (exp);
4027 if (temp && GET_CODE (temp) == REG)
4028 {
4029 put_var_into_stack (exp);
4030 temp = SAVE_EXPR_RTL (exp);
4031 }
4032 if (temp == 0 || GET_CODE (temp) != MEM)
4033 abort ();
4034 return change_address (temp, mode,
4035 fix_lexical_addr (XEXP (temp, 0), exp));
4036 }
4037 if (SAVE_EXPR_RTL (exp) == 0)
4038 {
4039 if (mode == BLKmode)
4040 temp
4041 = assign_stack_temp (mode,
4042 int_size_in_bytes (TREE_TYPE (exp)), 0);
4043 else
1499e0a8
RK
4044 {
4045 enum machine_mode var_mode = mode;
4046
4047 if (TREE_CODE (type) == INTEGER_TYPE
4048 || TREE_CODE (type) == ENUMERAL_TYPE
4049 || TREE_CODE (type) == BOOLEAN_TYPE
4050 || TREE_CODE (type) == CHAR_TYPE
4051 || TREE_CODE (type) == REAL_TYPE
4052 || TREE_CODE (type) == POINTER_TYPE
4053 || TREE_CODE (type) == OFFSET_TYPE)
4054 {
4055 PROMOTE_MODE (var_mode, unsignedp, type);
4056 }
4057
4058 temp = gen_reg_rtx (var_mode);
4059 }
4060
bbf6f052
RK
4061 SAVE_EXPR_RTL (exp) = temp;
4062 store_expr (TREE_OPERAND (exp, 0), temp, 0);
4063 if (!optimize && GET_CODE (temp) == REG)
4064 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
4065 save_expr_regs);
4066 }
1499e0a8
RK
4067
4068 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
4069 must be a promoted value. We return a SUBREG of the wanted mode,
4070 but mark it so that we know that it was already extended. Note
4071 that `unsignedp' was modified above in this case. */
4072
4073 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
4074 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
4075 {
4076 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
4077 SUBREG_PROMOTED_VAR_P (temp) = 1;
4078 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
4079 return temp;
4080 }
4081
bbf6f052
RK
4082 return SAVE_EXPR_RTL (exp);
4083
4084 case EXIT_EXPR:
4085 /* Exit the current loop if the body-expression is true. */
4086 {
4087 rtx label = gen_label_rtx ();
906c4e36
RK
4088 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
4089 expand_exit_loop (NULL_PTR);
bbf6f052
RK
4090 emit_label (label);
4091 }
4092 return const0_rtx;
4093
4094 case LOOP_EXPR:
4095 expand_start_loop (1);
4096 expand_expr_stmt (TREE_OPERAND (exp, 0));
4097 expand_end_loop ();
4098
4099 return const0_rtx;
4100
4101 case BIND_EXPR:
4102 {
4103 tree vars = TREE_OPERAND (exp, 0);
4104 int vars_need_expansion = 0;
4105
4106 /* Need to open a binding contour here because
 4107 if there are any cleanups they must be contained here. */
4108 expand_start_bindings (0);
4109
2df53c0b
RS
4110 /* Mark the corresponding BLOCK for output in its proper place. */
4111 if (TREE_OPERAND (exp, 2) != 0
4112 && ! TREE_USED (TREE_OPERAND (exp, 2)))
4113 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
4114
4115 /* If VARS have not yet been expanded, expand them now. */
4116 while (vars)
4117 {
4118 if (DECL_RTL (vars) == 0)
4119 {
4120 vars_need_expansion = 1;
4121 expand_decl (vars);
4122 }
4123 expand_decl_init (vars);
4124 vars = TREE_CHAIN (vars);
4125 }
4126
4127 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
4128
4129 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
4130
4131 return temp;
4132 }
4133
4134 case RTL_EXPR:
4135 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
4136 abort ();
4137 emit_insns (RTL_EXPR_SEQUENCE (exp));
4138 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
4139 return RTL_EXPR_RTL (exp);
4140
4141 case CONSTRUCTOR:
4af3895e
JVA
4142 /* All elts simple constants => refer to a constant in memory. But
 4143 if the mode is not BLKmode, let it store a field at a time
4144 since that should make a CONST_INT or CONST_DOUBLE when we
4145 fold. */
4146 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
bbf6f052
RK
4147 {
4148 rtx constructor = output_constant_def (exp);
b552441b
RS
4149 if (modifier != EXPAND_CONST_ADDRESS
4150 && modifier != EXPAND_INITIALIZER
4151 && modifier != EXPAND_SUM
4152 && !memory_address_p (GET_MODE (constructor),
4153 XEXP (constructor, 0)))
bbf6f052
RK
4154 constructor = change_address (constructor, VOIDmode,
4155 XEXP (constructor, 0));
4156 return constructor;
4157 }
4158
4159 if (ignore)
4160 {
4161 tree elt;
4162 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4163 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
4164 return const0_rtx;
4165 }
4166 else
4167 {
4168 if (target == 0 || ! safe_from_p (target, exp))
4169 {
4170 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
4171 target = gen_reg_rtx (mode);
4172 else
4173 {
3b94d087
RS
4174 enum tree_code c = TREE_CODE (type);
4175 target
4176 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
4177 if (c == RECORD_TYPE || c == UNION_TYPE || c == ARRAY_TYPE)
4178 MEM_IN_STRUCT_P (target) = 1;
bbf6f052
RK
4179 }
4180 }
4181 store_constructor (exp, target);
4182 return target;
4183 }
4184
4185 case INDIRECT_REF:
4186 {
4187 tree exp1 = TREE_OPERAND (exp, 0);
4188 tree exp2;
4189
 4190 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
4191 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
4192 This code has the same general effect as simply doing
4193 expand_expr on the save expr, except that the expression PTR
4194 is computed for use as a memory address. This means different
4195 code, suitable for indexing, may be generated. */
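 /* E.g. (illustrative): for *p += 1, the pointer inside the SAVE_EXPR
 is expanded just once, with EXPAND_SUM, so the same indexable
 address serves both the load and the later store. */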
4196 if (TREE_CODE (exp1) == SAVE_EXPR
4197 && SAVE_EXPR_RTL (exp1) == 0
4198 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
4199 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
4200 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
4201 {
906c4e36
RK
4202 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
4203 VOIDmode, EXPAND_SUM);
bbf6f052
RK
4204 op0 = memory_address (mode, temp);
4205 op0 = copy_all_regs (op0);
4206 SAVE_EXPR_RTL (exp1) = op0;
4207 }
4208 else
4209 {
906c4e36 4210 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052
RK
4211 op0 = memory_address (mode, op0);
4212 }
8c8a8e34
JW
4213
4214 temp = gen_rtx (MEM, mode, op0);
4215 /* If address was computed by addition,
4216 mark this as an element of an aggregate. */
4217 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4218 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
4219 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
4220 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
4221 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4222 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4223 || (TREE_CODE (exp1) == ADDR_EXPR
4224 && (exp2 = TREE_OPERAND (exp1, 0))
4225 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
4226 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
4227 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
4228 MEM_IN_STRUCT_P (temp) = 1;
f94cc92f 4229 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
89742723 4230#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
bbf6f052
RK
4231 a location is accessed through a pointer to const does not mean
4232 that the value there can never change. */
8c8a8e34 4233 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
bbf6f052 4234#endif
8c8a8e34
JW
4235 return temp;
4236 }
bbf6f052
RK
4237
4238 case ARRAY_REF:
4239 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
4240 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4241 {
4242 /* Nonconstant array index or nonconstant element size.
4243 Generate the tree for *(&array+index) and expand that,
4244 except do it in a language-independent way
4245 and don't complain about non-lvalue arrays.
4246 `mark_addressable' should already have been called
4247 for any array for which this case will be reached. */
4248
4249 /* Don't forget the const or volatile flag from the array element. */
4250 tree variant_type = build_type_variant (type,
4251 TREE_READONLY (exp),
4252 TREE_THIS_VOLATILE (exp));
4253 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
4254 TREE_OPERAND (exp, 0));
4255 tree index = TREE_OPERAND (exp, 1);
4256 tree elt;
4257
4258 /* Convert the integer argument to a type the same size as a pointer
4259 so the multiply won't overflow spuriously. */
4260 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
4261 index = convert (type_for_size (POINTER_SIZE, 0), index);
4262
4263 /* Don't think the address has side effects
4264 just because the array does.
4265 (In some cases the address might have side effects,
4266 and we fail to record that fact here. However, it should not
4267 matter, since expand_expr should not care.) */
4268 TREE_SIDE_EFFECTS (array_adr) = 0;
4269
4270 elt = build1 (INDIRECT_REF, type,
4271 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
4272 array_adr,
4273 fold (build (MULT_EXPR,
4274 TYPE_POINTER_TO (variant_type),
4275 index, size_in_bytes (type))))));
4276
4277 /* Volatility, etc., of new expression is same as old expression. */
4278 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
4279 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
4280 TREE_READONLY (elt) = TREE_READONLY (exp);
4281
4282 return expand_expr (elt, target, tmode, modifier);
4283 }
4284
4285 /* Fold an expression like: "foo"[2].
4286 This is not done in fold so it won't happen inside &. */
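 /* E.g. (illustrative): "foo"[2] becomes the integer character code
 of 'o' below, while fold leaves it alone so that &"foo"[2] can
 still be expanded as an address. */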
4287 {
4288 int i;
4289 tree arg0 = TREE_OPERAND (exp, 0);
4290 tree arg1 = TREE_OPERAND (exp, 1);
4291
4292 if (TREE_CODE (arg0) == STRING_CST
4293 && TREE_CODE (arg1) == INTEGER_CST
4294 && !TREE_INT_CST_HIGH (arg1)
4295 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
4296 {
4297 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
4298 {
4299 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
4300 TREE_TYPE (exp) = integer_type_node;
4301 return expand_expr (exp, target, tmode, modifier);
4302 }
4303 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
4304 {
4305 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
4306 TREE_TYPE (exp) = integer_type_node;
4307 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
4308 }
4309 }
4310 }
4311
4312 /* If this is a constant index into a constant array,
4af3895e
JVA
4313 just get the value from the array. Handle both the cases when
4314 we have an explicit constructor and when our operand is a variable
4315 that was declared const. */
4316
4317 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
4318 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4319 {
4320 tree index = fold (TREE_OPERAND (exp, 1));
4321 if (TREE_CODE (index) == INTEGER_CST
4322 && TREE_INT_CST_HIGH (index) == 0)
4323 {
4324 int i = TREE_INT_CST_LOW (index);
4325 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
4326
4327 while (elem && i--)
4328 elem = TREE_CHAIN (elem);
4329 if (elem)
4330 return expand_expr (fold (TREE_VALUE (elem)), target,
4331 tmode, modifier);
4332 }
4333 }
4334
4335 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
4336 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4337 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
4338 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
4339 && DECL_INITIAL (TREE_OPERAND (exp, 0))
4340 && optimize >= 1
4341 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
4342 != ERROR_MARK))
bbf6f052
RK
4343 {
4344 tree index = fold (TREE_OPERAND (exp, 1));
8c8a8e34
JW
4345 if (TREE_CODE (index) == INTEGER_CST
4346 && TREE_INT_CST_HIGH (index) == 0)
bbf6f052
RK
4347 {
4348 int i = TREE_INT_CST_LOW (index);
8c8a8e34 4349 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
bbf6f052 4350
8c8a8e34
JW
4351 if (TREE_CODE (init) == CONSTRUCTOR)
4352 {
4353 tree elem = CONSTRUCTOR_ELTS (init);
4354
4355 while (elem && i--)
4356 elem = TREE_CHAIN (elem);
4357 if (elem)
4358 return expand_expr (fold (TREE_VALUE (elem)), target,
4359 tmode, modifier);
4360 }
4361 else if (TREE_CODE (init) == STRING_CST
4362 && i < TREE_STRING_LENGTH (init))
4363 {
906c4e36 4364 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
8c8a8e34
JW
4365 return convert_to_mode (mode, temp, 0);
4366 }
bbf6f052
RK
4367 }
4368 }
4369 /* Treat array-ref with constant index as a component-ref. */
4370
4371 case COMPONENT_REF:
4372 case BIT_FIELD_REF:
4af3895e
JVA
4373 /* If the operand is a CONSTRUCTOR, we can just extract the
4374 appropriate field if it is present. */
4375 if (code != ARRAY_REF
4376 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
4377 {
4378 tree elt;
4379
4380 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
4381 elt = TREE_CHAIN (elt))
4382 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
4383 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
4384 }
4385
bbf6f052
RK
4386 {
4387 enum machine_mode mode1;
4388 int bitsize;
4389 int bitpos;
7bb0943f 4390 tree offset;
bbf6f052 4391 int volatilep = 0;
7bb0943f 4392 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
bbf6f052
RK
4393 &mode1, &unsignedp, &volatilep);
4394
4395 /* In some cases, we will be offsetting OP0's address by a constant.
4396 So get it as a sum, if possible. If we will be using it
4397 directly in an insn, we validate it. */
906c4e36 4398 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052 4399
8c8a8e34 4400 /* If this is a constant, put it into a register if it is a
8008b228 4401 legitimate constant and memory if it isn't. */
8c8a8e34
JW
4402 if (CONSTANT_P (op0))
4403 {
4404 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
4405 if (LEGITIMATE_CONSTANT_P (op0))
4406 op0 = force_reg (mode, op0);
4407 else
4408 op0 = validize_mem (force_const_mem (mode, op0));
4409 }
4410
7bb0943f
RS
4411 if (offset != 0)
4412 {
906c4e36 4413 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
4414
4415 if (GET_CODE (op0) != MEM)
4416 abort ();
4417 op0 = change_address (op0, VOIDmode,
4418 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
4419 force_reg (Pmode, offset_rtx)));
4420 }
4421
bbf6f052
RK
4422 /* Don't forget about volatility even if this is a bitfield. */
4423 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4424 {
4425 op0 = copy_rtx (op0);
4426 MEM_VOLATILE_P (op0) = 1;
4427 }
4428
4429 if (mode1 == VOIDmode
0bba3f6f
RK
4430 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4431 && modifier != EXPAND_CONST_ADDRESS
4432 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
bbf6f052
RK
4433 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
4434 {
4435 /* In cases where an aligned union has an unaligned object
4436 as a field, we might be extracting a BLKmode value from
4437 an integer-mode (e.g., SImode) object. Handle this case
4438 by doing the extract into an object as wide as the field
4439 (which we know to be the width of a basic mode), then
4440 storing into memory, and changing the mode to BLKmode. */
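 /* Sketch (illustrative): for a BLKmode field that fits in an
 SImode object, EXT_MODE becomes a suitable integer mode, the
 bits are extracted into a register, spilled to a stack temp,
 and the temp's mode is then flipped to BLKmode. */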
4441 enum machine_mode ext_mode = mode;
4442
4443 if (ext_mode == BLKmode)
4444 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4445
4446 if (ext_mode == BLKmode)
4447 abort ();
4448
4449 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4450 unsignedp, target, ext_mode, ext_mode,
4451 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
4452 int_size_in_bytes (TREE_TYPE (tem)));
4453 if (mode == BLKmode)
4454 {
4455 rtx new = assign_stack_temp (ext_mode,
4456 bitsize / BITS_PER_UNIT, 0);
4457
4458 emit_move_insn (new, op0);
4459 op0 = copy_rtx (new);
4460 PUT_MODE (op0, BLKmode);
4461 }
4462
4463 return op0;
4464 }
4465
4466 /* Get a reference to just this component. */
4467 if (modifier == EXPAND_CONST_ADDRESS
4468 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4469 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4470 (bitpos / BITS_PER_UNIT)));
4471 else
4472 op0 = change_address (op0, mode1,
4473 plus_constant (XEXP (op0, 0),
4474 (bitpos / BITS_PER_UNIT)));
4475 MEM_IN_STRUCT_P (op0) = 1;
4476 MEM_VOLATILE_P (op0) |= volatilep;
4477 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4478 return op0;
4479 if (target == 0)
4480 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4481 convert_move (target, op0, unsignedp);
4482 return target;
4483 }
4484
4485 case OFFSET_REF:
4486 {
4487 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
4488 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
906c4e36 4489 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052
RK
4490 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4491 MEM_IN_STRUCT_P (temp) = 1;
f94cc92f 4492 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
89742723 4493#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
bbf6f052
RK
4494 a location is accessed through a pointer to const does not mean
4495 that the value there can never change. */
4496 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4497#endif
4498 return temp;
4499 }
4500
4501 /* Intended for a reference to a buffer of a file-object in Pascal.
4502 But it's not certain that a special tree code will really be
4503 necessary for these. INDIRECT_REF might work for them. */
4504 case BUFFER_REF:
4505 abort ();
4506
7308a047
RS
4507 /* IN_EXPR: Inlined pascal set IN expression.
4508
4509 Algorithm:
4510 rlo = set_low - (set_low%bits_per_word);
4511 the_word = set [ (index - rlo)/bits_per_word ];
4512 bit_index = index % bits_per_word;
4513 bitmask = 1 << bit_index;
4514 return !!(the_word & bitmask); */
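 /* Worked example (illustrative, assumes bits_per_word == 8):
 with set_low == 3 and index == 21,
 rlo = 3 - 3%8 = 0, the_word = set[(21-0)/8] = set[2],
 bit_index = 21 % 8 = 5, bitmask = 1 << 5 = 0x20,
 so the result is !!(set[2] & 0x20). */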
4515 case IN_EXPR:
4516 preexpand_calls (exp);
4517 {
4518 tree set = TREE_OPERAND (exp, 0);
4519 tree index = TREE_OPERAND (exp, 1);
4520 tree set_type = TREE_TYPE (set);
4521
4522 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4523 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4524
4525 rtx index_val;
4526 rtx lo_r;
4527 rtx hi_r;
4528 rtx rlow;
4529 rtx diff, quo, rem, addr, bit, result;
4530 rtx setval, setaddr;
4531 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4532
4533 if (target == 0)
17938e57 4534 target = gen_reg_rtx (mode);
7308a047
RS
4535
4536 /* If domain is empty, answer is no. */
4537 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4538 return const0_rtx;
4539
4540 index_val = expand_expr (index, 0, VOIDmode, 0);
4541 lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
4542 hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
4543 setval = expand_expr (set, 0, VOIDmode, 0);
4544 setaddr = XEXP (setval, 0);
4545
4546 /* Compare index against bounds, if they are constant. */
4547 if (GET_CODE (index_val) == CONST_INT
17938e57
RK
4548 && GET_CODE (lo_r) == CONST_INT
4549 && INTVAL (index_val) < INTVAL (lo_r))
4550 return const0_rtx;
7308a047
RS
4551
4552 if (GET_CODE (index_val) == CONST_INT
17938e57
RK
4553 && GET_CODE (hi_r) == CONST_INT
4554 && INTVAL (hi_r) < INTVAL (index_val))
4555 return const0_rtx;
7308a047
RS
4556
4557 /* If we get here, we have to generate the code for both cases
4558 (in range and out of range). */
4559
4560 op0 = gen_label_rtx ();
4561 op1 = gen_label_rtx ();
4562
4563 if (! (GET_CODE (index_val) == CONST_INT
4564 && GET_CODE (lo_r) == CONST_INT))
4565 {
17938e57
RK
4566 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
4567 GET_MODE (index_val), 0, 0);
7308a047
RS
4568 emit_jump_insn (gen_blt (op1));
4569 }
4570
4571 if (! (GET_CODE (index_val) == CONST_INT
4572 && GET_CODE (hi_r) == CONST_INT))
4573 {
17938e57
RK
4574 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
4575 GET_MODE (index_val), 0, 0);
7308a047
RS
4576 emit_jump_insn (gen_bgt (op1));
4577 }
4578
4579 /* Calculate the element number of bit zero in the first word
4580 of the set. */
4581 if (GET_CODE (lo_r) == CONST_INT)
17938e57
RK
4582 rlow = GEN_INT (INTVAL (lo_r)
4583 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 4584 else
17938e57
RK
4585 rlow = expand_binop (index_mode, and_optab, lo_r,
4586 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
4587 NULL_RTX, 0, OPTAB_LIB_WIDEN);
7308a047
RS
4588
4589 diff = expand_binop (index_mode, sub_optab,
17938e57 4590 index_val, rlow, NULL_RTX, 0, OPTAB_LIB_WIDEN);
7308a047
RS
4591
4592 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
17938e57 4593 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
7308a047 4594 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
17938e57 4595 GEN_INT (BITS_PER_UNIT), NULL_RTX, 0);
7308a047
RS
4596 addr = memory_address (byte_mode,
4597 expand_binop (index_mode, add_optab,
17938e57
RK
4598 diff, setaddr, NULL_RTX, 0,
4599 OPTAB_LIB_WIDEN));
7308a047
RS
 4600 /* Extract the bit we want to examine.  */
4601 bit = expand_shift (RSHIFT_EXPR, byte_mode,
17938e57
RK
4602 gen_rtx (MEM, byte_mode, addr),
4603 make_tree (TREE_TYPE (index), rem),
4604 NULL_RTX, 1);
4605 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
4606 GET_MODE (target) == byte_mode ? target : 0,
7308a047 4607 1, OPTAB_LIB_WIDEN);
17938e57
RK
4608
4609 if (result != target)
4610 convert_move (target, result, 1);
7308a047
RS
4611
4612 /* Output the code to handle the out-of-range case. */
4613 emit_jump (op0);
4614 emit_label (op1);
4615 emit_move_insn (target, const0_rtx);
4616 emit_label (op0);
4617 return target;
4618 }
4619
bbf6f052
RK
4620 case WITH_CLEANUP_EXPR:
4621 if (RTL_EXPR_RTL (exp) == 0)
4622 {
4623 RTL_EXPR_RTL (exp)
4624 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
906c4e36
RK
4625 cleanups_this_call
4626 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
bbf6f052
RK
4627 /* That's it for this cleanup. */
4628 TREE_OPERAND (exp, 2) = 0;
4629 }
4630 return RTL_EXPR_RTL (exp);
4631
4632 case CALL_EXPR:
4633 /* Check for a built-in function. */
4634 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4635 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4636 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4637 return expand_builtin (exp, target, subtarget, tmode, ignore);
4638 /* If this call was expanded already by preexpand_calls,
4639 just return the result we got. */
4640 if (CALL_EXPR_RTL (exp) != 0)
4641 return CALL_EXPR_RTL (exp);
8129842c 4642 return expand_call (exp, target, ignore);
bbf6f052
RK
4643
4644 case NON_LVALUE_EXPR:
4645 case NOP_EXPR:
4646 case CONVERT_EXPR:
4647 case REFERENCE_EXPR:
4648 if (TREE_CODE (type) == VOID_TYPE || ignore)
4649 {
4650 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4651 return const0_rtx;
4652 }
4653 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4654 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4655 if (TREE_CODE (type) == UNION_TYPE)
4656 {
4657 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4658 if (target == 0)
4659 {
4660 if (mode == BLKmode)
4661 {
4662 if (TYPE_SIZE (type) == 0
4663 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4664 abort ();
4665 target = assign_stack_temp (BLKmode,
4666 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4667 + BITS_PER_UNIT - 1)
4668 / BITS_PER_UNIT, 0);
4669 }
4670 else
4671 target = gen_reg_rtx (mode);
4672 }
4673 if (GET_CODE (target) == MEM)
4674 /* Store data into beginning of memory target. */
4675 store_expr (TREE_OPERAND (exp, 0),
1499e0a8
RK
4676 change_address (target, TYPE_MODE (valtype), 0), 0);
4677
bbf6f052
RK
4678 else if (GET_CODE (target) == REG)
4679 /* Store this field into a union of the proper type. */
4680 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4681 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4682 VOIDmode, 0, 1,
4683 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4684 else
4685 abort ();
4686
4687 /* Return the entire union. */
4688 return target;
4689 }
1499e0a8 4690 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
4691 if (GET_MODE (op0) == mode)
4692 return op0;
4693 /* If arg is a constant integer being extended from a narrower mode,
4694 we must really truncate to get the extended bits right. Otherwise
4695 (unsigned long) (unsigned char) ("\377"[0])
4696 would come out as ffffffff. */
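 /* Worked example (illustrative): "\377"[0] is the signed char -1,
 an all-ones CONST_INT. With WIDTH == 8 and an unsigned source
 type, the masking below leaves VAL == 0xff, the correct
 zero-extension; without it we would hand back -1. */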
4697 if (GET_MODE (op0) == VOIDmode
4698 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4699 < GET_MODE_BITSIZE (mode)))
4700 {
 4701 /* When the source width fits in HOST_BITS_PER_WIDE_INT, we can mask VAL directly. */
4702 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4703
4704 if (width < HOST_BITS_PER_WIDE_INT)
4705 {
4706 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4707 : CONST_DOUBLE_LOW (op0));
4708 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4709 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4710 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4711 else
4712 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4713
4714 op0 = GEN_INT (val);
4715 }
4716 else
4717 {
4718 op0 = (simplify_unary_operation
4719 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4720 ? ZERO_EXTEND : SIGN_EXTEND),
4721 mode, op0,
4722 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4723 if (op0 == 0)
4724 abort ();
4725 }
4726 }
4727 if (GET_MODE (op0) == VOIDmode)
bbf6f052 4728 return op0;
26fcb35a
RS
4729 if (modifier == EXPAND_INITIALIZER)
4730 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
bbf6f052
RK
4731 if (flag_force_mem && GET_CODE (op0) == MEM)
4732 op0 = copy_to_reg (op0);
4733
4734 if (target == 0)
4735 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4736 else
4737 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4738 return target;
4739
4740 case PLUS_EXPR:
4741 /* We come here from MINUS_EXPR when the second operand is a constant. */
4742 plus_expr:
4743 this_optab = add_optab;
4744
4745 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4746 something else, make sure we add the register to the constant and
4747 then to the other thing. This case can occur during strength
4748 reduction and doing it this way will produce better code if the
4749 frame pointer or argument pointer is eliminated.
4750
4751 fold-const.c will ensure that the constant is always in the inner
4752 PLUS_EXPR, so the only case we need to do anything about is if
4753 sp, ap, or fp is our second argument, in which case we must swap
4754 the innermost first argument and our second argument. */
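 /* Sketch (illustrative): (PLUS (PLUS x c) fp) is rewritten below
 as (PLUS (PLUS fp c) x), so fp+c can later be folded into a
 single constant when the frame pointer is eliminated. */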
4755
4756 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4757 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4758 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4759 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4760 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4761 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4762 {
4763 tree t = TREE_OPERAND (exp, 1);
4764
4765 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4766 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4767 }
4768
4769 /* If the result is to be Pmode and we are adding an integer to
4770 something, we might be forming a constant. So try to use
4771 plus_constant. If it produces a sum and we can't accept it,
4772 use force_operand. This allows P = &ARR[const] to generate
4773 efficient code on machines where a SYMBOL_REF is not a valid
4774 address.
4775
4776 If this is an EXPAND_SUM call, always return the sum. */
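 /* E.g. (illustrative, 4-byte elements): P = &ARR[5] lets
 plus_constant form the address ARR+20 directly; an add insn is
 emitted only if that sum is not acceptable to the caller. */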
4777 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
906c4e36 4778 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
bbf6f052
RK
4779 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4780 || mode == Pmode))
4781 {
4782 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4783 EXPAND_SUM);
4784 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4785 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4786 op1 = force_operand (op1, target);
4787 return op1;
4788 }
4789
4790 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
 4791 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4792 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4793 || mode == Pmode))
4794 {
4795 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4796 EXPAND_SUM);
4797 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4798 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4799 op0 = force_operand (op0, target);
4800 return op0;
4801 }
4802
4803 /* No sense saving up arithmetic to be done
4804 if it's all in the wrong mode to form part of an address.
4805 And force_operand won't know whether to sign-extend or
4806 zero-extend. */
4807 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4808 || mode != Pmode) goto binop;
4809
4810 preexpand_calls (exp);
4811 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4812 subtarget = 0;
4813
4814 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
906c4e36 4815 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
bbf6f052
RK
4816
4817 /* Make sure any term that's a sum with a constant comes last. */
4818 if (GET_CODE (op0) == PLUS
4819 && CONSTANT_P (XEXP (op0, 1)))
4820 {
4821 temp = op0;
4822 op0 = op1;
4823 op1 = temp;
4824 }
4825 /* If adding to a sum including a constant,
4826 associate it to put the constant outside. */
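 /* E.g. (illustrative): OP0 == (plus (reg) (const_int 3)) added to
 OP1 == (plus x (const_int 7)) is re-associated below so the
 result is (reg + x) + (const_int 10). */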
4827 if (GET_CODE (op1) == PLUS
4828 && CONSTANT_P (XEXP (op1, 1)))
4829 {
4830 rtx constant_term = const0_rtx;
4831
4832 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4833 if (temp != 0)
4834 op0 = temp;
6f90e075
JW
4835 /* Ensure that MULT comes first if there is one. */
4836 else if (GET_CODE (op0) == MULT)
4837 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
bbf6f052
RK
4838 else
4839 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4840
4841 /* Let's also eliminate constants from op0 if possible. */
4842 op0 = eliminate_constant_term (op0, &constant_term);
4843
4844 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4845 their sum should be a constant. Form it into OP1, since the
4846 result we want will then be OP0 + OP1. */
4847
4848 temp = simplify_binary_operation (PLUS, mode, constant_term,
4849 XEXP (op1, 1));
4850 if (temp != 0)
4851 op1 = temp;
4852 else
4853 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4854 }
4855
4856 /* Put a constant term last and put a multiplication first. */
4857 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4858 temp = op1, op1 = op0, op0 = temp;
4859
4860 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4861 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4862
4863 case MINUS_EXPR:
4864 /* Handle difference of two symbolic constants,
4865 for the sake of an initializer. */
4866 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4867 && really_constant_p (TREE_OPERAND (exp, 0))
4868 && really_constant_p (TREE_OPERAND (exp, 1)))
4869 {
906c4e36
RK
4870 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4871 VOIDmode, modifier);
4872 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4873 VOIDmode, modifier);
bbf6f052
RK
4874 return gen_rtx (MINUS, mode, op0, op1);
4875 }
4876 /* Convert A - const to A + (-const). */
4877 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4878 {
4879 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4880 fold (build1 (NEGATE_EXPR, type,
4881 TREE_OPERAND (exp, 1))));
4882 goto plus_expr;
4883 }
4884 this_optab = sub_optab;
4885 goto binop;
4886
4887 case MULT_EXPR:
4888 preexpand_calls (exp);
4889 /* If first operand is constant, swap them.
4890 Thus the following special case checks need only
4891 check the second operand. */
4892 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4893 {
4894 register tree t1 = TREE_OPERAND (exp, 0);
4895 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4896 TREE_OPERAND (exp, 1) = t1;
4897 }
4898
4899 /* Attempt to return something suitable for generating an
4900 indexed address, for machines that support that. */
4901
4902 if (modifier == EXPAND_SUM && mode == Pmode
4903 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 4904 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
4905 {
4906 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4907
4908 /* Apply distributive law if OP0 is x+c. */
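 /* I.e. (x + c) * d == x*d + c*d; e.g. (illustrative)
 (plus (reg) (const_int 4)) times 8 becomes
 (plus (mult (reg) (const_int 8)) (const_int 32)). */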
4909 if (GET_CODE (op0) == PLUS
4910 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4911 return gen_rtx (PLUS, mode,
4912 gen_rtx (MULT, mode, XEXP (op0, 0),
906c4e36
RK
4913 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4914 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4915 * INTVAL (XEXP (op0, 1))));
bbf6f052
RK
4916
4917 if (GET_CODE (op0) != REG)
906c4e36 4918 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
4919 if (GET_CODE (op0) != REG)
4920 op0 = copy_to_mode_reg (mode, op0);
4921
4922 return gen_rtx (MULT, mode, op0,
906c4e36 4923 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
4924 }
4925
4926 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4927 subtarget = 0;
4928
4929 /* Check for multiplying things that have been extended
4930 from a narrower type. If this machine supports multiplying
4931 in that narrower type with a result in the desired type,
4932 do it that way, and avoid the explicit type-conversion. */
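 /* E.g. (illustrative): (int) (short) a * (int) (short) b can be
 done with smul_widen_optab (a mulhisi3-style pattern, if the
 target provides one) directly on the HImode operands. */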
4933 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4934 && TREE_CODE (type) == INTEGER_TYPE
4935 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4936 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4937 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4938 && int_fits_type_p (TREE_OPERAND (exp, 1),
4939 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4940 /* Don't use a widening multiply if a shift will do. */
4941 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 4942 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
4943 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4944 ||
4945 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4946 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4947 ==
4948 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4949 /* If both operands are extended, they must either both
4950 be zero-extended or both be sign-extended. */
4951 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4952 ==
4953 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4954 {
4955 enum machine_mode innermode
4956 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4957 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4958 ? umul_widen_optab : smul_widen_optab);
4959 if (mode == GET_MODE_WIDER_MODE (innermode)
4960 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4961 {
4962 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
906c4e36 4963 NULL_RTX, VOIDmode, 0);
bbf6f052 4964 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
906c4e36
RK
4965 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4966 VOIDmode, 0);
bbf6f052
RK
4967 else
4968 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
906c4e36 4969 NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4970 goto binop2;
4971 }
4972 }
4973 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 4974 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4975 return expand_mult (mode, op0, op1, target, unsignedp);
4976
4977 case TRUNC_DIV_EXPR:
4978 case FLOOR_DIV_EXPR:
4979 case CEIL_DIV_EXPR:
4980 case ROUND_DIV_EXPR:
4981 case EXACT_DIV_EXPR:
4982 preexpand_calls (exp);
4983 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4984 subtarget = 0;
 4985 /* Possible optimization: compute the dividend with EXPAND_SUM;
 4986 then, if the divisor is constant, we can optimize the case
 4987 where some terms of the dividend have coefficients divisible by it. */
4988 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 4989 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4990 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4991
4992 case RDIV_EXPR:
4993 this_optab = flodiv_optab;
4994 goto binop;
4995
4996 case TRUNC_MOD_EXPR:
4997 case FLOOR_MOD_EXPR:
4998 case CEIL_MOD_EXPR:
4999 case ROUND_MOD_EXPR:
5000 preexpand_calls (exp);
5001 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5002 subtarget = 0;
5003 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 5004 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5005 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
5006
5007 case FIX_ROUND_EXPR:
5008 case FIX_FLOOR_EXPR:
5009 case FIX_CEIL_EXPR:
5010 abort (); /* Not used for C. */
5011
5012 case FIX_TRUNC_EXPR:
906c4e36 5013 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5014 if (target == 0)
5015 target = gen_reg_rtx (mode);
5016 expand_fix (target, op0, unsignedp);
5017 return target;
5018
5019 case FLOAT_EXPR:
906c4e36 5020 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5021 if (target == 0)
5022 target = gen_reg_rtx (mode);
5023 /* expand_float can't figure out what to do if FROM has VOIDmode.
5024 So give it the correct mode. With -O, cse will optimize this. */
5025 if (GET_MODE (op0) == VOIDmode)
5026 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
5027 op0);
5028 expand_float (target, op0,
5029 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
5030 return target;
5031
5032 case NEGATE_EXPR:
5033 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5034 temp = expand_unop (mode, neg_optab, op0, target, 0);
5035 if (temp == 0)
5036 abort ();
5037 return temp;
5038
5039 case ABS_EXPR:
5040 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5041
2d7050fd
RS
5042 /* Handle complex values specially. */
5043 {
5044 enum machine_mode opmode
5045 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5046
5047 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
5048 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
5049 return expand_complex_abs (opmode, op0, target, unsignedp);
5050 }
5051
bbf6f052
RK
5052 /* Unsigned abs is simply the operand. Testing here means we don't
5053 risk generating incorrect code below. */
5054 if (TREE_UNSIGNED (type))
5055 return op0;
5056
5057 /* First try to do it with a special abs instruction. */
5058 temp = expand_unop (mode, abs_optab, op0, target, 0);
5059 if (temp != 0)
5060 return temp;
5061
5062 /* If this machine has expensive jumps, we can do integer absolute
5063 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
5064 where W is the width of MODE. */
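 /* Worked example (illustrative, W == 32): for x == -5, the
 arithmetic shift gives -1 (all ones); (-1 ^ -5) == 4; and
 4 - (-1) == 5. For x >= 0 the shift gives 0 and x is unchanged. */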
5065
5066 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
5067 {
5068 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
5069 size_int (GET_MODE_BITSIZE (mode) - 1),
906c4e36 5070 NULL_RTX, 0);
bbf6f052
RK
5071
5072 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
5073 OPTAB_LIB_WIDEN);
5074 if (temp != 0)
5075 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
5076 OPTAB_LIB_WIDEN);
5077
5078 if (temp != 0)
5079 return temp;
5080 }
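/* A worked instance of the identity above (illustrative only, assuming
   W == 32 and two's-complement arithmetic): for x == -5,
   extended = x >> 31 = -1 (all one bits), so
   (extended ^ x) - extended = (~x) - (-1) = 4 + 1 = 5 == abs (-5).
   For x >= 0, extended == 0 and the expression reduces to
   (0 ^ x) - 0 == x, with no branch in either case. */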
5081
5082 /* If that does not win, use conditional jump and negate. */
5083 target = original_target;
5084 temp = gen_label_rtx ();
5085 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
5086 || (GET_CODE (target) == REG
5087 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5088 target = gen_reg_rtx (mode);
5089 emit_move_insn (target, op0);
5090 emit_cmp_insn (target,
5091 expand_expr (convert (type, integer_zero_node),
5092 NULL_RTX, VOIDmode, 0),
5093 GE, NULL_RTX, mode, 0, 0);
5094 NO_DEFER_POP;
5095 emit_jump_insn (gen_bge (temp));
5096 op0 = expand_unop (mode, neg_optab, target, target, 0);
5097 if (op0 != target)
5098 emit_move_insn (target, op0);
5099 emit_label (temp);
5100 OK_DEFER_POP;
5101 return target;
5102
5103 case MAX_EXPR:
5104 case MIN_EXPR:
5105 target = original_target;
5106 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
5107 || (GET_CODE (target) == REG
5108 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5109 target = gen_reg_rtx (mode);
5110 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5111 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5112
5113 /* First try to do it with a special MIN or MAX instruction.
5114 If that does not win, use a conditional jump to select the proper
5115 value. */
5116 this_optab = (TREE_UNSIGNED (type)
5117 ? (code == MIN_EXPR ? umin_optab : umax_optab)
5118 : (code == MIN_EXPR ? smin_optab : smax_optab));
5119
5120 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
5121 OPTAB_WIDEN);
5122 if (temp != 0)
5123 return temp;
5124
5125 if (target != op0)
5126 emit_move_insn (target, op0);
5127 op0 = gen_label_rtx ();
5128 if (code == MAX_EXPR)
5129 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5130 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
5131 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
5132 else
5133 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
5134 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
5135 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
5136 if (temp == const0_rtx)
5137 emit_move_insn (target, op1);
5138 else if (temp != const_true_rtx)
5139 {
5140 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
5141 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
5142 else
5143 abort ();
5144 emit_move_insn (target, op1);
5145 }
5146 emit_label (op0);
5147 return target;
5148
5149/* ??? Can optimize when the operand of this is a bitwise operation,
5150 by using a different bitwise operation. */
5151 case BIT_NOT_EXPR:
5152 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5153 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
5154 if (temp == 0)
5155 abort ();
5156 return temp;
5157
5158 case FFS_EXPR:
5159 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5160 temp = expand_unop (mode, ffs_optab, op0, target, 1);
5161 if (temp == 0)
5162 abort ();
5163 return temp;
5164
5165/* ??? Can optimize bitwise operations with one arg constant.
5166 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
5167 and (a bitwise1 b) bitwise2 b (etc)
5168 but that is probably not worth while. */
5169
5170/* BIT_AND_EXPR is for bitwise anding.
5171 TRUTH_AND_EXPR is for anding two boolean values
5172 when we want in all cases to compute both of them.
5173 In general it is fastest to do TRUTH_AND_EXPR by
5174 computing both operands as actual zero-or-1 values
5175 and then bitwise anding. In cases where there cannot
5176 be any side effects, better code would be made by
5177 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
5178 but the question is how to recognize those cases. */
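/* Illustrative contrast (not from the original source): for
   `p () && q ()' as TRUTH_ANDIF_EXPR, q is never evaluated when p
   yields 0, whereas TRUTH_AND_EXPR reduces both operands to 0 or 1
   and simply ANDs the results, trading a conditional jump for a
   bitwise instruction when both sides must be evaluated anyway. */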
5179
5180 case TRUTH_AND_EXPR:
5181 case BIT_AND_EXPR:
5182 this_optab = and_optab;
5183 goto binop;
5184
5185/* See comment above about TRUTH_AND_EXPR; it applies here too. */
5186 case TRUTH_OR_EXPR:
5187 case BIT_IOR_EXPR:
5188 this_optab = ior_optab;
5189 goto binop;
5190
5191 case TRUTH_XOR_EXPR:
5192 case BIT_XOR_EXPR:
5193 this_optab = xor_optab;
5194 goto binop;
5195
5196 case LSHIFT_EXPR:
5197 case RSHIFT_EXPR:
5198 case LROTATE_EXPR:
5199 case RROTATE_EXPR:
5200 preexpand_calls (exp);
5201 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5202 subtarget = 0;
5203 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5204 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
5205 unsignedp);
5206
5207/* Could determine the answer when only additive constants differ.
5208 Also, the addition of one can be handled by changing the condition. */
5209 case LT_EXPR:
5210 case LE_EXPR:
5211 case GT_EXPR:
5212 case GE_EXPR:
5213 case EQ_EXPR:
5214 case NE_EXPR:
5215 preexpand_calls (exp);
5216 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
5217 if (temp != 0)
5218 return temp;
5219 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
5220 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
5221 && original_target
5222 && GET_CODE (original_target) == REG
5223 && (GET_MODE (original_target)
5224 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
5225 {
5226 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
5227 if (temp != original_target)
5228 temp = copy_to_reg (temp);
5229 op1 = gen_label_rtx ();
5230 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
5231 GET_MODE (temp), unsignedp, 0);
5232 emit_jump_insn (gen_beq (op1));
5233 emit_move_insn (temp, const1_rtx);
5234 emit_label (op1);
5235 return temp;
5236 }
5237 /* If no set-flag instruction, must generate a conditional
5238 store into a temporary variable. Drop through
5239 and handle this like && and ||. */
5240
5241 case TRUTH_ANDIF_EXPR:
5242 case TRUTH_ORIF_EXPR:
5243 if (target == 0 || ! safe_from_p (target, exp)
5244 /* Make sure we don't have a hard reg (such as function's return
5245 value) live across basic blocks, if not optimizing. */
5246 || (!optimize && GET_CODE (target) == REG
5247 && REGNO (target) < FIRST_PSEUDO_REGISTER))
5248 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5249 emit_clr_insn (target);
5250 op1 = gen_label_rtx ();
5251 jumpifnot (exp, op1);
5252 emit_0_to_1_insn (target);
5253 emit_label (op1);
5254 return target;
5255
5256 case TRUTH_NOT_EXPR:
5257 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
5258 /* The parser is careful to generate TRUTH_NOT_EXPR
5259 only with operands that are always zero or one. */
5260 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
5261 target, 1, OPTAB_LIB_WIDEN);
5262 if (temp == 0)
5263 abort ();
5264 return temp;
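/* Illustrative sketch of the XOR trick above: because the operand is
   known to be 0 or 1, x ^ 1 flips it -- 0 ^ 1 == 1 and 1 ^ 1 == 0 --
   which is exactly logical negation without a compare or a jump. */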
5265
5266 case COMPOUND_EXPR:
5267 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
5268 emit_queue ();
5269 return expand_expr (TREE_OPERAND (exp, 1),
5270 (ignore ? const0_rtx : target),
5271 VOIDmode, 0);
5272
5273 case COND_EXPR:
5274 {
5275 /* Note that COND_EXPRs whose type is a structure or union
5276 are required to be constructed to contain assignments of
5277 a temporary variable, so that we can evaluate them here
5278 for side effect only. If type is void, we must do likewise. */
5279
5280 /* If an arm of the branch requires a cleanup,
5281 only that cleanup is performed. */
5282
5283 tree singleton = 0;
5284 tree binary_op = 0, unary_op = 0;
5285 tree old_cleanups = cleanups_this_call;
5286 cleanups_this_call = 0;
5287
5288 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
5289 convert it to our mode, if necessary. */
5290 if (integer_onep (TREE_OPERAND (exp, 1))
5291 && integer_zerop (TREE_OPERAND (exp, 2))
5292 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5293 {
5294 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
5295 if (GET_MODE (op0) == mode)
5296 return op0;
5297 if (target == 0)
5298 target = gen_reg_rtx (mode);
5299 convert_move (target, op0, unsignedp);
5300 return target;
5301 }
5302
5303 /* If we are not to produce a result, we have no target. Otherwise,
5304 if a target was specified use it; it will not be used as an
5305 intermediate target unless it is safe. If no target, use a
5306 temporary. */
5307
5308 if (mode == VOIDmode || ignore)
5309 temp = 0;
5310 else if (original_target
5311 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
5312 temp = original_target;
5313 else if (mode == BLKmode)
5314 {
5315 if (TYPE_SIZE (type) == 0
5316 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
5317 abort ();
5318 temp = assign_stack_temp (BLKmode,
5319 (TREE_INT_CST_LOW (TYPE_SIZE (type))
5320 + BITS_PER_UNIT - 1)
5321 / BITS_PER_UNIT, 0);
5322 }
5323 else
5324 temp = gen_reg_rtx (mode);
5325
5326 /* Check for X ? A + B : A. If we have this, we can copy
5327 A to the output and conditionally add B. Similarly for unary
5328 operations. Don't do this if X has side-effects because
5329 those side effects might affect A or B and the "?" operation is
5330 a sequence point in ANSI. (We test for side effects later.) */
5331
5332 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
5333 && operand_equal_p (TREE_OPERAND (exp, 2),
5334 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5335 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
5336 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
5337 && operand_equal_p (TREE_OPERAND (exp, 1),
5338 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5339 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
5340 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
5341 && operand_equal_p (TREE_OPERAND (exp, 2),
5342 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
5343 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
5344 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
5345 && operand_equal_p (TREE_OPERAND (exp, 1),
5346 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
5347 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
5348
5349 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
5350 operation, do this as A + (X != 0). Similarly for other simple
5351 binary operators. */
5352 if (singleton && binary_op
5353 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5354 && (TREE_CODE (binary_op) == PLUS_EXPR
5355 || TREE_CODE (binary_op) == MINUS_EXPR
5356 || TREE_CODE (binary_op) == BIT_IOR_EXPR
5357 || TREE_CODE (binary_op) == BIT_XOR_EXPR
5358 || TREE_CODE (binary_op) == BIT_AND_EXPR)
5359 && integer_onep (TREE_OPERAND (binary_op, 1))
5360 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
5361 {
5362 rtx result;
5363 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
5364 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
5365 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
5366 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
5367 : and_optab);
5368
5369 /* If we had X ? A : A + 1, do this as A + (X == 0).
5370
5371 We have to invert the truth value here and then put it
5372 back later if do_store_flag fails. We cannot simply copy
5373 TREE_OPERAND (exp, 0) to another variable and modify that
5374 because invert_truthvalue can modify the tree pointed to
5375 by its argument. */
5376 if (singleton == TREE_OPERAND (exp, 1))
5377 TREE_OPERAND (exp, 0)
5378 = invert_truthvalue (TREE_OPERAND (exp, 0));
5379
5380 result = do_store_flag (TREE_OPERAND (exp, 0),
5381 (safe_from_p (temp, singleton)
5382 ? temp : NULL_RTX),
5383 mode, BRANCH_COST <= 1);
5384
5385 if (result)
5386 {
5387 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
5388 return expand_binop (mode, boptab, op1, result, temp,
5389 unsignedp, OPTAB_LIB_WIDEN);
5390 }
5391 else if (singleton == TREE_OPERAND (exp, 1))
5392 TREE_OPERAND (exp, 0)
5393 = invert_truthvalue (TREE_OPERAND (exp, 0));
5394 }
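/* Illustrative example of the rewrite above (assuming a target with a
   usable store-flag instruction): for `flag < 0 ? n + 1 : n', the
   singleton is `n' and the binary op is `n + 1', so do_store_flag
   materializes `flag < 0' as a 0-or-1 value and the result is
   computed branch-free as `n + (flag < 0)'. */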
5395
5396 NO_DEFER_POP;
5397 op0 = gen_label_rtx ();
5398
5399 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
5400 {
5401 if (temp != 0)
5402 {
5403 /* If the target conflicts with the other operand of the
5404 binary op, we can't use it. Also, we can't use the target
5405 if it is a hard register, because evaluating the condition
5406 might clobber it. */
5407 if ((binary_op
5408 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
5409 || (GET_CODE (temp) == REG
5410 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
5411 temp = gen_reg_rtx (mode);
5412 store_expr (singleton, temp, 0);
5413 }
5414 else
5415 expand_expr (singleton,
5416 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
5417 if (cleanups_this_call)
5418 {
5419 sorry ("aggregate value in COND_EXPR");
5420 cleanups_this_call = 0;
5421 }
5422 if (singleton == TREE_OPERAND (exp, 1))
5423 jumpif (TREE_OPERAND (exp, 0), op0);
5424 else
5425 jumpifnot (TREE_OPERAND (exp, 0), op0);
5426
5427 if (binary_op && temp == 0)
5428 /* Just touch the other operand. */
5429 expand_expr (TREE_OPERAND (binary_op, 1),
5430 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5431 else if (binary_op)
5432 store_expr (build (TREE_CODE (binary_op), type,
5433 make_tree (type, temp),
5434 TREE_OPERAND (binary_op, 1)),
5435 temp, 0);
5436 else
5437 store_expr (build1 (TREE_CODE (unary_op), type,
5438 make_tree (type, temp)),
5439 temp, 0);
5440 op1 = op0;
5441 }
5442#if 0
5443 /* This is now done in jump.c and is better done there because it
5444 produces shorter register lifetimes. */
5445
5446 /* Check for both possibilities either constants or variables
5447 in registers (but not the same as the target!). If so, can
5448 save branches by assigning one, branching, and assigning the
5449 other. */
5450 else if (temp && GET_MODE (temp) != BLKmode
5451 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5452 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5453 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5454 && DECL_RTL (TREE_OPERAND (exp, 1))
5455 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5456 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5457 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5458 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5459 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5460 && DECL_RTL (TREE_OPERAND (exp, 2))
5461 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5462 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5463 {
5464 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5465 temp = gen_reg_rtx (mode);
5466 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5467 jumpifnot (TREE_OPERAND (exp, 0), op0);
5468 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5469 op1 = op0;
5470 }
5471#endif
5472 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5473 comparison operator. If we have one of these cases, set the
5474 output to A, branch on A (cse will merge these two references),
5475 then set the output to FOO. */
5476 else if (temp
5477 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5478 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5479 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5480 TREE_OPERAND (exp, 1), 0)
5481 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5482 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5483 {
5484 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5485 temp = gen_reg_rtx (mode);
5486 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5487 jumpif (TREE_OPERAND (exp, 0), op0);
5488 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5489 op1 = op0;
5490 }
5491 else if (temp
5492 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5493 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5494 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5495 TREE_OPERAND (exp, 2), 0)
5496 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5497 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5498 {
5499 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5500 temp = gen_reg_rtx (mode);
5501 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5502 jumpifnot (TREE_OPERAND (exp, 0), op0);
5503 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5504 op1 = op0;
5505 }
5506 else
5507 {
5508 op1 = gen_label_rtx ();
5509 jumpifnot (TREE_OPERAND (exp, 0), op0);
5510 if (temp != 0)
5511 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5512 else
5513 expand_expr (TREE_OPERAND (exp, 1),
5514 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5515 if (cleanups_this_call)
5516 {
5517 sorry ("aggregate value in COND_EXPR");
5518 cleanups_this_call = 0;
5519 }
5520
5521 emit_queue ();
5522 emit_jump_insn (gen_jump (op1));
5523 emit_barrier ();
5524 emit_label (op0);
5525 if (temp != 0)
5526 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5527 else
5528 expand_expr (TREE_OPERAND (exp, 2),
5529 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
5530 }
5531
5532 if (cleanups_this_call)
5533 {
5534 sorry ("aggregate value in COND_EXPR");
5535 cleanups_this_call = 0;
5536 }
5537
5538 emit_queue ();
5539 emit_label (op1);
5540 OK_DEFER_POP;
5541 cleanups_this_call = old_cleanups;
5542 return temp;
5543 }
5544
5545 case TARGET_EXPR:
5546 {
5547 /* Something needs to be initialized, but we didn't know
5548 where that thing was when building the tree. For example,
5549 it could be the return value of a function, or a parameter
5550 to a function which is laid down in the stack, or a temporary
5551 variable which must be passed by reference.
5552
5553 We guarantee that the expression will either be constructed
5554 or copied into our original target. */
5555
5556 tree slot = TREE_OPERAND (exp, 0);
5557 tree exp1;
5558
5559 if (TREE_CODE (slot) != VAR_DECL)
5560 abort ();
5561
5562 if (target == 0)
5563 {
5564 if (DECL_RTL (slot) != 0)
5565 {
5566 target = DECL_RTL (slot);
5567 /* If we have already expanded the slot, don't do
5568 it again.  (mrs) */
5569 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5570 return target;
5571 }
5572 else
5573 {
5574 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5575 /* All temp slots at this level must not conflict. */
5576 preserve_temp_slots (target);
5577 DECL_RTL (slot) = target;
5578 }
5579
5580#if 0
5581 /* I bet this needs to be done, and I bet that it needs to
5582 be above, inside the else clause. The reason is
5583 simple: how else is it going to get cleaned up? (mrs)
5584
5585 The reason it probably did not work before, and was
5586 commented out, is because this was re-expanding already
5587 expanded target_exprs (target == 0 and DECL_RTL (slot)
5588 != 0), also cleaning them up many times as well.  :-( */
5589
5590 /* Since SLOT is not known to the called function
5591 to belong to its stack frame, we must build an explicit
5592 cleanup. This case occurs when we must build up a reference
5593 to pass the reference as an argument. In this case,
5594 it is very likely that such a reference need not be
5595 built here. */
5596
5597 if (TREE_OPERAND (exp, 2) == 0)
5598 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5599 if (TREE_OPERAND (exp, 2))
5600 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5601 cleanups_this_call);
5602#endif
5603 }
5604 else
5605 {
5606 /* This case does occur, when expanding a parameter which
5607 needs to be constructed on the stack. The target
5608 is the actual stack address that we want to initialize.
5609 The function we call will perform the cleanup in this case. */
5610
5611 DECL_RTL (slot) = target;
5612 }
5613
5614 exp1 = TREE_OPERAND (exp, 1);
5615 /* Mark it as expanded. */
5616 TREE_OPERAND (exp, 1) = NULL_TREE;
5617
5618 return expand_expr (exp1, target, tmode, modifier);
5619 }
5620
5621 case INIT_EXPR:
5622 {
5623 tree lhs = TREE_OPERAND (exp, 0);
5624 tree rhs = TREE_OPERAND (exp, 1);
5625 tree noncopied_parts = 0;
5626 tree lhs_type = TREE_TYPE (lhs);
5627
5628 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5629 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5630 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5631 TYPE_NONCOPIED_PARTS (lhs_type));
5632 while (noncopied_parts != 0)
5633 {
5634 expand_assignment (TREE_VALUE (noncopied_parts),
5635 TREE_PURPOSE (noncopied_parts), 0, 0);
5636 noncopied_parts = TREE_CHAIN (noncopied_parts);
5637 }
5638 return temp;
5639 }
5640
5641 case MODIFY_EXPR:
5642 {
5643 /* If lhs is complex, expand calls in rhs before computing it.
5644 That's so we don't compute a pointer and save it over a call.
5645 If lhs is simple, compute it first so we can give it as a
5646 target if the rhs is just a call. This avoids an extra temp and copy
5647 and that prevents a partial-subsumption which makes bad code.
5648 Actually we could treat component_ref's of vars like vars. */
5649
5650 tree lhs = TREE_OPERAND (exp, 0);
5651 tree rhs = TREE_OPERAND (exp, 1);
5652 tree noncopied_parts = 0;
5653 tree lhs_type = TREE_TYPE (lhs);
5654
5655 temp = 0;
5656
5657 if (TREE_CODE (lhs) != VAR_DECL
5658 && TREE_CODE (lhs) != RESULT_DECL
5659 && TREE_CODE (lhs) != PARM_DECL)
5660 preexpand_calls (exp);
5661
5662 /* Check for |= or &= of a bitfield of size one into another bitfield
5663 of size 1. In this case, (unless we need the result of the
5664 assignment) we can do this more efficiently with a
5665 test followed by an assignment, if necessary.
5666
5667 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5668 things change so we do, this code should be enhanced to
5669 support it. */
5670 if (ignore
5671 && TREE_CODE (lhs) == COMPONENT_REF
5672 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5673 || TREE_CODE (rhs) == BIT_AND_EXPR)
5674 && TREE_OPERAND (rhs, 0) == lhs
5675 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5676 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5677 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5678 {
5679 rtx label = gen_label_rtx ();
5680
5681 do_jump (TREE_OPERAND (rhs, 1),
5682 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5683 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5684 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5685 (TREE_CODE (rhs) == BIT_IOR_EXPR
5686 ? integer_one_node
5687 : integer_zero_node)),
5688 0, 0);
5689 do_pending_stack_adjust ();
5690 emit_label (label);
5691 return const0_rtx;
5692 }
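/* Illustrative example of the shortcut above: with one-bit fields,
   `a.x |= b.y;' is emitted as `if (b.y) a.x = 1;' and `a.x &= b.y;'
   as `if (! b.y) a.x = 0;' -- a test plus a constant store instead
   of a read-modify-write of the destination bitfield. */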
5693
5694 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5695 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5696 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5697 TYPE_NONCOPIED_PARTS (lhs_type));
5698
5699 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5700 while (noncopied_parts != 0)
5701 {
5702 expand_assignment (TREE_PURPOSE (noncopied_parts),
5703 TREE_VALUE (noncopied_parts), 0, 0);
5704 noncopied_parts = TREE_CHAIN (noncopied_parts);
5705 }
5706 return temp;
5707 }
5708
5709 case PREINCREMENT_EXPR:
5710 case PREDECREMENT_EXPR:
5711 return expand_increment (exp, 0);
5712
5713 case POSTINCREMENT_EXPR:
5714 case POSTDECREMENT_EXPR:
5715 /* Faster to treat as pre-increment if result is not used. */
5716 return expand_increment (exp, ! ignore);
5717
5718 case ADDR_EXPR:
5719 /* Are we taking the address of a nested function? */
5720 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5721 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5722 {
5723 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5724 op0 = force_operand (op0, target);
5725 }
5726 else
5727 {
5728 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
5729 (modifier == EXPAND_INITIALIZER
5730 ? modifier : EXPAND_CONST_ADDRESS));
5731 if (GET_CODE (op0) != MEM)
5732 abort ();
5733
5734 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5735 return XEXP (op0, 0);
5736 op0 = force_operand (XEXP (op0, 0), target);
5737 }
5738 if (flag_force_addr && GET_CODE (op0) != REG)
5739 return force_reg (Pmode, op0);
5740 return op0;
5741
5742 case ENTRY_VALUE_EXPR:
5743 abort ();
5744
5745 /* COMPLEX type for Extended Pascal & Fortran */
5746 case COMPLEX_EXPR:
5747 {
5748 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5749
5750 rtx prev;
5751
5752 /* Get the rtx code of the operands. */
5753 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5754 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5755
5756 if (! target)
5757 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5758
5759 prev = get_last_insn ();
5760
5761 /* Tell flow that the whole of the destination is being set. */
5762 if (GET_CODE (target) == REG)
5763 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5764
5765 /* Move the real (op0) and imaginary (op1) parts to their location. */
5766 emit_move_insn (gen_realpart (mode, target), op0);
5767 emit_move_insn (gen_imagpart (mode, target), op1);
5768
5769 /* Complex construction should appear as a single unit. */
5770 group_insns (prev);
5771
5772 return target;
5773 }
5774
5775 case REALPART_EXPR:
5776 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5777 return gen_realpart (mode, op0);
5778
5779 case IMAGPART_EXPR:
5780 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5781 return gen_imagpart (mode, op0);
5782
5783 case CONJ_EXPR:
5784 {
5785 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5786 rtx imag_t;
5787 rtx prev;
5788
5789 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5790
5791 if (! target)
5792 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5793
5794 prev = get_last_insn ();
5795
5796 /* Tell flow that the whole of the destination is being set. */
5797 if (GET_CODE (target) == REG)
5798 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5799
5800 /* Store the realpart and the negated imagpart to target. */
5801 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
5802
5803 imag_t = gen_imagpart (mode, target);
5804 temp = expand_unop (mode, neg_optab,
5805 gen_imagpart (mode, op0), imag_t, 0);
5806 if (temp != imag_t)
5807 emit_move_insn (imag_t, temp);
5808
5809 /* Conjugate should appear as a single unit */
5810 group_insns (prev);
5811
5812 return target;
5813 }
5814
5815 case ERROR_MARK:
5816 return const0_rtx;
5817
5818 default:
5819 return (*lang_expand_expr) (exp, target, tmode, modifier);
5820 }
5821
5822 /* Here to do an ordinary binary operator, generating an instruction
5823 from the optab already placed in `this_optab'. */
5824 binop:
5825 preexpand_calls (exp);
5826 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5827 subtarget = 0;
5828 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
5829 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
5830 binop2:
5831 temp = expand_binop (mode, this_optab, op0, op1, target,
5832 unsignedp, OPTAB_LIB_WIDEN);
5833 if (temp == 0)
5834 abort ();
5835 return temp;
5836}
5837\f
5838/* Return the alignment in bits of EXP, a pointer valued expression.
5839 But don't return more than MAX_ALIGN no matter what.
5840 The alignment returned is, by default, the alignment of the thing that
5841 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5842
5843 Otherwise, look at the expression to see if we can do better, i.e., if the
5844 expression is actually pointing at an object whose alignment is tighter. */
5845
5846static int
5847get_pointer_alignment (exp, max_align)
5848 tree exp;
5849 unsigned max_align;
5850{
5851 unsigned align, inner;
5852
5853 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5854 return 0;
5855
5856 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5857 align = MIN (align, max_align);
5858
5859 while (1)
5860 {
5861 switch (TREE_CODE (exp))
5862 {
5863 case NOP_EXPR:
5864 case CONVERT_EXPR:
5865 case NON_LVALUE_EXPR:
5866 exp = TREE_OPERAND (exp, 0);
5867 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5868 return align;
5869 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5870 inner = MIN (inner, max_align);
5871 align = MAX (align, inner);
5872 break;
5873
5874 case PLUS_EXPR:
5875 /* If sum of pointer + int, restrict our maximum alignment to that
5876 imposed by the integer. If not, we can't do any better than
5877 ALIGN. */
5878 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5879 return align;
5880
5881 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5882 & (max_align - 1))
5883 != 0)
5884 max_align >>= 1;
5885
5886 exp = TREE_OPERAND (exp, 0);
5887 break;
5888
5889 case ADDR_EXPR:
5890 /* See what we are pointing at and look at its alignment. */
5891 exp = TREE_OPERAND (exp, 0);
5892 if (TREE_CODE (exp) == FUNCTION_DECL)
5893 align = MAX (align, FUNCTION_BOUNDARY);
5894 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5895 align = MAX (align, DECL_ALIGN (exp));
5896#ifdef CONSTANT_ALIGNMENT
5897 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5898 align = CONSTANT_ALIGNMENT (exp, align);
5899#endif
5900 return MIN (align, max_align);
5901
5902 default:
5903 return align;
5904 }
5905 }
5906}
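/* A worked example of the PLUS_EXPR clause above (illustrative only):
   for `p + 2' where the target of `p' is known to be 32-bit aligned,
   the constant offset contributes 2 * BITS_PER_UNIT == 16 bits, so
   max_align is shifted down until 16 has no bits inside the mask and
   the sum is reported as only 16-bit aligned. */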
5907\f
5908/* Return the tree node and offset if a given argument corresponds to
5909 a string constant. */
5910
5911static tree
5912string_constant (arg, ptr_offset)
5913 tree arg;
5914 tree *ptr_offset;
5915{
5916 STRIP_NOPS (arg);
5917
5918 if (TREE_CODE (arg) == ADDR_EXPR
5919 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5920 {
5921 *ptr_offset = integer_zero_node;
5922 return TREE_OPERAND (arg, 0);
5923 }
5924 else if (TREE_CODE (arg) == PLUS_EXPR)
5925 {
5926 tree arg0 = TREE_OPERAND (arg, 0);
5927 tree arg1 = TREE_OPERAND (arg, 1);
5928
5929 STRIP_NOPS (arg0);
5930 STRIP_NOPS (arg1);
5931
5932 if (TREE_CODE (arg0) == ADDR_EXPR
5933 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5934 {
5935 *ptr_offset = arg1;
5936 return TREE_OPERAND (arg0, 0);
5937 }
5938 else if (TREE_CODE (arg1) == ADDR_EXPR
5939 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5940 {
5941 *ptr_offset = arg0;
5942 return TREE_OPERAND (arg1, 0);
5943 }
5944 }
5945
5946 return 0;
5947}
5948
5949/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5950 way, because it could contain a zero byte in the middle.
5951 TREE_STRING_LENGTH is the size of the character array, not the string.
5952
5953 Unfortunately, string_constant can't access the values of const char
5954 arrays with initializers, so neither can we do so here. */
5955
5956static tree
5957c_strlen (src)
5958 tree src;
5959{
5960 tree offset_node;
5961 int offset, max;
5962 char *ptr;
5963
5964 src = string_constant (src, &offset_node);
5965 if (src == 0)
5966 return 0;
5967 max = TREE_STRING_LENGTH (src);
5968 ptr = TREE_STRING_POINTER (src);
5969 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5970 {
5971 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5972 compute the offset to the following null if we don't know where to
5973 start searching for it. */
5974 int i;
5975 for (i = 0; i < max; i++)
5976 if (ptr[i] == 0)
5977 return 0;
5978 /* We don't know the starting offset, but we do know that the string
5979 has no internal zero bytes. We can assume that the offset falls
5980 within the bounds of the string; otherwise, the programmer deserves
5981 what he gets. Subtract the offset from the length of the string,
5982 and return that. */
5983 /* This would perhaps not be valid if we were dealing with named
5984 arrays in addition to literal string constants. */
5985 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5986 }
5987
5988 /* We have a known offset into the string. Start searching there for
5989 a null character. */
5990 if (offset_node == 0)
5991 offset = 0;
5992 else
5993 {
5994 /* Did we get a long long offset? If so, punt. */
5995 if (TREE_INT_CST_HIGH (offset_node) != 0)
5996 return 0;
5997 offset = TREE_INT_CST_LOW (offset_node);
5998 }
5999 /* If the offset is known to be out of bounds, warn, and call strlen at
6000 runtime. */
6001 if (offset < 0 || offset > max)
6002 {
6003 warning ("offset outside bounds of constant string");
6004 return 0;
6005 }
6006 /* Use strlen to search for the first zero byte. Since any strings
6007 constructed with build_string will have nulls appended, we win even
6008 if we get handed something like (char[4])"abcd".
6009
6010 Since OFFSET is our starting index into the string, no further
6011 calculation is needed. */
6012 return size_int (strlen (ptr + offset));
6013}
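/* An example of why TREE_STRING_LENGTH alone is not enough
   (illustrative only): for the constant "foo\0bar",
   TREE_STRING_LENGTH is 8 -- the size of the whole character array
   including the trailing null -- while the value c_strlen must
   compute is 3, the index of the first null byte from the offset. */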
6014\f
6015/* Expand an expression EXP that calls a built-in function,
6016 with result going to TARGET if that's convenient
6017 (and in mode MODE if that's convenient).
6018 SUBTARGET may be used as the target for computing one of EXP's operands.
6019 IGNORE is nonzero if the value is to be ignored. */
6020
6021static rtx
6022expand_builtin (exp, target, subtarget, mode, ignore)
6023 tree exp;
6024 rtx target;
6025 rtx subtarget;
6026 enum machine_mode mode;
6027 int ignore;
6028{
6029 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6030 tree arglist = TREE_OPERAND (exp, 1);
6031 rtx op0;
6032 rtx lab1, insns;
6033 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
6034 optab builtin_optab;
6035
6036 switch (DECL_FUNCTION_CODE (fndecl))
6037 {
6038 case BUILT_IN_ABS:
6039 case BUILT_IN_LABS:
6040 case BUILT_IN_FABS:
6041 /* build_function_call changes these into ABS_EXPR. */
6042 abort ();
6043
6044 case BUILT_IN_SIN:
6045 case BUILT_IN_COS:
6046 case BUILT_IN_FSQRT:
6047 /* If not optimizing, call the library function. */
6048 if (! optimize)
6049 break;
6050
6051 if (arglist == 0
6052 /* Arg could be wrong type if user redeclared this fcn wrong. */
6053 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
6054 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
6055
6056 /* Stabilize and compute the argument. */
6057 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
6058 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
6059 {
6060 exp = copy_node (exp);
6061 arglist = copy_node (arglist);
6062 TREE_OPERAND (exp, 1) = arglist;
6063 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
6064 }
6065 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6066
6067 /* Make a suitable register to place result in. */
6068 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
6069
6070 emit_queue ();
6071 start_sequence ();
6072
6073 switch (DECL_FUNCTION_CODE (fndecl))
6074 {
6075 case BUILT_IN_SIN:
6076 builtin_optab = sin_optab; break;
6077 case BUILT_IN_COS:
6078 builtin_optab = cos_optab; break;
6079 case BUILT_IN_FSQRT:
6080 builtin_optab = sqrt_optab; break;
6081 default:
6082 abort ();
6083 }
6084
6085 /* Compute into TARGET.
6086 Set TARGET to wherever the result comes back. */
6087 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6088 builtin_optab, op0, target, 0);
6089
6090 /* If we were unable to expand via the builtin, stop the
6091 sequence (without outputting the insns) and break, causing
6092 a call to the library function. */
6093 if (target == 0)
6094 {
6095 end_sequence ();
6096 break;
6097 }
6098
6099 /* Check the results by default. But if flag_fast_math is turned on,
6100 then assume sqrt will always be called with valid arguments. */
6101
6102 if (! flag_fast_math)
6103 {
6104 /* Don't define the builtin FP instructions
6105 if your machine is not IEEE. */
6106 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
6107 abort ();
6108
6109 lab1 = gen_label_rtx ();
6110
6111 /* Test the result; if it is NaN, set errno=EDOM because
6112 the argument was not in the domain. */
6113 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
6114 emit_jump_insn (gen_beq (lab1));
6115
6116#if TARGET_EDOM
6117 {
6118#ifdef GEN_ERRNO_RTX
6119 rtx errno_rtx = GEN_ERRNO_RTX;
6120#else
6121 rtx errno_rtx
6122 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
6123#endif
6124
6125 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
6126 }
6127#else
6128 /* We can't set errno=EDOM directly; let the library call do it.
6129 Pop the arguments right away in case the call gets deleted. */
6130 NO_DEFER_POP;
6131 expand_call (exp, target, 0);
6132 OK_DEFER_POP;
6133#endif
6134
6135 emit_label (lab1);
6136 }
6137
6138 /* Output the entire sequence. */
6139 insns = get_insns ();
6140 end_sequence ();
6141 emit_insns (insns);
6142
6143 return target;
6144
6145 case BUILT_IN_SAVEREGS:
6146 /* Don't do __builtin_saveregs more than once in a function.
6147 Save the result of the first call and reuse it. */
6148 if (saveregs_value != 0)
6149 return saveregs_value;
6150 {
6151 /* When this function is called, it means that registers must be
6152 saved on entry to this function. So we migrate the
6153 call to the first insn of this function. */
6154 rtx temp;
6155 rtx seq;
6156 rtx valreg, saved_valreg;
6157
6158 /* Now really call the function. `expand_call' does not call
6159 expand_builtin, so there is no danger of infinite recursion here. */
6160 start_sequence ();
6161
6162#ifdef EXPAND_BUILTIN_SAVEREGS
6163 /* Do whatever the machine needs done in this case. */
6164 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
6165#else
6166 /* The register where the function returns its value
6167 is likely to have something else in it, such as an argument.
6168 So preserve that register around the call. */
6169 if (value_mode != VOIDmode)
6170 {
6171 valreg = hard_libcall_value (value_mode);
6172 saved_valreg = gen_reg_rtx (value_mode);
6173 emit_move_insn (saved_valreg, valreg);
6174 }
6175
6176 /* Generate the call, putting the value in a pseudo. */
6177 temp = expand_call (exp, target, ignore);
6178
6179 if (value_mode != VOIDmode)
6180 emit_move_insn (valreg, saved_valreg);
6181#endif
6182
6183 seq = get_insns ();
6184 end_sequence ();
6185
6186 saveregs_value = temp;
6187
6188 /* This won't work inside a SEQUENCE--it really has to be
6189 at the start of the function. */
6190 if (in_sequence_p ())
6191 {
6192 /* Better to do this than to crash. */
6193 error ("`va_start' used within `({...})'");
6194 return temp;
6195 }
6196
6197 /* Put the sequence after the NOTE that starts the function. */
6198 emit_insns_before (seq, NEXT_INSN (get_insns ()));
6199 return temp;
6200 }
6201
6202 /* __builtin_args_info (N) returns word N of the arg space info
6203 for the current function. The number and meanings of words
6204 are controlled by the definition of CUMULATIVE_ARGS. */
6205 case BUILT_IN_ARGS_INFO:
6206 {
6207 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
6208 int i;
6209 int *word_ptr = (int *) &current_function_args_info;
6210 tree type, elts, result;
6211
6212 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
6213 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
6214 __FILE__, __LINE__);
6215
6216 if (arglist != 0)
6217 {
6218 tree arg = TREE_VALUE (arglist);
6219 if (TREE_CODE (arg) != INTEGER_CST)
6220 error ("argument of `__builtin_args_info' must be constant");
6221 else
6222 {
6223 int wordnum = TREE_INT_CST_LOW (arg);
6224
6225 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
6226 error ("argument of `__builtin_args_info' out of range");
6227 else
6228 return GEN_INT (word_ptr[wordnum]);
6229 }
6230 }
6231 else
6232 error ("missing argument in `__builtin_args_info'");
6233
6234 return const0_rtx;
6235
6236#if 0
6237 for (i = 0; i < nwords; i++)
6238 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
6239
6240 type = build_array_type (integer_type_node,
6241 build_index_type (build_int_2 (nwords, 0)));
6242 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
6243 TREE_CONSTANT (result) = 1;
6244 TREE_STATIC (result) = 1;
6245 result = build (INDIRECT_REF, build_pointer_type (type), result);
6246 TREE_CONSTANT (result) = 1;
6247 return expand_expr (result, NULL_RTX, VOIDmode, 0);
6248#endif
6249 }
6250
6251 /* Return the address of the first anonymous stack arg. */
6252 case BUILT_IN_NEXT_ARG:
6253 {
6254 tree fntype = TREE_TYPE (current_function_decl);
6255 if (!(TYPE_ARG_TYPES (fntype) != 0
6256 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
6257 != void_type_node)))
6258 {
6259 error ("`va_start' used in function with fixed args");
6260 return const0_rtx;
6261 }
6262 }
6263
6264 return expand_binop (Pmode, add_optab,
6265 current_function_internal_arg_pointer,
6266 current_function_arg_offset_rtx,
6267 NULL_RTX, 0, OPTAB_LIB_WIDEN);
6268
6269 case BUILT_IN_CLASSIFY_TYPE:
6270 if (arglist != 0)
6271 {
6272 tree type = TREE_TYPE (TREE_VALUE (arglist));
6273 enum tree_code code = TREE_CODE (type);
6274 if (code == VOID_TYPE)
6275 return GEN_INT (void_type_class);
6276 if (code == INTEGER_TYPE)
6277 return GEN_INT (integer_type_class);
6278 if (code == CHAR_TYPE)
6279 return GEN_INT (char_type_class);
6280 if (code == ENUMERAL_TYPE)
6281 return GEN_INT (enumeral_type_class);
6282 if (code == BOOLEAN_TYPE)
6283 return GEN_INT (boolean_type_class);
6284 if (code == POINTER_TYPE)
6285 return GEN_INT (pointer_type_class);
6286 if (code == REFERENCE_TYPE)
6287 return GEN_INT (reference_type_class);
6288 if (code == OFFSET_TYPE)
6289 return GEN_INT (offset_type_class);
6290 if (code == REAL_TYPE)
6291 return GEN_INT (real_type_class);
6292 if (code == COMPLEX_TYPE)
6293 return GEN_INT (complex_type_class);
6294 if (code == FUNCTION_TYPE)
6295 return GEN_INT (function_type_class);
6296 if (code == METHOD_TYPE)
6297 return GEN_INT (method_type_class);
6298 if (code == RECORD_TYPE)
6299 return GEN_INT (record_type_class);
6300 if (code == UNION_TYPE)
6301 return GEN_INT (union_type_class);
6302 if (code == ARRAY_TYPE)
6303 return GEN_INT (array_type_class);
6304 if (code == STRING_TYPE)
6305 return GEN_INT (string_type_class);
6306 if (code == SET_TYPE)
6307 return GEN_INT (set_type_class);
6308 if (code == FILE_TYPE)
6309 return GEN_INT (file_type_class);
6310 if (code == LANG_TYPE)
6311 return GEN_INT (lang_type_class);
6312 }
6313 return GEN_INT (no_type_class);
6314
6315 case BUILT_IN_CONSTANT_P:
6316 if (arglist == 0)
6317 return const0_rtx;
6318 else
6319 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
6320 ? const1_rtx : const0_rtx);
6321
6322 case BUILT_IN_FRAME_ADDRESS:
6323 /* The argument must be a nonnegative integer constant.
6324 It counts the number of frames to scan up the stack.
6325 The value is the address of that frame. */
6326 case BUILT_IN_RETURN_ADDRESS:
6327 /* The argument must be a nonnegative integer constant.
6328 It counts the number of frames to scan up the stack.
6329 The value is the return address saved in that frame. */
6330 if (arglist == 0)
6331 /* Warning about missing arg was already issued. */
6332 return const0_rtx;
6333 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
6334 {
6335 error ("invalid arg to `__builtin_return_address'");
6336 return const0_rtx;
6337 }
6338 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
6339 {
6340 error ("invalid arg to `__builtin_return_address'");
6341 return const0_rtx;
6342 }
6343 else
6344 {
6345 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
6346 rtx tem = frame_pointer_rtx;
6347 int i;
6348
6349 /* Some machines need special handling before we can access arbitrary
6350 frames. For example, on the sparc, we must first flush all
6351 register windows to the stack. */
6352#ifdef SETUP_FRAME_ADDRESSES
6353 SETUP_FRAME_ADDRESSES ();
6354#endif
6355
6356 /* On the sparc, the return address is not in the frame, it is
6357 in a register. There is no way to access it off of the current
6358 frame pointer, but it can be accessed off the previous frame
6359 pointer by reading the value from the register window save
6360 area. */
6361#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
6362 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_RETURN_ADDRESS)
6363 count--;
6364#endif
6365
6366 /* Scan back COUNT frames to the specified frame. */
6367 for (i = 0; i < count; i++)
6368 {
6369 /* Assume the dynamic chain pointer is in the word that
6370 the frame address points to, unless otherwise specified. */
6371#ifdef DYNAMIC_CHAIN_ADDRESS
6372 tem = DYNAMIC_CHAIN_ADDRESS (tem);
6373#endif
6374 tem = memory_address (Pmode, tem);
6375 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
6376 }
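/* Illustrative reading of the loop above: each iteration dereferences
   the saved chain word of the current frame, so after COUNT steps
   `tem' holds the frame address COUNT activation records up the
   stack -- e.g. __builtin_frame_address (2) follows the chain twice. */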
6377
6378 /* For __builtin_frame_address, return what we've got. */
6379 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
6380 return tem;
6381
6382 /* For __builtin_return_address,
6383 get the return address from that frame. */
6384#ifdef RETURN_ADDR_RTX
6385 return RETURN_ADDR_RTX (count, tem);
6386#else
6387 tem = memory_address (Pmode,
6388 plus_constant (tem, GET_MODE_SIZE (Pmode)));
6389 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
6390#endif
6391 }
6392
6393 case BUILT_IN_ALLOCA:
6394 if (arglist == 0
6395 /* Arg could be non-integer if user redeclared this fcn wrong. */
6396 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6397 return const0_rtx;
6398 current_function_calls_alloca = 1;
6399 /* Compute the argument. */
6400 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
6401
6402 /* Allocate the desired space. */
6403 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
6404
6405 /* Record the new stack level for nonlocal gotos. */
6406 if (nonlocal_goto_handler_slot != 0)
6407 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
6408 return target;
6409
6410 case BUILT_IN_FFS:
6411 /* If not optimizing, call the library function. */
6412 if (!optimize)
6413 break;
6414
6415 if (arglist == 0
6416 /* Arg could be non-integer if user redeclared this fcn wrong. */
6417 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
6418 return const0_rtx;
6419
6420 /* Compute the argument. */
6421 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
6422 /* Compute ffs, into TARGET if possible.
6423 Set TARGET to wherever the result comes back. */
6424 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
6425 ffs_optab, op0, target, 1);
6426 if (target == 0)
6427 abort ();
6428 return target;
6429
6430 case BUILT_IN_STRLEN:
6431 /* If not optimizing, call the library function. */
6432 if (!optimize)
6433 break;
6434
6435 if (arglist == 0
6436 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6437 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
6438 return const0_rtx;
6439 else
6440 {
6441 tree src = TREE_VALUE (arglist);
6442 tree len = c_strlen (src);
6443
6444 int align
6445 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6446
6447 rtx result, src_rtx, char_rtx;
6448 enum machine_mode insn_mode = value_mode, char_mode;
6449 enum insn_code icode;
6450
6451 /* If the length is known, just return it. */
6452 if (len != 0)
6453 return expand_expr (len, target, mode, 0);
6454
6455 /* If SRC is not a pointer type, don't do this operation inline. */
6456 if (align == 0)
6457 break;
6458
6459 /* Call a function if we can't compute strlen in the right mode. */
6460
6461 while (insn_mode != VOIDmode)
6462 {
6463 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6464 if (icode != CODE_FOR_nothing)
6465 break;
6466
6467 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6468 }
6469 if (insn_mode == VOIDmode)
6470 break;
6471
6472 /* Make a place to write the result of the instruction. */
6473 result = target;
6474 if (! (result != 0
6475 && GET_CODE (result) == REG
6476 && GET_MODE (result) == insn_mode
6477 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6478 result = gen_reg_rtx (insn_mode);
6479
6480 /* Make sure the operands are acceptable to the predicates. */
6481
6482 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
6483 result = gen_reg_rtx (insn_mode);
6484
6485 src_rtx = memory_address (BLKmode,
6486 expand_expr (src, NULL_RTX, Pmode,
6487 EXPAND_NORMAL));
6488 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
6489 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6490
6491 char_rtx = const0_rtx;
6492 char_mode = insn_operand_mode[(int)icode][2];
6493 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
6494 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6495
6496 emit_insn (GEN_FCN (icode) (result,
6497 gen_rtx (MEM, BLKmode, src_rtx),
6498 char_rtx, GEN_INT (align)));
6499
6500 /* Return the value in the proper mode for this function. */
6501 if (GET_MODE (result) == value_mode)
6502 return result;
6503 else if (target != 0)
6504 {
6505 convert_move (target, result, 0);
6506 return target;
6507 }
6508 else
6509 return convert_to_mode (value_mode, result, 0);
6510 }
6511
6512 case BUILT_IN_STRCPY:
6513 /* If not optimizing, call the library function. */
6514 if (!optimize)
6515 break;
6516
6517 if (arglist == 0
6518 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6519 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6520 || TREE_CHAIN (arglist) == 0
6521 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6522 return const0_rtx;
6523 else
6524 {
6525 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6526
6527 if (len == 0)
6528 break;
6529
6530 len = size_binop (PLUS_EXPR, len, integer_one_node);
6531
6532 chainon (arglist, build_tree_list (NULL_TREE, len));
6533 }
6534
6535 /* Drops in. */
6536 case BUILT_IN_MEMCPY:
6537 /* If not optimizing, call the library function. */
6538 if (!optimize)
6539 break;
6540
6541 if (arglist == 0
6542 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6543 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6544 || TREE_CHAIN (arglist) == 0
6545 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6546 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6547 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6548 return const0_rtx;
6549 else
6550 {
6551 tree dest = TREE_VALUE (arglist);
6552 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6553 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6554
6555 int src_align
6556 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6557 int dest_align
6558 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6559 rtx dest_rtx, dest_mem, src_mem;
6560
6561 /* If either SRC or DEST is not a pointer type, don't do
6562 this operation in-line. */
6563 if (src_align == 0 || dest_align == 0)
6564 {
6565 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6566 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6567 break;
6568 }
6569
6570 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
6571 dest_mem = gen_rtx (MEM, BLKmode,
6572 memory_address (BLKmode, dest_rtx));
6573 src_mem = gen_rtx (MEM, BLKmode,
6574 memory_address (BLKmode,
6575 expand_expr (src, NULL_RTX,
6576 Pmode,
6577 EXPAND_NORMAL)));
6578
6579 /* Copy word part most expediently. */
6580 emit_block_move (dest_mem, src_mem,
6581 expand_expr (len, NULL_RTX, VOIDmode, 0),
6582 MIN (src_align, dest_align));
6583 return dest_rtx;
6584 }
6585
6586/* These comparison functions need an instruction that returns an actual
6587 index. An ordinary compare that just sets the condition codes
6588 is not enough. */
6589#ifdef HAVE_cmpstrsi
6590 case BUILT_IN_STRCMP:
6591 /* If not optimizing, call the library function. */
6592 if (!optimize)
6593 break;
6594
6595 if (arglist == 0
6596 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6597 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6598 || TREE_CHAIN (arglist) == 0
6599 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6600 return const0_rtx;
6601 else if (!HAVE_cmpstrsi)
6602 break;
6603 {
6604 tree arg1 = TREE_VALUE (arglist);
6605 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6606 tree offset;
6607 tree len, len2;
6608
6609 len = c_strlen (arg1);
6610 if (len)
6611 len = size_binop (PLUS_EXPR, integer_one_node, len);
6612 len2 = c_strlen (arg2);
6613 if (len2)
6614 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6615
6616 /* If we don't have a constant length for the first, use the length
6617 of the second, if we know it. We don't require a constant for
6618 this case; some cost analysis could be done if both are available
6619 but neither is constant. For now, assume they're equally cheap.
6620
6621 If both strings have constant lengths, use the smaller. This
6622 could arise if optimization results in strcpy being called with
6623 two fixed strings, or if the code was machine-generated. We should
6624 add some code to the `memcmp' handler below to deal with such
6625 situations, someday. */
6626 if (!len || TREE_CODE (len) != INTEGER_CST)
6627 {
6628 if (len2)
6629 len = len2;
6630 else if (len == 0)
6631 break;
6632 }
6633 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6634 {
6635 if (tree_int_cst_lt (len2, len))
6636 len = len2;
6637 }
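/* Illustrative outcome of the selection above: for strcmp ("ab", s)
   the first length is the constant 3 (two characters plus the null),
   so the inline cmpstrsi comparison is bounded at 3 bytes; for
   strcmp (s, t) neither length is known, no bound can be chosen, and
   the code falls back to the library call. */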
6638
6639 chainon (arglist, build_tree_list (NULL_TREE, len));
6640 }
6641
6642 /* Drops in. */
6643 case BUILT_IN_MEMCMP:
6644 /* If not optimizing, call the library function. */
6645 if (!optimize)
6646 break;
6647
6648 if (arglist == 0
6649 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6650 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6651 || TREE_CHAIN (arglist) == 0
6652 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6653 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6654 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6655 return const0_rtx;
6656 else if (!HAVE_cmpstrsi)
6657 break;
6658 {
6659 tree arg1 = TREE_VALUE (arglist);
6660 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6661 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6662 rtx result;
6663
6664 int arg1_align
6665 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6666 int arg2_align
6667 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6668 enum machine_mode insn_mode
6669 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6670
6671 /* If we don't have POINTER_TYPE, call the function. */
6672 if (arg1_align == 0 || arg2_align == 0)
6673 {
6674 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6675 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6676 break;
6677 }
6678
6679 /* Make a place to write the result of the instruction. */
6680 result = target;
6681 if (! (result != 0
6682 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6683 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6684 result = gen_reg_rtx (insn_mode);
6685
6686 emit_insn (gen_cmpstrsi (result,
6687 gen_rtx (MEM, BLKmode,
6688 expand_expr (arg1, NULL_RTX, Pmode,
6689 EXPAND_NORMAL)),
6690 gen_rtx (MEM, BLKmode,
6691 expand_expr (arg2, NULL_RTX, Pmode,
6692 EXPAND_NORMAL)),
6693 expand_expr (len, NULL_RTX, VOIDmode, 0),
6694 GEN_INT (MIN (arg1_align, arg2_align))));
6695
6696 /* Return the value in the proper mode for this function. */
6697 mode = TYPE_MODE (TREE_TYPE (exp));
6698 if (GET_MODE (result) == mode)
6699 return result;
6700 else if (target != 0)
6701 {
6702 convert_move (target, result, 0);
6703 return target;
6704 }
6705 else
6706 return convert_to_mode (mode, result, 0);
6707 }
6708#else
6709 case BUILT_IN_STRCMP:
6710 case BUILT_IN_MEMCMP:
6711 break;
6712#endif
6713
6714 default: /* just do library call, if unknown builtin */
42b85a55 6715 error ("built-in function `%s' not currently supported",
bbf6f052
RK
6716 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6717 }
6718
6719 /* The switch statement above can drop through to cause the function
6720 to be called normally. */
6721
6722 return expand_call (exp, target, ignore);
6723}
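/* Editor's example (not in the original): with -O on a machine that
 provides a cmpstrsi pattern, a call such as strcmp (s, "hi") is
 rewritten by the code above into a length-3 block comparison,
 roughly equivalent to memcmp (s, "hi", 3), and expanded inline
 through gen_cmpstrsi. Without the pattern, or at -O0, the library
 function is called instead. */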
6724\f
6725/* Expand code for a post- or pre- increment or decrement
6726 and return the RTX for the result.
6727 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6728
6729static rtx
6730expand_increment (exp, post)
6731 register tree exp;
6732 int post;
6733{
6734 register rtx op0, op1;
6735 register rtx temp, value;
6736 register tree incremented = TREE_OPERAND (exp, 0);
6737 optab this_optab = add_optab;
6738 int icode;
6739 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6740 int op0_is_copy = 0;
6741
6742 /* Stabilize any component ref that might need to be
6743 evaluated more than once below. */
ca300798
RS
6744 if (!post
6745 || TREE_CODE (incremented) == BIT_FIELD_REF
bbf6f052
RK
6746 || (TREE_CODE (incremented) == COMPONENT_REF
6747 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6748 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6749 incremented = stabilize_reference (incremented);
6750
6751 /* Compute the operands as RTX.
6752 Note whether OP0 is the actual lvalue or a copy of it:
94a58076 6753 I believe it is a copy iff it is a register or subreg
1499e0a8
RK
6754 and insns were generated in computing it. */
6755
bbf6f052 6756 temp = get_last_insn ();
906c4e36 6757 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
1499e0a8
RK
6758
6759 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
 6760 in place but instead must do sign- or zero-extension during assignment,
6761 so we copy it into a new register and let the code below use it as
6762 a copy.
6763
 6764 Note that we can safely modify this SUBREG since it is known not to be
6765 shared (it was made by the expand_expr call above). */
6766
6767 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6768 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6769
94a58076
RS
6770 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6771 && temp != get_last_insn ());
906c4e36 6772 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6773
6774 /* Decide whether incrementing or decrementing. */
6775 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6776 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6777 this_optab = sub_optab;
6778
6779 /* If OP0 is not the actual lvalue, but rather a copy in a register,
ca300798
RS
6780 then we cannot just increment OP0. We must therefore contrive to
6781 increment the original value. Then, for postincrement, we can return
6782 OP0 since it is a copy of the old value. For preincrement, we want
6783 to always expand here, since this generates better or equivalent code. */
6784 if (!post || op0_is_copy)
bbf6f052
RK
6785 {
6786 /* This is the easiest way to increment the value wherever it is.
ca300798
RS
6787 Problems with multiple evaluation of INCREMENTED are prevented
6788 because either (1) it is a component_ref or preincrement,
bbf6f052
RK
6789 in which case it was stabilized above, or (2) it is an array_ref
6790 with constant index in an array in a register, which is
6791 safe to reevaluate. */
6792 tree newexp = build ((this_optab == add_optab
6793 ? PLUS_EXPR : MINUS_EXPR),
6794 TREE_TYPE (exp),
6795 incremented,
6796 TREE_OPERAND (exp, 1));
6797 temp = expand_assignment (incremented, newexp, ! post, 0);
6798 return post ? op0 : temp;
6799 }
6800
6801 /* Convert decrement by a constant into a negative increment. */
6802 if (this_optab == sub_optab
6803 && GET_CODE (op1) == CONST_INT)
6804 {
906c4e36 6805 op1 = GEN_INT (- INTVAL (op1));
bbf6f052
RK
6806 this_optab = add_optab;
6807 }
6808
6809 if (post)
6810 {
6811 /* We have a true reference to the value in OP0.
6812 If there is an insn to add or subtract in this mode, queue it. */
6813
6814#if 0 /* Turned off to avoid making extra insn for indexed memref. */
6815 op0 = stabilize (op0);
6816#endif
6817
6818 icode = (int) this_optab->handlers[(int) mode].insn_code;
6819 if (icode != (int) CODE_FOR_nothing
6820 /* Make sure that OP0 is valid for operands 0 and 1
6821 of the insn we want to queue. */
6822 && (*insn_operand_predicate[icode][0]) (op0, mode)
6823 && (*insn_operand_predicate[icode][1]) (op0, mode))
6824 {
6825 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6826 op1 = force_reg (mode, op1);
6827
6828 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6829 }
6830 }
6831
6832 /* Preincrement, or we can't increment with one simple insn. */
6833 if (post)
6834 /* Save a copy of the value before inc or dec, to return it later. */
6835 temp = value = copy_to_reg (op0);
6836 else
6837 /* Arrange to return the incremented value. */
6838 /* Copy the rtx because expand_binop will protect from the queue,
6839 and the results of that would be invalid for us to return
6840 if our caller does emit_queue before using our result. */
6841 temp = copy_rtx (value = op0);
6842
6843 /* Increment however we can. */
6844 op1 = expand_binop (mode, this_optab, value, op1, op0,
6845 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6846 /* Make sure the value is stored into OP0. */
6847 if (op1 != op0)
6848 emit_move_insn (op0, op1);
6849
6850 return temp;
6851}
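/* Editor's example (not in the original): for a memory-resident `x'
 in `y = x++;', the POST path above queues a single add insn that
 increments x in place when the target's add pattern accepts the
 operands, and returns the old value. Failing that, the old value
 is copied to a register with copy_to_reg, the sum is stored back
 into x, and the copy is returned. */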
6852\f
6853/* Expand all function calls contained within EXP, innermost ones first.
6854 But don't look within expressions that have sequence points.
6855 For each CALL_EXPR, record the rtx for its value
6856 in the CALL_EXPR_RTL field. */
6857
6858static void
6859preexpand_calls (exp)
6860 tree exp;
6861{
6862 register int nops, i;
6863 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6864
6865 if (! do_preexpand_calls)
6866 return;
6867
6868 /* Only expressions and references can contain calls. */
6869
6870 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6871 return;
6872
6873 switch (TREE_CODE (exp))
6874 {
6875 case CALL_EXPR:
6876 /* Do nothing if already expanded. */
6877 if (CALL_EXPR_RTL (exp) != 0)
6878 return;
6879
6880 /* Do nothing to built-in functions. */
6881 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6882 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6883 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
906c4e36 6884 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
bbf6f052
RK
6885 return;
6886
6887 case COMPOUND_EXPR:
6888 case COND_EXPR:
6889 case TRUTH_ANDIF_EXPR:
6890 case TRUTH_ORIF_EXPR:
6891 /* If we find one of these, then we can be sure
6892 the adjust will be done for it (since it makes jumps).
6893 Do it now, so that if this is inside an argument
6894 of a function, we don't get the stack adjustment
6895 after some other args have already been pushed. */
6896 do_pending_stack_adjust ();
6897 return;
6898
6899 case BLOCK:
6900 case RTL_EXPR:
6901 case WITH_CLEANUP_EXPR:
6902 return;
6903
6904 case SAVE_EXPR:
6905 if (SAVE_EXPR_RTL (exp) != 0)
6906 return;
6907 }
6908
6909 nops = tree_code_length[(int) TREE_CODE (exp)];
6910 for (i = 0; i < nops; i++)
6911 if (TREE_OPERAND (exp, i) != 0)
6912 {
6913 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6914 if (type == 'e' || type == '<' || type == '1' || type == '2'
6915 || type == 'r')
6916 preexpand_calls (TREE_OPERAND (exp, i));
6917 }
6918}
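/* Editor's example (not in the original): in `z = f (1) + g (2);'
 the walk above reaches both CALL_EXPRs before the enclosing
 PLUS_EXPR is expanded, so each call's value rtx is recorded in
 CALL_EXPR_RTL first. This keeps one call's argument pushing from
 being interleaved with another's. */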
6919\f
6920/* At the start of a function, record that we have no previously-pushed
6921 arguments waiting to be popped. */
6922
6923void
6924init_pending_stack_adjust ()
6925{
6926 pending_stack_adjust = 0;
6927}
6928
6929/* When exiting from function, if safe, clear out any pending stack adjust
6930 so the adjustment won't get done. */
6931
6932void
6933clear_pending_stack_adjust ()
6934{
6935#ifdef EXIT_IGNORE_STACK
6936 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
81feeecb 6937 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
bbf6f052
RK
6938 && ! flag_inline_functions)
6939 pending_stack_adjust = 0;
6940#endif
6941}
6942
6943/* Pop any previously-pushed arguments that have not been popped yet. */
6944
6945void
6946do_pending_stack_adjust ()
6947{
6948 if (inhibit_defer_pop == 0)
6949 {
6950 if (pending_stack_adjust != 0)
906c4e36 6951 adjust_stack (GEN_INT (pending_stack_adjust));
bbf6f052
RK
6952 pending_stack_adjust = 0;
6953 }
6954}
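/* Editor's example (not in the original, assuming a downward-growing
 stack): after two consecutive calls that each pushed 4 bytes of
 arguments, pending_stack_adjust is 8, and the single insn

 (set (reg sp) (plus (reg sp) (const_int 8)))

 emitted through adjust_stack replaces a pop after every call. */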
6955
6956/* Expand all cleanups up to OLD_CLEANUPS.
6957 Needed here, and also for language-dependent calls. */
6958
6959void
6960expand_cleanups_to (old_cleanups)
6961 tree old_cleanups;
6962{
6963 while (cleanups_this_call != old_cleanups)
6964 {
906c4e36 6965 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6966 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6967 }
6968}
6969\f
6970/* Expand conditional expressions. */
6971
6972/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6973 LABEL is an rtx of code CODE_LABEL, in this function and all the
6974 functions here. */
6975
6976void
6977jumpifnot (exp, label)
6978 tree exp;
6979 rtx label;
6980{
906c4e36 6981 do_jump (exp, label, NULL_RTX);
bbf6f052
RK
6982}
6983
6984/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
6985
6986void
6987jumpif (exp, label)
6988 tree exp;
6989 rtx label;
6990{
906c4e36 6991 do_jump (exp, NULL_RTX, label);
bbf6f052
RK
6992}
6993
6994/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
6995 the result is zero, or IF_TRUE_LABEL if the result is one.
6996 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
6997 meaning fall through in that case.
6998
e7c33f54
RK
6999 do_jump always does any pending stack adjust except when it does not
7000 actually perform a jump. An example where there is no jump
7001 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
7002
bbf6f052
RK
7003 This function is responsible for optimizing cases such as
7004 &&, || and comparison operators in EXP. */
7005
7006void
7007do_jump (exp, if_false_label, if_true_label)
7008 tree exp;
7009 rtx if_false_label, if_true_label;
7010{
7011 register enum tree_code code = TREE_CODE (exp);
7012 /* Some cases need to create a label to jump to
7013 in order to properly fall through.
7014 These cases set DROP_THROUGH_LABEL nonzero. */
7015 rtx drop_through_label = 0;
7016 rtx temp;
7017 rtx comparison = 0;
7018 int i;
7019 tree type;
7020
7021 emit_queue ();
7022
7023 switch (code)
7024 {
7025 case ERROR_MARK:
7026 break;
7027
7028 case INTEGER_CST:
7029 temp = integer_zerop (exp) ? if_false_label : if_true_label;
7030 if (temp)
7031 emit_jump (temp);
7032 break;
7033
7034#if 0
7035 /* This is not true with #pragma weak */
7036 case ADDR_EXPR:
7037 /* The address of something can never be zero. */
7038 if (if_true_label)
7039 emit_jump (if_true_label);
7040 break;
7041#endif
7042
7043 case NOP_EXPR:
7044 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
7045 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
7046 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
7047 goto normal;
7048 case CONVERT_EXPR:
7049 /* If we are narrowing the operand, we have to do the compare in the
7050 narrower mode. */
7051 if ((TYPE_PRECISION (TREE_TYPE (exp))
7052 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7053 goto normal;
7054 case NON_LVALUE_EXPR:
7055 case REFERENCE_EXPR:
7056 case ABS_EXPR:
7057 case NEGATE_EXPR:
7058 case LROTATE_EXPR:
7059 case RROTATE_EXPR:
7060 /* These cannot change zero->non-zero or vice versa. */
7061 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7062 break;
7063
7064#if 0
7065 /* This is never less insns than evaluating the PLUS_EXPR followed by
7066 a test and can be longer if the test is eliminated. */
7067 case PLUS_EXPR:
7068 /* Reduce to minus. */
7069 exp = build (MINUS_EXPR, TREE_TYPE (exp),
7070 TREE_OPERAND (exp, 0),
7071 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
7072 TREE_OPERAND (exp, 1))));
7073 /* Process as MINUS. */
7074#endif
7075
7076 case MINUS_EXPR:
7077 /* Non-zero iff operands of minus differ. */
7078 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
7079 TREE_OPERAND (exp, 0),
7080 TREE_OPERAND (exp, 1)),
7081 NE, NE);
7082 break;
7083
7084 case BIT_AND_EXPR:
7085 /* If we are AND'ing with a small constant, do this comparison in the
7086 smallest type that fits. If the machine doesn't have comparisons
7087 that small, it will be converted back to the wider comparison.
7088 This helps if we are testing the sign bit of a narrower object.
7089 combine can't do this for us because it can't know whether a
7090 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
7091
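 /* Editor's example (not in the original): for `if (x & 0x80)' with
 32-bit int x, floor_log2 gives i = 7, an 8-bit unsigned type is
 chosen, and the test is re-expanded as a QImode comparison. */
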
08af8e09
RK
7092 if (! SLOW_BYTE_ACCESS
7093 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 7094 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
bbf6f052
RK
7095 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
7096 && (type = type_for_size (i + 1, 1)) != 0
08af8e09
RK
7097 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7098 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7099 != CODE_FOR_nothing))
bbf6f052
RK
7100 {
7101 do_jump (convert (type, exp), if_false_label, if_true_label);
7102 break;
7103 }
7104 goto normal;
7105
7106 case TRUTH_NOT_EXPR:
7107 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7108 break;
7109
7110 case TRUTH_ANDIF_EXPR:
7111 if (if_false_label == 0)
7112 if_false_label = drop_through_label = gen_label_rtx ();
906c4e36 7113 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
bbf6f052
RK
7114 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7115 break;
7116
7117 case TRUTH_ORIF_EXPR:
7118 if (if_true_label == 0)
7119 if_true_label = drop_through_label = gen_label_rtx ();
906c4e36 7120 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
bbf6f052
RK
7121 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7122 break;
7123
7124 case COMPOUND_EXPR:
7125 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7126 free_temp_slots ();
7127 emit_queue ();
e7c33f54 7128 do_pending_stack_adjust ();
bbf6f052
RK
7129 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
7130 break;
7131
7132 case COMPONENT_REF:
7133 case BIT_FIELD_REF:
7134 case ARRAY_REF:
7135 {
7136 int bitsize, bitpos, unsignedp;
7137 enum machine_mode mode;
7138 tree type;
7bb0943f 7139 tree offset;
bbf6f052
RK
7140 int volatilep = 0;
7141
7142 /* Get description of this reference. We don't actually care
7143 about the underlying object here. */
7bb0943f
RS
7144 get_inner_reference (exp, &bitsize, &bitpos, &offset,
7145 &mode, &unsignedp, &volatilep);
bbf6f052
RK
7146
7147 type = type_for_size (bitsize, unsignedp);
08af8e09
RK
7148 if (! SLOW_BYTE_ACCESS
7149 && type != 0 && bitsize >= 0
7150 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
7151 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
7152 != CODE_FOR_nothing))
bbf6f052
RK
7153 {
7154 do_jump (convert (type, exp), if_false_label, if_true_label);
7155 break;
7156 }
7157 goto normal;
7158 }
7159
7160 case COND_EXPR:
7161 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
7162 if (integer_onep (TREE_OPERAND (exp, 1))
7163 && integer_zerop (TREE_OPERAND (exp, 2)))
7164 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7165
7166 else if (integer_zerop (TREE_OPERAND (exp, 1))
7167 && integer_onep (TREE_OPERAND (exp, 2)))
7168 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7169
7170 else
7171 {
7172 register rtx label1 = gen_label_rtx ();
7173 drop_through_label = gen_label_rtx ();
906c4e36 7174 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
bbf6f052
RK
7175 /* Now the THEN-expression. */
7176 do_jump (TREE_OPERAND (exp, 1),
7177 if_false_label ? if_false_label : drop_through_label,
7178 if_true_label ? if_true_label : drop_through_label);
e7c33f54
RK
7179 /* In case the do_jump just above never jumps. */
7180 do_pending_stack_adjust ();
bbf6f052
RK
7181 emit_label (label1);
7182 /* Now the ELSE-expression. */
7183 do_jump (TREE_OPERAND (exp, 2),
7184 if_false_label ? if_false_label : drop_through_label,
7185 if_true_label ? if_true_label : drop_through_label);
7186 }
7187 break;
7188
7189 case EQ_EXPR:
7190 if (integer_zerop (TREE_OPERAND (exp, 1)))
7191 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
7192 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7193 == MODE_INT)
7194 &&
7195 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7196 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
7197 else
7198 comparison = compare (exp, EQ, EQ);
7199 break;
7200
7201 case NE_EXPR:
7202 if (integer_zerop (TREE_OPERAND (exp, 1)))
7203 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
7204 else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7205 == MODE_INT)
7206 &&
7207 !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7208 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
7209 else
7210 comparison = compare (exp, NE, NE);
7211 break;
7212
7213 case LT_EXPR:
7214 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7215 == MODE_INT)
7216 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7217 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
7218 else
7219 comparison = compare (exp, LT, LTU);
7220 break;
7221
7222 case LE_EXPR:
7223 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7224 == MODE_INT)
7225 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7226 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
7227 else
7228 comparison = compare (exp, LE, LEU);
7229 break;
7230
7231 case GT_EXPR:
7232 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7233 == MODE_INT)
7234 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7235 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
7236 else
7237 comparison = compare (exp, GT, GTU);
7238 break;
7239
7240 case GE_EXPR:
7241 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7242 == MODE_INT)
7243 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7244 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
7245 else
7246 comparison = compare (exp, GE, GEU);
7247 break;
7248
7249 default:
7250 normal:
906c4e36 7251 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7252#if 0
7253 /* This is not needed any more and causes poor code since it causes
7254 comparisons and tests from non-SI objects to have different code
7255 sequences. */
7256 /* Copy to register to avoid generating bad insns by cse
7257 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
7258 if (!cse_not_expected && GET_CODE (temp) == MEM)
7259 temp = copy_to_reg (temp);
7260#endif
7261 do_pending_stack_adjust ();
7262 if (GET_CODE (temp) == CONST_INT)
7263 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
7264 else if (GET_CODE (temp) == LABEL_REF)
7265 comparison = const_true_rtx;
7266 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
7267 && !can_compare_p (GET_MODE (temp)))
7268 /* Note swapping the labels gives us not-equal. */
7269 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
7270 else if (GET_MODE (temp) != VOIDmode)
7271 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
cd1b4b44
RK
7272 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
7273 GET_MODE (temp), NULL_RTX, 0);
bbf6f052
RK
7274 else
7275 abort ();
7276 }
7277
7278 /* Do any postincrements in the expression that was tested. */
7279 emit_queue ();
7280
7281 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
7282 straight into a conditional jump instruction as the jump condition.
7283 Otherwise, all the work has been done already. */
7284
7285 if (comparison == const_true_rtx)
7286 {
7287 if (if_true_label)
7288 emit_jump (if_true_label);
7289 }
7290 else if (comparison == const0_rtx)
7291 {
7292 if (if_false_label)
7293 emit_jump (if_false_label);
7294 }
7295 else if (comparison)
7296 do_jump_for_compare (comparison, if_false_label, if_true_label);
7297
7298 free_temp_slots ();
7299
7300 if (drop_through_label)
e7c33f54
RK
7301 {
7302 /* If do_jump produces code that might be jumped around,
7303 do any stack adjusts from that code, before the place
7304 where control merges in. */
7305 do_pending_stack_adjust ();
7306 emit_label (drop_through_label);
7307 }
bbf6f052
RK
7308}
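/* Editor's example (not in the original): for `if (a && b) body;'
 the front end calls jumpifnot on the TRUTH_ANDIF_EXPR, and the
 case above emits, in effect,

 if (a == 0) goto else_label;
 if (b == 0) goto else_label;
 ... body ...
 else_label:

 with each subcondition expanded by a recursive do_jump. */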
7309\f
7310/* Given a comparison expression EXP for values too wide to be compared
7311 with one insn, test the comparison and jump to the appropriate label.
7312 The code of EXP is ignored; we always test GT if SWAP is 0,
7313 and LT if SWAP is 1. */
7314
7315static void
7316do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
7317 tree exp;
7318 int swap;
7319 rtx if_false_label, if_true_label;
7320{
906c4e36
RK
7321 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
7322 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7323 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7324 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7325 rtx drop_through_label = 0;
7326 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
7327 int i;
7328
7329 if (! if_true_label || ! if_false_label)
7330 drop_through_label = gen_label_rtx ();
7331 if (! if_true_label)
7332 if_true_label = drop_through_label;
7333 if (! if_false_label)
7334 if_false_label = drop_through_label;
7335
7336 /* Compare a word at a time, high order first. */
7337 for (i = 0; i < nwords; i++)
7338 {
7339 rtx comp;
7340 rtx op0_word, op1_word;
7341
7342 if (WORDS_BIG_ENDIAN)
7343 {
7344 op0_word = operand_subword_force (op0, i, mode);
7345 op1_word = operand_subword_force (op1, i, mode);
7346 }
7347 else
7348 {
7349 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
7350 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
7351 }
7352
7353 /* All but high-order word must be compared as unsigned. */
7354 comp = compare_from_rtx (op0_word, op1_word,
7355 (unsignedp || i > 0) ? GTU : GT,
906c4e36 7356 unsignedp, word_mode, NULL_RTX, 0);
bbf6f052
RK
7357 if (comp == const_true_rtx)
7358 emit_jump (if_true_label);
7359 else if (comp != const0_rtx)
906c4e36 7360 do_jump_for_compare (comp, NULL_RTX, if_true_label);
bbf6f052
RK
7361
7362 /* Consider lower words only if these are equal. */
7363 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
906c4e36 7364 NULL_RTX, 0);
bbf6f052
RK
7365 if (comp == const_true_rtx)
7366 emit_jump (if_false_label);
7367 else if (comp != const0_rtx)
906c4e36 7368 do_jump_for_compare (comp, NULL_RTX, if_false_label);
bbf6f052
RK
7369 }
7370
7371 if (if_false_label)
7372 emit_jump (if_false_label);
7373 if (drop_through_label)
7374 emit_label (drop_through_label);
7375}
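/* Editor's sketch (hypothetical helper, not part of this file): the
 loop above implements, word by word, the equivalent of this plain-C
 model of an unsigned two-word greater-than; for signed types only
 the high-order word comparison would be signed. */
#if 0
static int
example_gtu_two_words (hi0, lo0, hi1, lo1)
 unsigned long hi0, lo0, hi1, lo1;
{
 if (hi0 != hi1) /* high-order words decide */
 return hi0 > hi1;
 return lo0 > lo1; /* tie: compare low-order words, unsigned */
}
#endif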
7376
7377/* Given an EQ_EXPR expression EXP for values too wide to be compared
7378 with one insn, test the comparison and jump to the appropriate label. */
7379
7380static void
7381do_jump_by_parts_equality (exp, if_false_label, if_true_label)
7382 tree exp;
7383 rtx if_false_label, if_true_label;
7384{
906c4e36
RK
7385 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7386 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7387 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
7388 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
7389 int i;
7390 rtx drop_through_label = 0;
7391
7392 if (! if_false_label)
7393 drop_through_label = if_false_label = gen_label_rtx ();
7394
7395 for (i = 0; i < nwords; i++)
7396 {
7397 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
7398 operand_subword_force (op1, i, mode),
cd1b4b44
RK
7399 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
7400 word_mode, NULL_RTX, 0);
bbf6f052
RK
7401 if (comp == const_true_rtx)
7402 emit_jump (if_false_label);
7403 else if (comp != const0_rtx)
906c4e36 7404 do_jump_for_compare (comp, if_false_label, NULL_RTX);
bbf6f052
RK
7405 }
7406
7407 if (if_true_label)
7408 emit_jump (if_true_label);
7409 if (drop_through_label)
7410 emit_label (drop_through_label);
7411}
7412\f
7413/* Jump according to whether OP0 is 0.
7414 We assume that OP0 has an integer mode that is too wide
7415 for the available compare insns. */
7416
7417static void
7418do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
7419 rtx op0;
7420 rtx if_false_label, if_true_label;
7421{
7422 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
7423 int i;
7424 rtx drop_through_label = 0;
7425
7426 if (! if_false_label)
7427 drop_through_label = if_false_label = gen_label_rtx ();
7428
7429 for (i = 0; i < nwords; i++)
7430 {
7431 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
7432 GET_MODE (op0)),
cd1b4b44 7433 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
bbf6f052
RK
7434 if (comp == const_true_rtx)
7435 emit_jump (if_false_label);
7436 else if (comp != const0_rtx)
906c4e36 7437 do_jump_for_compare (comp, if_false_label, NULL_RTX);
bbf6f052
RK
7438 }
7439
7440 if (if_true_label)
7441 emit_jump (if_true_label);
7442 if (drop_through_label)
7443 emit_label (drop_through_label);
7444}
7445
7446/* Given a comparison expression in rtl form, output conditional branches to
7447 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
7448
7449static void
7450do_jump_for_compare (comparison, if_false_label, if_true_label)
7451 rtx comparison, if_false_label, if_true_label;
7452{
7453 if (if_true_label)
7454 {
7455 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7456 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
7457 else
7458 abort ();
7459
7460 if (if_false_label)
7461 emit_jump (if_false_label);
7462 }
7463 else if (if_false_label)
7464 {
7465 rtx insn;
7466 rtx prev = PREV_INSN (get_last_insn ());
7467 rtx branch = 0;
7468
7469 /* Output the branch with the opposite condition. Then try to invert
7470 what is generated. If more than one insn is a branch, or if the
7471 branch is not the last insn written, abort. If we can't invert
 7472 the branch, make a true label, redirect this jump to that,
7473 emit a jump to the false label and define the true label. */
7474
7475 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
7476 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
7477 else
7478 abort ();
7479
7480 /* Here we get the insn before what was just emitted.
7481 On some machines, emitting the branch can discard
7482 the previous compare insn and emit a replacement. */
7483 if (prev == 0)
7484 /* If there's only one preceding insn... */
7485 insn = get_insns ();
7486 else
7487 insn = NEXT_INSN (prev);
7488
7489 for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
7490 if (GET_CODE (insn) == JUMP_INSN)
7491 {
7492 if (branch)
7493 abort ();
7494 branch = insn;
7495 }
7496
7497 if (branch != get_last_insn ())
7498 abort ();
7499
7500 if (! invert_jump (branch, if_false_label))
7501 {
7502 if_true_label = gen_label_rtx ();
7503 redirect_jump (branch, if_true_label);
7504 emit_jump (if_false_label);
7505 emit_label (if_true_label);
7506 }
7507 }
7508}
7509\f
7510/* Generate code for a comparison expression EXP
7511 (including code to compute the values to be compared)
7512 and set (CC0) according to the result.
7513 SIGNED_CODE should be the rtx operation for this comparison for
7514 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
7515
7516 We force a stack adjustment unless there are currently
7517 things pushed on the stack that aren't yet used. */
7518
7519static rtx
7520compare (exp, signed_code, unsigned_code)
7521 register tree exp;
7522 enum rtx_code signed_code, unsigned_code;
7523{
906c4e36
RK
7524 register rtx op0
7525 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7526 register rtx op1
7527 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7528 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
7529 register enum machine_mode mode = TYPE_MODE (type);
7530 int unsignedp = TREE_UNSIGNED (type);
7531 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
7532
7533 return compare_from_rtx (op0, op1, code, unsignedp, mode,
7534 ((mode == BLKmode)
906c4e36 7535 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
bbf6f052
RK
7536 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
7537}
7538
7539/* Like compare but expects the values to compare as two rtx's.
7540 The decision as to signed or unsigned comparison must be made by the caller.
7541
7542 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
7543 compared.
7544
7545 If ALIGN is non-zero, it is the alignment of this type; if zero, the
7546 size of MODE should be used. */
7547
7548rtx
7549compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
7550 register rtx op0, op1;
7551 enum rtx_code code;
7552 int unsignedp;
7553 enum machine_mode mode;
7554 rtx size;
7555 int align;
7556{
a7c5971a
RK
7557 rtx tem;
7558
bf743ac5
RK
7559 /* If one operand is constant, make it the second one. Only do this
7560 if the other operand is not constant as well. */
bbf6f052 7561
bf743ac5
RK
7562 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
7563 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
bbf6f052 7564 {
a7c5971a 7565 tem = op0;
bbf6f052
RK
7566 op0 = op1;
7567 op1 = tem;
7568 code = swap_condition (code);
7569 }
7570
7571 if (flag_force_mem)
7572 {
7573 op0 = force_not_mem (op0);
7574 op1 = force_not_mem (op1);
7575 }
7576
7577 do_pending_stack_adjust ();
7578
a7c5971a
RK
7579 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
7580 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
7581 return tem;
bbf6f052 7582
cd1b4b44
RK
7583#if 0
7584 /* There's no need to do this now that combine.c can eliminate lots of
7585 sign extensions. This can be less efficient in certain cases on other
 7586 machines. */
7587
bbf6f052
RK
7588 /* If this is a signed equality comparison, we can do it as an
7589 unsigned comparison since zero-extension is cheaper than sign
77fa0940
RK
7590 extension and comparisons with zero are done as unsigned. This is
7591 the case even on machines that can do fast sign extension, since
8008b228 7592 zero-extension is easier to combine with other operations than
77fa0940
RK
7593 sign-extension is. If we are comparing against a constant, we must
7594 convert it to what it would look like unsigned. */
bbf6f052 7595 if ((code == EQ || code == NE) && ! unsignedp
906c4e36 7596 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7597 {
7598 if (GET_CODE (op1) == CONST_INT
7599 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
906c4e36 7600 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
bbf6f052
RK
7601 unsignedp = 1;
7602 }
cd1b4b44 7603#endif
bbf6f052
RK
7604
7605 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
7606
7607 return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
7608}
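/* Editor's example (not in the original): a call such as
 compare_from_rtx (const1_rtx, reg, LT, ...) is canonicalized above
 by swapping the operands and the condition, so the compare insn
 actually emitted tests `reg GT 1'. If both operands are
 CONST_INTs, simplify_relational_operation folds the whole
 comparison to const0_rtx or const_true_rtx and no insn is emitted
 at all. */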
7609\f
7610/* Generate code to calculate EXP using a store-flag instruction
e7c33f54
RK
7611 and return an rtx for the result. EXP is either a comparison
7612 or a TRUTH_NOT_EXPR whose operand is a comparison.
7613
bbf6f052
RK
7614 If TARGET is nonzero, store the result there if convenient.
7615
7616 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
7617 cheap.
7618
7619 Return zero if there is no suitable set-flag instruction
7620 available on this machine.
7621
7622 Once expand_expr has been called on the arguments of the comparison,
7623 we are committed to doing the store flag, since it is not safe to
7624 re-evaluate the expression. We emit the store-flag insn by calling
7625 emit_store_flag, but only expand the arguments if we have a reason
7626 to believe that emit_store_flag will be successful. If we think that
7627 it will, but it isn't, we have to simulate the store-flag with a
7628 set/jump/set sequence. */
7629
7630static rtx
7631do_store_flag (exp, target, mode, only_cheap)
7632 tree exp;
7633 rtx target;
7634 enum machine_mode mode;
7635 int only_cheap;
7636{
7637 enum rtx_code code;
e7c33f54 7638 tree arg0, arg1, type;
bbf6f052 7639 tree tem;
e7c33f54
RK
7640 enum machine_mode operand_mode;
7641 int invert = 0;
7642 int unsignedp;
bbf6f052
RK
7643 rtx op0, op1;
7644 enum insn_code icode;
7645 rtx subtarget = target;
7646 rtx result, label, pattern, jump_pat;
7647
e7c33f54
RK
7648 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
7649 result at the end. We can't simply invert the test since it would
7650 have already been inverted if it were valid. This case occurs for
7651 some floating-point comparisons. */
7652
7653 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
7654 invert = 1, exp = TREE_OPERAND (exp, 0);
7655
7656 arg0 = TREE_OPERAND (exp, 0);
7657 arg1 = TREE_OPERAND (exp, 1);
7658 type = TREE_TYPE (arg0);
7659 operand_mode = TYPE_MODE (type);
7660 unsignedp = TREE_UNSIGNED (type);
7661
bbf6f052
RK
7662 /* We won't bother with BLKmode store-flag operations because it would mean
7663 passing a lot of information to emit_store_flag. */
7664 if (operand_mode == BLKmode)
7665 return 0;
7666
d964285c
CH
7667 STRIP_NOPS (arg0);
7668 STRIP_NOPS (arg1);
bbf6f052
RK
7669
7670 /* Get the rtx comparison code to use. We know that EXP is a comparison
7671 operation of some type. Some comparisons against 1 and -1 can be
7672 converted to comparisons with zero. Do so here so that the tests
7673 below will be aware that we have a comparison with zero. These
7674 tests will not catch constants in the first operand, but constants
7675 are rarely passed as the first operand. */
7676
7677 switch (TREE_CODE (exp))
7678 {
7679 case EQ_EXPR:
7680 code = EQ;
7681 break;
7682 case NE_EXPR:
7683 code = NE;
7684 break;
7685 case LT_EXPR:
7686 if (integer_onep (arg1))
7687 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
7688 else
7689 code = unsignedp ? LTU : LT;
7690 break;
7691 case LE_EXPR:
7692 if (integer_all_onesp (arg1))
7693 arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
7694 else
7695 code = unsignedp ? LEU : LE;
7696 break;
7697 case GT_EXPR:
7698 if (integer_all_onesp (arg1))
7699 arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
7700 else
7701 code = unsignedp ? GTU : GT;
7702 break;
7703 case GE_EXPR:
7704 if (integer_onep (arg1))
7705 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
7706 else
7707 code = unsignedp ? GEU : GE;
7708 break;
7709 default:
7710 abort ();
7711 }
7712
7713 /* Put a constant second. */
7714 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
7715 {
7716 tem = arg0; arg0 = arg1; arg1 = tem;
7717 code = swap_condition (code);
7718 }
7719
7720 /* If this is an equality or inequality test of a single bit, we can
7721 do this by shifting the bit being tested to the low-order bit and
7722 masking the result with the constant 1. If the condition was EQ,
7723 we xor it with 1. This does not require an scc insn and is faster
7724 than an scc insn even if we have it. */
7725
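 /* Editor's example (not in the original): for `t = ((x & 8) != 0)'
 the code below computes t = (x >> 3) & 1; for the EQ form it
 additionally XORs with 1, t = ((x >> 3) & 1) ^ 1. No scc insn
 or branch is needed. */
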
7726 if ((code == NE || code == EQ)
7727 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
7728 && integer_pow2p (TREE_OPERAND (arg0, 1))
906c4e36 7729 && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7730 {
7731 int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
906c4e36 7732 NULL_RTX, VOIDmode, 0)));
bbf6f052
RK
7733
7734 if (subtarget == 0 || GET_CODE (subtarget) != REG
7735 || GET_MODE (subtarget) != operand_mode
7736 || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
7737 subtarget = 0;
7738
7739 op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);
7740
7741 if (bitnum != 0)
7742 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
7743 size_int (bitnum), target, 1);
7744
7745 if (GET_MODE (op0) != mode)
7746 op0 = convert_to_mode (mode, op0, 1);
7747
7748 if (bitnum != TYPE_PRECISION (type) - 1)
7749 op0 = expand_and (op0, const1_rtx, target);
7750
e7c33f54 7751 if ((code == EQ && ! invert) || (code == NE && invert))
bbf6f052
RK
7752 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
7753 OPTAB_LIB_WIDEN);
7754
7755 return op0;
7756 }
7757
7758 /* Now see if we are likely to be able to do this. Return if not. */
7759 if (! can_compare_p (operand_mode))
7760 return 0;
7761 icode = setcc_gen_code[(int) code];
7762 if (icode == CODE_FOR_nothing
7763 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
7764 {
7765 /* We can only do this if it is one of the special cases that
7766 can be handled without an scc insn. */
7767 if ((code == LT && integer_zerop (arg1))
7768 || (! only_cheap && code == GE && integer_zerop (arg1)))
7769 ;
7770 else if (BRANCH_COST >= 0
7771 && ! only_cheap && (code == NE || code == EQ)
7772 && TREE_CODE (type) != REAL_TYPE
7773 && ((abs_optab->handlers[(int) operand_mode].insn_code
7774 != CODE_FOR_nothing)
7775 || (ffs_optab->handlers[(int) operand_mode].insn_code
7776 != CODE_FOR_nothing)))
7777 ;
7778 else
7779 return 0;
7780 }
7781
7782 preexpand_calls (exp);
7783 if (subtarget == 0 || GET_CODE (subtarget) != REG
7784 || GET_MODE (subtarget) != operand_mode
7785 || ! safe_from_p (subtarget, arg1))
7786 subtarget = 0;
7787
7788 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
906c4e36 7789 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7790
7791 if (target == 0)
7792 target = gen_reg_rtx (mode);
7793
d39985fa
RK
7794 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
7795 because, if the emit_store_flag does anything it will succeed and
7796 OP0 and OP1 will not be used subsequently. */
7797
7798 result = emit_store_flag (target, code,
7799 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
7800 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
7801 operand_mode, unsignedp, 1);
bbf6f052
RK
7802
7803 if (result)
e7c33f54
RK
7804 {
7805 if (invert)
7806 result = expand_binop (mode, xor_optab, result, const1_rtx,
7807 result, 0, OPTAB_LIB_WIDEN);
7808 return result;
7809 }
bbf6f052
RK
7810
7811 /* If this failed, we have to do this with set/compare/jump/set code. */
7812 if (target == 0 || GET_CODE (target) != REG
7813 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
7814 target = gen_reg_rtx (GET_MODE (target));
7815
e7c33f54 7816 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
906c4e36
RK
7817 result = compare_from_rtx (op0, op1, code, unsignedp,
7818 operand_mode, NULL_RTX, 0);
bbf6f052 7819 if (GET_CODE (result) == CONST_INT)
e7c33f54
RK
7820 return (((result == const0_rtx && ! invert)
7821 || (result != const0_rtx && invert))
7822 ? const0_rtx : const1_rtx);
bbf6f052
RK
7823
7824 label = gen_label_rtx ();
7825 if (bcc_gen_fctn[(int) code] == 0)
7826 abort ();
7827
7828 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
e7c33f54 7829 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
bbf6f052
RK
7830 emit_label (label);
7831
7832 return target;
7833}
7834\f
7835/* Generate a tablejump instruction (used for switch statements). */
7836
7837#ifdef HAVE_tablejump
7838
7839/* INDEX is the value being switched on, with the lowest value
7840 in the table already subtracted.
88d3b7f0 7841 MODE is its expected mode (needed if INDEX is constant).
bbf6f052
RK
7842 RANGE is the length of the jump table.
7843 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
7844
7845 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
7846 index value is out of range. */
7847
7848void
e87b4f3f 7849do_tablejump (index, mode, range, table_label, default_label)
bbf6f052 7850 rtx index, range, table_label, default_label;
e87b4f3f 7851 enum machine_mode mode;
bbf6f052
RK
7852{
7853 register rtx temp, vector;
7854
88d3b7f0
RS
7855 /* Do an unsigned comparison (in the proper mode) between the index
7856 expression and the value which represents the length of the range.
7857 Since we just finished subtracting the lower bound of the range
7858 from the index expression, this comparison allows us to simultaneously
7859 check that the original index expression value is both greater than
7860 or equal to the minimum value of the range and less than or equal to
7861 the maximum value of the range. */
e87b4f3f 7862
b4c65118 7863 emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 1, 0);
bbf6f052 7864 emit_jump_insn (gen_bltu (default_label));
88d3b7f0
RS
7865
7866 /* If index is in range, it must fit in Pmode.
7867 Convert to Pmode so we can index with it. */
7868 if (mode != Pmode)
7869 index = convert_to_mode (Pmode, index, 1);
7870
bbf6f052
RK
7871 /* If flag_force_addr were to affect this address
7872 it could interfere with the tricky assumptions made
7873 about addresses that contain label-refs,
7874 which may be valid only very near the tablejump itself. */
7875 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
7876 GET_MODE_SIZE, because this indicates how large insns are. The other
7877 uses should all be Pmode, because they are addresses. This code
7878 could fail if addresses and insns are not the same size. */
7879 index = memory_address_noforce
7880 (CASE_VECTOR_MODE,
7881 gen_rtx (PLUS, Pmode,
7882 gen_rtx (MULT, Pmode, index,
906c4e36 7883 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
bbf6f052
RK
7884 gen_rtx (LABEL_REF, Pmode, table_label)));
7885 temp = gen_reg_rtx (CASE_VECTOR_MODE);
7886 vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
7887 RTX_UNCHANGING_P (vector) = 1;
7888 convert_move (temp, vector, 0);
7889
7890 emit_jump_insn (gen_tablejump (temp, table_label));
7891
7892#ifndef CASE_VECTOR_PC_RELATIVE
7893 /* If we are generating PIC code or if the table is PC-relative, the
7894 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
7895 if (! flag_pic)
7896 emit_barrier ();
7897#endif
7898}
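/* Editor's example (not in the original): with 4-byte table entries
 (CASE_VECTOR_MODE of SImode, say), the address computed above is,
 in effect, table_label + index * 4, and the entry loaded from
 there is moved into a register for gen_tablejump. */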
7899
7900#endif /* HAVE_tablejump */