/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA.  */


#include "config.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "expr.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "gvarargs.h"
#include "typeclass.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.  */

#ifdef STACK_GROWS_DOWNWARD
#ifdef PUSH_ROUNDING
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif
#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* A list of all cleanups which belong to the arguments of
   function calls being expanded by expand_call.  */
tree cleanups_this_call;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

rtx store_expr ();
static void store_constructor ();
static rtx store_field ();
static rtx expand_builtin ();
static rtx compare ();
static rtx do_store_flag ();
static void preexpand_calls ();
static rtx expand_increment ();
static void init_queue ();

void do_pending_stack_adjust ();
static void do_jump_for_compare ();
static void do_jump_by_parts_equality ();
static void do_jump_by_parts_equality_rtx ();
static void do_jump_by_parts_greater ();
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* A value of around 6 would minimize code size; infinity would minimize
   execution time.  */
#define MOVE_RATIO 15
#endif
#endif
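
/* Editor's illustration (not part of the original source): with the
   default MOVE_RATIO of 15, an 8-byte aligned copy on a 32-bit target
   costs two SImode moves (2 < 15), so it is expanded inline by
   move_by_pieces; a 64-byte copy would need 16 moves (16 >= 15) and
   is handed to a movstr pattern or the memcpy/bcopy libcall instead.  */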

/* This array records the insn_code of insns to perform block moves.  */
static enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS 0
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  rtx mem = gen_rtx (MEM, VOIDmode, stack_pointer_rtx);
  rtx mem1 = gen_rtx (MEM, VOIDmode, frame_pointer_rtx);

  start_sequence ();
  insn = emit_insn (gen_rtx (SET, 0, 0));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;
      int num_clobbers;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx (REG, mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }

      movstr_optab[(int) mode] = CODE_FOR_nothing;
    }

  end_sequence ();

#ifdef HAVE_movstrqi
  if (HAVE_movstrqi)
    movstr_optab[(int) QImode] = CODE_FOR_movstrqi;
#endif
#ifdef HAVE_movstrhi
  if (HAVE_movstrhi)
    movstr_optab[(int) HImode] = CODE_FOR_movstrhi;
#endif
#ifdef HAVE_movstrsi
  if (HAVE_movstrsi)
    movstr_optab[(int) SImode] = CODE_FOR_movstrsi;
#endif
#ifdef HAVE_movstrdi
  if (HAVE_movstrdi)
    movstr_optab[(int) DImode] = CODE_FOR_movstrdi;
#endif
#ifdef HAVE_movstrti
  if (HAVE_movstrti)
    movstr_optab[(int) TImode] = CODE_FOR_movstrti;
#endif
}
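
/* Editor's sketch (hypothetical usage, for illustration only): after
   init_expr_once has run, later passes can test cheaply whether a mode
   is directly usable on memory, e.g.

	if (direct_load[(int) SImode])
	  ...access the field with a plain SImode load...

   movstr_optab plays the analogous role for block-move patterns.  */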

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->cleanups_this_call = cleanups_this_call;
  p->saveregs_value = saveregs_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  cleanups_this_call = 0;
  saveregs_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  cleanups_this_call = p->cleanups_this_call;
  saveregs_value = p->saveregs_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx (QUEUED, GET_MODE (var),
			   var, NULL_RTX, NULL_RTX, body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...))
	 to facilitate use of autoincrement.
	 Make a copy of the contents of the memory location
	 rather than a copy of the address, but not
	 if the value is of mode BLKmode.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  XEXP (x, 0) = QUEUED_VAR (y);
	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (x));
	      emit_insn_before (gen_move_insn (temp, x),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return x;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
      else if (code == PLUS || code == MULT)
	{
	  XEXP (x, 0) = protect_from_queue (XEXP (x, 0), 0);
	  XEXP (x, 1) = protect_from_queue (XEXP (x, 1), 0);
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
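
/* Editor's sketch (hypothetical usage, not from the original source):
   any caller about to place a possibly-QUEUED rtx in an insn first
   wraps each operand, then uses the results immediately:

	x = protect_from_queue (x, 1);	(write access)
	y = protect_from_queue (y, 0);	(read access)
	emit_move_insn (x, y);

   Holding the protected value across an emit_queue call would produce
   incorrect code, as the comment above warns.  */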

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return queued_subexp_p (XEXP (x, 0))
	|| queued_subexp_p (XEXP (x, 1));
    }
  return 0;
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while (p = pending_chain)
    {
      QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
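
/* Editor's sketch of the queue's life cycle (illustrative only, using
   a hypothetical VAR): expanding VAR++ might do

	result = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
	...expand uses of the pre-increment value via
	   protect_from_queue...
	emit_queue ();	(the deferred increment is emitted here)

   init_queue merely asserts that no increments were left pending.  */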
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
#ifdef HAVE_extendqfhf2
      if (HAVE_extendqfhf2 && from_mode == QFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfsf2
      if (HAVE_extendqfsf2 && from_mode == QFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfdf2
      if (HAVE_extendqfdf2 && from_mode == QFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqfxf2
      if (HAVE_extendqfxf2 && from_mode == QFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendqftf2
      if (HAVE_extendqftf2 && from_mode == QFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendqftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendhfsf2
      if (HAVE_extendhfsf2 && from_mode == HFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfdf2
      if (HAVE_extendhfdf2 && from_mode == HFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhfxf2
      if (HAVE_extendhfxf2 && from_mode == HFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendhftf2
      if (HAVE_extendhftf2 && from_mode == HFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendhftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_extendsfdf2
      if (HAVE_extendsfdf2 && from_mode == SFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsfxf2
      if (HAVE_extendsfxf2 && from_mode == SFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extendsftf2
      if (HAVE_extendsftf2 && from_mode == SFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extendsftf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddfxf2
      if (HAVE_extenddfxf2 && from_mode == DFmode && to_mode == XFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddfxf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_extenddftf2
      if (HAVE_extenddftf2 && from_mode == DFmode && to_mode == TFmode)
	{
	  emit_unop_insn (CODE_FOR_extenddftf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;
	    }
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      emit_library_call (libcall, 1, to_mode, 1, from, from_mode);
      emit_move_insn (to, hard_libcall_value (to_mode));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx (equiv_code, to_mode, from));
      return;
    }

  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD)
    {
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220 */
  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi
      if (HAVE_truncsipsi)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi
	  if (HAVE_extendpsisi)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode))
      && ((GET_CODE (from) == MEM
	   && ! MEM_VOLATILE_P (from)
	   && direct_load[(int) to_mode]
	   && ! mode_dependent_address_p (XEXP (from, 0)))
	  || GET_CODE (from) == REG
	  || GET_CODE (from) == SUBREG))
    {
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if ((can_extend_p (to_mode, intermediate, unsignedp)
		 != CODE_FOR_nothing)
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.  */
	  abort ();
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
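
/* Editor's sketch (hypothetical usage): to sign-extend a QImode value
   QI_VAL into an existing SImode register SI_REG,

	convert_move (si_reg, qi_val, 0);

   and the same call with UNSIGNEDP nonzero zero-extends instead.
   When the modes already match, this degenerates to emit_move_insn.  */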

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If X is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (mode == GET_MODE (x))
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    return immed_double_const (INTVAL (x), (HOST_WIDE_INT) 0, mode);

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case, we must be narrowing
     the operand.  */

  if (GET_CODE (x) == CONST_INT
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (GET_MODE (x))
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || GET_CODE (x) == REG)))))
    return gen_lowpart (mode, x);

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
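
/* Editor's sketch (hypothetical usage): unlike convert_move, which
   stores into a destination the caller supplies, convert_to_mode
   produces a value:

	rtx wide = convert_to_mode (SImode, narrow, unsignedp);

   returning NARROW itself, a lowpart of it, or a fresh pseudo,
   whichever is cheapest.  */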
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int len;
  int offset;
  int reverse;
};

static void move_by_pieces_1 ();
static int move_by_pieces_ninsns ();

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! (STRICT_ALIGNMENT || SLOW_UNALIGNED_ACCESS)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
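
/* Editor's worked example (illustrative): on a target with MOVE_MAX 4
   and full alignment, move_by_pieces_ninsns (7, 4) counts 7/4 = 1
   SImode move leaving 3 bytes, then 3/2 = 1 HImode move leaving 1
   byte, then 1 QImode move: 3 insns in all.  With ALIGN 1 on a
   strict-alignment machine the same 7 bytes cost 7 QImode moves.  */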

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) ();
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx (MEM, mode, data->to_addr)
	     : change_address (data->to, mode,
			       plus_constant (data->to_addr, data->offset)));
      from1 =
	(data->autinc_from
	 ? gen_rtx (MEM, mode, data->from_addr)
	 : change_address (data->from, mode,
			   plus_constant (data->from_addr, data->offset)));

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.  */

void
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && (unsigned) INTVAL (size) <= GET_MODE_MASK (mode)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return;
		}
	      else
		delete_insns_since (last);
	    }
	}

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
			 VOIDmode, 3, XEXP (x, 0), Pmode,
			 XEXP (y, 0), Pmode,
			 convert_to_mode (Pmode, size, 1), Pmode);
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (Pmode, size, 1), Pmode);
#endif
    }
}
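
/* Editor's sketch (hypothetical usage): copying a 12-byte, word-aligned
   BLKmode object from SRC to DST:

	emit_block_move (dst, src, GEN_INT (12), 4);

   Three SImode moves beat the default MOVE_RATIO, so this normally
   takes the move_by_pieces path rather than a movstr insn or libcall.  */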
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  last = get_last_insn ();
  pat = gen_load_multiple (gen_rtx (REG, word_mode, regno), x,
			   GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx (REG, word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_from_reg (regno, x, nregs)
     int regno;
     rtx x;
     int nregs;
{
  int i;
  rtx pat, last;

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  last = get_last_insn ();
  pat = gen_store_multiple (x, gen_rtx (REG, word_mode, regno),
			    GEN_INT (nregs));
  if (pat)
    {
      emit_insn (pat);
      return;
    }
  else
    delete_insns_since (last);
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx (REG, word_mode, regno + i));
    }
}

/* Mark NREGS consecutive regs, starting at REGNO, as being live now.  */

void
use_regs (regno, nregs)
     int regno;
     int nregs;
{
  int i;

  for (i = 0; i < nregs; i++)
    emit_insn (gen_rtx (USE, VOIDmode, gen_rtx (REG, word_mode, regno + i)));
}

/* Mark the instructions since PREV as a libcall block.
   Add a REG_LIBCALL note to the first insn after PREV
   and a REG_RETVAL note to the most recent insn.  */

static void
group_insns (prev)
     rtx prev;
{
  rtx insn_first;
  rtx insn_last;

  /* Find the instructions to mark.  */
  if (prev)
    insn_first = NEXT_INSN (prev);
  else
    insn_first = get_insns ();

  insn_last = get_last_insn ();

  REG_NOTES (insn_last) = gen_rtx (INSN_LIST, REG_RETVAL, insn_first,
				   REG_NOTES (insn_last));

  REG_NOTES (insn_first) = gen_rtx (INSN_LIST, REG_LIBCALL, insn_last,
				    REG_NOTES (insn_first));
}
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes.  */

void
clear_storage (object, size)
     rtx object;
     int size;
{
  if (GET_MODE (object) == BLKmode)
    {
#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memset_libfunc, 0,
			 VOIDmode, 3,
			 XEXP (object, 0), Pmode, const0_rtx, Pmode,
			 GEN_INT (size), Pmode);
#else
      emit_library_call (bzero_libfunc, 0,
			 VOIDmode, 2,
			 XEXP (object, 0), Pmode,
			 GEN_INT (size), Pmode);
#endif
    }
  else
    emit_move_insn (object, const0_rtx);
}
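
/* Editor's sketch (hypothetical usage): zeroing a 16-byte BLKmode
   object goes through memset/bzero,

	clear_storage (blk_mem, 16);

   while a scalar-mode OBJECT is simply assigned const0_rtx.  */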

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  if (class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
    submode = mode_for_size (GET_MODE_UNIT_SIZE (mode) * BITS_PER_UNIT,
			     (class == MODE_COMPLEX_INT
			      ? MODE_INT : MODE_FLOAT),
			     0);

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && submode != BLKmode
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));
      rtx prev = get_last_insn ();

      /* Tell flow that the whole of the destination is being set.  */
      if (GET_CODE (x) == REG)
	emit_insn (gen_rtx (CLOBBER, VOIDmode, x));

      /* If this is a stack, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 ((stack ? change_address (x, submode, (rtx) 0)
		   : gen_highpart (submode, x)),
		  gen_highpart (submode, y)));
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 ((stack ? change_address (x, submode, (rtx) 0)
		   : gen_lowpart (submode, x)),
		  gen_lowpart (submode, y)));

      group_insns (prev);

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx prev_insn = get_last_insn ();

      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  last_insn = emit_move_insn (xpart, ypart);
	}
      /* Mark these insns as a libcall block.  */
      group_insns (prev_insn);

      return last_insn;
    }
  else
    abort ();
}
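
/* Editor's note (illustrative): a complex move such as DCmode on a
   machine that only has DFmode move patterns is split above into two
   DFmode moves, one per part, and group_insns wraps the pair in
   REG_LIBCALL/REG_RETVAL notes so later passes treat it as a unit.
   Multi-word modes without move patterns are split word by word the
   same way.  */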
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifdef STACK_GROWS_DOWNWARD
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx (PLUS, Pmode, virtual_outgoing_args_rtx,
		    negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
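
/* Editor's sketch (hypothetical usage): to reserve 32 bytes of argument
   space with 4 bytes of padding at the low end,

	rtx addr = push_block (GEN_INT (32), 4, 1);

   ADDR then addresses the beginning of the block and, as noted above,
   is never a QUEUED.  */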
1715
87e38d84 1716rtx
bbf6f052
RK
1717gen_push_operand ()
1718{
1719 return gen_rtx (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
1720}
1721
1722/* Generate code to push X onto the stack, assuming it has mode MODE and
1723 type TYPE.
1724 MODE is redundant except when X is a CONST_INT (since they don't
1725 carry mode info).
1726 SIZE is an rtx for the size of data to be copied (in bytes),
1727 needed only if X is BLKmode.
1728
1729 ALIGN (in bytes) is maximum alignment we can assume.
1730
1731 If PARTIAL is nonzero, then copy that many of the first words
1732 of X into registers starting with REG, and push the rest of X.
1733 The amount of space pushed is decreased by PARTIAL words,
1734 rounded *down* to a multiple of PARM_BOUNDARY.
1735 REG must be a hard register in this case.
1736
1737 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 1738 This is ignored if an argument block has already been allocated.
bbf6f052
RK
1739
1740 On a machine that lacks real push insns, ARGS_ADDR is the address of
1741 the bottom of the argument block for this call. We use indexing off there
1742 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
1743 argument block has not been preallocated.
1744
1745 ARGS_SO_FAR is the size of args previously pushed for this call. */
1746
1747void
1748emit_push_insn (x, mode, type, size, align, partial, reg, extra,
1749 args_addr, args_so_far)
1750 register rtx x;
1751 enum machine_mode mode;
1752 tree type;
1753 rtx size;
1754 int align;
1755 int partial;
1756 rtx reg;
1757 int extra;
1758 rtx args_addr;
1759 rtx args_so_far;
1760{
1761 rtx xinner;
1762 enum direction stack_direction
1763#ifdef STACK_GROWS_DOWNWARD
1764 = downward;
1765#else
1766 = upward;
1767#endif
1768
1769 /* Decide where to pad the argument: `downward' for below,
1770 `upward' for above, or `none' for don't pad it.
1771 Default is below for small data on big-endian machines; else above. */
1772 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
1773
1774 /* Invert direction if stack is post-update. */
1775 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
1776 if (where_pad != none)
1777 where_pad = (where_pad == downward ? upward : downward);
1778
1779 xinner = x = protect_from_queue (x, 0);
1780
1781 if (mode == BLKmode)
1782 {
1783 /* Copy a block into the stack, entirely or partially. */
1784
1785 register rtx temp;
1786 int used = partial * UNITS_PER_WORD;
1787 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
1788 int skip;
1789
1790 if (size == 0)
1791 abort ();
1792
1793 used -= offset;
1794
1795 /* USED is now the # of bytes we need not copy to the stack
1796 because registers will take care of them. */
1797
1798 if (partial != 0)
1799 xinner = change_address (xinner, BLKmode,
1800 plus_constant (XEXP (xinner, 0), used));
1801
1802 /* If the partial register-part of the arg counts in its stack size,
1803 skip the part of stack space corresponding to the registers.
1804 Otherwise, start copying to the beginning of the stack space,
1805 by setting SKIP to 0. */
1806#ifndef REG_PARM_STACK_SPACE
1807 skip = 0;
1808#else
1809 skip = used;
1810#endif
1811
1812#ifdef PUSH_ROUNDING
1813 /* Do it with several push insns if that doesn't take lots of insns
1814 and if there is no difficulty with push insns that skip bytes
1815 on the stack for alignment purposes. */
1816 if (args_addr == 0
1817 && GET_CODE (size) == CONST_INT
1818 && skip == 0
1819 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
1820 < MOVE_RATIO)
bbf6f052
RK
1821 /* Here we avoid the case of a structure whose weak alignment
1822 forces many pushes of a small amount of data,
1823 and such small pushes do rounding that causes trouble. */
e87b4f3f
RS
1824 && ((! STRICT_ALIGNMENT && ! SLOW_UNALIGNED_ACCESS)
1825 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 1826 || PUSH_ROUNDING (align) == align)
bbf6f052
RK
1827 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
1828 {
1829 /* Push padding now if padding above and stack grows down,
1830 or if padding below and stack grows up.
1831 But if space already allocated, this has already been done. */
1832 if (extra && args_addr == 0
1833 && where_pad != none && where_pad != stack_direction)
906c4e36 1834 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
1835
1836 move_by_pieces (gen_rtx (MEM, BLKmode, gen_push_operand ()), xinner,
1837 INTVAL (size) - used, align);
1838 }
1839 else
1840#endif /* PUSH_ROUNDING */
1841 {
1842 /* Otherwise make space on the stack and copy the data
1843 to the address of that space. */
1844
1845 /* Deduct words put into registers from the size we must copy. */
1846 if (partial != 0)
1847 {
1848 if (GET_CODE (size) == CONST_INT)
906c4e36 1849 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
1850 else
1851 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
1852 GEN_INT (used), NULL_RTX, 0,
1853 OPTAB_LIB_WIDEN);
bbf6f052
RK
1854 }
1855
1856 /* Get the address of the stack space.
1857 In this case, we do not deal with EXTRA separately.
1858 A single stack adjust will do. */
1859 if (! args_addr)
1860 {
1861 temp = push_block (size, extra, where_pad == downward);
1862 extra = 0;
1863 }
1864 else if (GET_CODE (args_so_far) == CONST_INT)
1865 temp = memory_address (BLKmode,
1866 plus_constant (args_addr,
1867 skip + INTVAL (args_so_far)));
1868 else
1869 temp = memory_address (BLKmode,
1870 plus_constant (gen_rtx (PLUS, Pmode,
1871 args_addr, args_so_far),
1872 skip));
1873
1874 /* TEMP is the address of the block. Copy the data there. */
1875 if (GET_CODE (size) == CONST_INT
1876 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
1877 < MOVE_RATIO))
1878 {
1879 move_by_pieces (gen_rtx (MEM, BLKmode, temp), xinner,
1880 INTVAL (size), align);
1881 goto ret;
1882 }
1883 /* Try the most limited insn first, because there's no point
1884 including more than one in the machine description unless
1885 the more limited one has some advantage. */
1886#ifdef HAVE_movstrqi
1887 if (HAVE_movstrqi
1888 && GET_CODE (size) == CONST_INT
1889 && ((unsigned) INTVAL (size)
1890 < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
1891 {
1892 emit_insn (gen_movstrqi (gen_rtx (MEM, BLKmode, temp),
1893 xinner, size, GEN_INT (align)));
1894 goto ret;
1895 }
1896#endif
1897#ifdef HAVE_movstrhi
1898 if (HAVE_movstrhi
1899 && GET_CODE (size) == CONST_INT
1900 && ((unsigned) INTVAL (size)
1901 < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
1902 {
1903 emit_insn (gen_movstrhi (gen_rtx (MEM, BLKmode, temp),
1904 xinner, size, GEN_INT (align)));
1905 goto ret;
1906 }
1907#endif
1908#ifdef HAVE_movstrsi
1909 if (HAVE_movstrsi)
1910 {
1911 emit_insn (gen_movstrsi (gen_rtx (MEM, BLKmode, temp),
1912 xinner, size, GEN_INT (align)));
1913 goto ret;
1914 }
1915#endif
1916#ifdef HAVE_movstrdi
1917 if (HAVE_movstrdi)
1918 {
1919 emit_insn (gen_movstrdi (gen_rtx (MEM, BLKmode, temp),
1920 xinner, size, GEN_INT (align)));
1921 goto ret;
1922 }
1923#endif
1924
1925#ifndef ACCUMULATE_OUTGOING_ARGS
1926 /* If the source is referenced relative to the stack pointer,
1927 copy it to another register to stabilize it. We do not need
1928 to do this if we know that we won't be changing sp. */
1929
1930 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
1931 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
1932 temp = copy_to_reg (temp);
1933#endif
1934
1935 /* Make inhibit_defer_pop nonzero around the library call
1936 to force it to pop the bcopy-arguments right away. */
1937 NO_DEFER_POP;
1938#ifdef TARGET_MEM_FUNCTIONS
1939 emit_library_call (memcpy_libfunc, 0,
1940 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
1941 size, Pmode);
1942#else
1943 emit_library_call (bcopy_libfunc, 0,
1944 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
1945 size, Pmode);
1946#endif
1947 OK_DEFER_POP;
1948 }
1949 }
1950 else if (partial > 0)
1951 {
1952 /* Scalar partly in registers. */
1953
1954 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
1955 int i;
1956 int not_stack;
1957 /* # words of start of argument
1958 that we must make space for but need not store. */
1959 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
1960 int args_offset = INTVAL (args_so_far);
1961 int skip;
1962
1963 /* Push padding now if padding above and stack grows down,
1964 or if padding below and stack grows up.
1965 But if space already allocated, this has already been done. */
1966 if (extra && args_addr == 0
1967 && where_pad != none && where_pad != stack_direction)
1968 anti_adjust_stack (GEN_INT (extra));
1969
1970 /* If we make space by pushing it, we might as well push
1971 the real data. Otherwise, we can leave OFFSET nonzero
1972 and leave the space uninitialized. */
1973 if (args_addr == 0)
1974 offset = 0;
1975
1976 /* Now NOT_STACK gets the number of words that we don't need to
1977 allocate on the stack. */
1978 not_stack = partial - offset;
1979
1980 /* If the partial register-part of the arg counts in its stack size,
1981 skip the part of stack space corresponding to the registers.
1982 Otherwise, start copying to the beginning of the stack space,
1983 by setting SKIP to 0. */
1984#ifndef REG_PARM_STACK_SPACE
1985 skip = 0;
1986#else
1987 skip = not_stack;
1988#endif
1989
1990 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1991 x = validize_mem (force_const_mem (mode, x));
1992
1993 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
1994 SUBREGs of such registers are not allowed. */
1995 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
1996 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
1997 x = copy_to_reg (x);
1998
1999 /* Loop over all the words allocated on the stack for this arg. */
2000 /* We can do it by words, because any scalar bigger than a word
2001 has a size that is a multiple of a word. */
2002#ifndef PUSH_ARGS_REVERSED
2003 for (i = not_stack; i < size; i++)
2004#else
2005 for (i = size - 1; i >= not_stack; i--)
2006#endif
2007 if (i >= not_stack + offset)
2008 emit_push_insn (operand_subword_force (x, i, mode),
2009 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2010 0, args_addr,
2011 GEN_INT (args_offset + ((i - not_stack + skip)
2012 * UNITS_PER_WORD)));
2013 }
2014 else
2015 {
2016 rtx addr;
2017
2018 /* Push padding now if padding above and stack grows down,
2019 or if padding below and stack grows up.
2020 But if space already allocated, this has already been done. */
2021 if (extra && args_addr == 0
2022 && where_pad != none && where_pad != stack_direction)
2023 anti_adjust_stack (GEN_INT (extra));
2024
2025#ifdef PUSH_ROUNDING
2026 if (args_addr == 0)
2027 addr = gen_push_operand ();
2028 else
2029#endif
2030 if (GET_CODE (args_so_far) == CONST_INT)
2031 addr
2032 = memory_address (mode,
2033 plus_constant (args_addr, INTVAL (args_so_far)));
2034 else
2035 addr = memory_address (mode, gen_rtx (PLUS, Pmode, args_addr,
2036 args_so_far));
2037
2038 emit_move_insn (gen_rtx (MEM, mode, addr), x);
2039 }
2040
2041 ret:
2042 /* If part should go in registers, copy that part
2043 into the appropriate registers. Do this now, at the end,
2044 since mem-to-mem copies above may do function calls. */
2045 if (partial > 0)
2046 move_block_to_reg (REGNO (reg), x, partial, mode);
2047
2048 if (extra && args_addr == 0 && where_pad == stack_direction)
2049 anti_adjust_stack (GEN_INT (extra));
2050}
2051\f
2052/* Output a library call to function FUN (a SYMBOL_REF rtx)
2053 (emitting the queue unless NO_QUEUE is nonzero),
2054 for a value of mode OUTMODE,
2055 with NARGS different arguments, passed as alternating rtx values
2056 and machine_modes to convert them to.
2057 The rtx values should have been passed through protect_from_queue already.
2058
2059 NO_QUEUE will be true if and only if the library call is a `const' call
2060 which will be enclosed in REG_LIBCALL/REG_RETVAL notes; it is equivalent
2061 to the variable is_const in expand_call.
2062
2063 NO_QUEUE must be true for const calls, because if it isn't, then
2064 any pending increment will be emitted between REG_LIBCALL/REG_RETVAL notes,
2065 and will be lost if the libcall sequence is optimized away.
2066
2067 NO_QUEUE must be false for non-const calls, because if it isn't, the
2068 call insn will have its CONST_CALL_P bit set, and it will be incorrectly
2069 optimized. For instance, the instruction scheduler may incorrectly
2070 move memory references across the non-const call. */
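/* Illustrative example (not part of the original source): a caller
   supplies FUN, NO_QUEUE, OUTMODE, NARGS, and then NARGS rtx/mode pairs,
   as in the block-move code in emit_push_insn above:

     emit_library_call (memcpy_libfunc, 0, VOIDmode, 3,
                        dest, Pmode, src, Pmode, size, Pmode);

   where DEST, SRC and SIZE are hypothetical rtx operands of mode Pmode. */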
2071
2072void
2073emit_library_call (va_alist)
2074 va_dcl
2075{
2076 va_list p;
2077 struct args_size args_size;
2078 register int argnum;
2079 enum machine_mode outmode;
2080 int nargs;
2081 rtx fun;
2082 rtx orgfun;
2083 int inc;
2084 int count;
2085 rtx argblock = 0;
2086 CUMULATIVE_ARGS args_so_far;
2087 struct arg { rtx value; enum machine_mode mode; rtx reg; int partial;
2088 struct args_size offset; struct args_size size; };
2089 struct arg *argvec;
2090 int old_inhibit_defer_pop = inhibit_defer_pop;
2091 int no_queue = 0;
2092 rtx use_insns;
2093
2094 va_start (p);
2095 orgfun = fun = va_arg (p, rtx);
2096 no_queue = va_arg (p, int);
2097 outmode = va_arg (p, enum machine_mode);
2098 nargs = va_arg (p, int);
2099
2100 /* Copy all the libcall-arguments out of the varargs data
2101 and into a vector ARGVEC.
2102
2103 Compute how to pass each argument. We only support a very small subset
2104 of the full argument passing conventions to limit complexity here since
2105 library functions shouldn't have many args. */
2106
2107 argvec = (struct arg *) alloca (nargs * sizeof (struct arg));
2108
2109 INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE, fun);
2110
2111 args_size.constant = 0;
2112 args_size.var = 0;
2113
2114 for (count = 0; count < nargs; count++)
2115 {
2116 rtx val = va_arg (p, rtx);
2117 enum machine_mode mode = va_arg (p, enum machine_mode);
2118
2119 /* We cannot convert the arg value to the mode the library wants here;
2120 must do it earlier where we know the signedness of the arg. */
2121 if (mode == BLKmode
2122 || (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode))
2123 abort ();
2124
2125 /* On some machines, there's no way to pass a float to a library fcn.
2126 Pass it as a double instead. */
2127#ifdef LIBGCC_NEEDS_DOUBLE
2128 if (LIBGCC_NEEDS_DOUBLE && mode == SFmode)
2129 val = convert_to_mode (DFmode, val, 0), mode = DFmode;
2130#endif
2131
2132 /* There's no need to call protect_from_queue, because
2133 either emit_move_insn or emit_push_insn will do that. */
2134
2135 /* Make sure it is a reasonable operand for a move or push insn. */
2136 if (GET_CODE (val) != REG && GET_CODE (val) != MEM
2137 && ! (CONSTANT_P (val) && LEGITIMATE_CONSTANT_P (val)))
2138 val = force_operand (val, NULL_RTX);
2139
2140 argvec[count].value = val;
2141 argvec[count].mode = mode;
2142
2143#ifdef FUNCTION_ARG_PASS_BY_REFERENCE
2144 if (FUNCTION_ARG_PASS_BY_REFERENCE (args_so_far, mode, NULL_TREE, 1))
2145 abort ();
2146#endif
2147
2148 argvec[count].reg = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
2149 if (argvec[count].reg && GET_CODE (argvec[count].reg) == EXPR_LIST)
2150 abort ();
2151#ifdef FUNCTION_ARG_PARTIAL_NREGS
2152 argvec[count].partial
2153 = FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode, NULL_TREE, 1);
2154#else
2155 argvec[count].partial = 0;
2156#endif
2157
2158 locate_and_pad_parm (mode, NULL_TREE,
2159 argvec[count].reg && argvec[count].partial == 0,
2160 NULL_TREE, &args_size, &argvec[count].offset,
2161 &argvec[count].size);
2162
2163 if (argvec[count].size.var)
2164 abort ();
2165
2166#ifndef REG_PARM_STACK_SPACE
2167 if (argvec[count].partial)
2168 argvec[count].size.constant -= argvec[count].partial * UNITS_PER_WORD;
2169#endif
2170
2171 if (argvec[count].reg == 0 || argvec[count].partial != 0
2172#ifdef REG_PARM_STACK_SPACE
2173 || 1
2174#endif
2175 )
2176 args_size.constant += argvec[count].size.constant;
2177
2178#ifdef ACCUMULATE_OUTGOING_ARGS
2179 /* If this arg is actually passed on the stack, it might be
2180 clobbering something we already put there (this library call might
2181 be inside the evaluation of an argument to a function whose call
2182 requires the stack). This will only occur when the library call
2183 has sufficient args to run out of argument registers. Abort in
2184 this case; if this ever occurs, code must be added to save and
2185 restore the arg slot. */
2186
2187 if (argvec[count].reg == 0 || argvec[count].partial != 0)
2188 abort ();
2189#endif
2190
2191 FUNCTION_ARG_ADVANCE (args_so_far, mode, (tree)0, 1);
2192 }
2193 va_end (p);
2194
2195 /* If this machine requires an external definition for library
2196 functions, write one out. */
2197 assemble_external_libcall (fun);
2198
2199#ifdef STACK_BOUNDARY
2200 args_size.constant = (((args_size.constant + (STACK_BYTES - 1))
2201 / STACK_BYTES) * STACK_BYTES);
2202#endif
2203
2204#ifdef REG_PARM_STACK_SPACE
2205 args_size.constant = MAX (args_size.constant,
2206 REG_PARM_STACK_SPACE ((tree) 0));
2207#endif
2208
2209#ifdef ACCUMULATE_OUTGOING_ARGS
2210 if (args_size.constant > current_function_outgoing_args_size)
2211 current_function_outgoing_args_size = args_size.constant;
2212 args_size.constant = 0;
2213#endif
2214
2215#ifndef PUSH_ROUNDING
2216 argblock = push_block (GEN_INT (args_size.constant), 0, 0);
2217#endif
2218
2219#ifdef PUSH_ARGS_REVERSED
2220 inc = -1;
2221 argnum = nargs - 1;
2222#else
2223 inc = 1;
2224 argnum = 0;
2225#endif
2226
2227 /* Push the args that need to be pushed. */
2228
2229 for (count = 0; count < nargs; count++, argnum += inc)
2230 {
2231 register enum machine_mode mode = argvec[argnum].mode;
2232 register rtx val = argvec[argnum].value;
2233 rtx reg = argvec[argnum].reg;
2234 int partial = argvec[argnum].partial;
2235
2236 if (! (reg != 0 && partial == 0))
2237 emit_push_insn (val, mode, NULL_TREE, NULL_RTX, 0, partial, reg, 0,
2238 argblock, GEN_INT (argvec[count].offset.constant));
2239 NO_DEFER_POP;
2240 }
2241
2242#ifdef PUSH_ARGS_REVERSED
2243 argnum = nargs - 1;
2244#else
2245 argnum = 0;
2246#endif
2247
2248 /* Now load any reg parms into their regs. */
2249
2250 for (count = 0; count < nargs; count++, argnum += inc)
2251 {
2252 register enum machine_mode mode = argvec[argnum].mode;
2253 register rtx val = argvec[argnum].value;
2254 rtx reg = argvec[argnum].reg;
2255 int partial = argvec[argnum].partial;
2256
2257 if (reg != 0 && partial == 0)
2258 emit_move_insn (reg, val);
2259 NO_DEFER_POP;
2260 }
2261
2262 /* For version 1.37, try deleting this entirely. */
2263 if (! no_queue)
2264 emit_queue ();
2265
2266 /* Any regs containing parms remain in use through the call. */
2267 start_sequence ();
2268 for (count = 0; count < nargs; count++)
2269 if (argvec[count].reg != 0)
2270 emit_insn (gen_rtx (USE, VOIDmode, argvec[count].reg));
2271
2272 use_insns = get_insns ();
2273 end_sequence ();
2274
2275 fun = prepare_call_address (fun, NULL_TREE, &use_insns);
2276
2277 /* Don't allow popping to be deferred, since then
2278 cse'ing of library calls could delete a call and leave the pop. */
2279 NO_DEFER_POP;
2280
2281 /* We pass the old value of inhibit_defer_pop + 1 to emit_call_1, which
2282 will set inhibit_defer_pop to that value. */
2283
2284 emit_call_1 (fun, get_identifier (XSTR (orgfun, 0)), args_size.constant, 0,
2285 FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1),
2286 outmode != VOIDmode ? hard_libcall_value (outmode) : NULL_RTX,
2287 old_inhibit_defer_pop + 1, use_insns, no_queue);
2288
2289 /* Now restore inhibit_defer_pop to its actual original value. */
2290 OK_DEFER_POP;
2291}
2292\f
2293/* Expand an assignment that stores the value of FROM into TO.
2294 If WANT_VALUE is nonzero, return an rtx for the value of TO.
2295 (This may contain a QUEUED rtx.)
2296 Otherwise, the returned value is not meaningful.
2297
2298 SUGGEST_REG is no longer actually used.
2299 It used to mean, copy the value through a register
2300 and return that register, if that is possible.
2301 But now we do this if WANT_VALUE.
2302
2303 If the value stored is a constant, we return the constant. */
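/* Illustrative example (hypothetical trees): for a C statement `x = y;'
   a front end would make a call along the lines of

     expand_assignment (x_tree, y_tree, 0, 0);

   passing WANT_VALUE nonzero instead when the assignment is itself used
   as a value, as in `z = (x = y);'. */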
2304
2305rtx
2306expand_assignment (to, from, want_value, suggest_reg)
2307 tree to, from;
2308 int want_value;
2309 int suggest_reg;
2310{
2311 register rtx to_rtx = 0;
2312 rtx result;
2313
2314 /* Don't crash if the lhs of the assignment was erroneous. */
2315
2316 if (TREE_CODE (to) == ERROR_MARK)
2317 return expand_expr (from, NULL_RTX, VOIDmode, 0);
2318
2319 /* Assignment of a structure component needs special treatment
2320 if the structure component's rtx is not simply a MEM.
2321 Assignment of an array element at a constant index
2322 has the same problem. */
2323
2324 if (TREE_CODE (to) == COMPONENT_REF
2325 || TREE_CODE (to) == BIT_FIELD_REF
2326 || (TREE_CODE (to) == ARRAY_REF
2327 && TREE_CODE (TREE_OPERAND (to, 1)) == INTEGER_CST
2328 && TREE_CODE (TYPE_SIZE (TREE_TYPE (to))) == INTEGER_CST))
2329 {
2330 enum machine_mode mode1;
2331 int bitsize;
2332 int bitpos;
2333 tree offset;
2334 int unsignedp;
2335 int volatilep = 0;
2336 tree tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
2337 &mode1, &unsignedp, &volatilep);
2338
2339 /* If we are going to use store_bit_field and extract_bit_field,
2340 make sure to_rtx will be safe for multiple use. */
2341
2342 if (mode1 == VOIDmode && want_value)
2343 tem = stabilize_reference (tem);
2344
2345 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
2346 if (offset != 0)
2347 {
2348 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
2349
2350 if (GET_CODE (to_rtx) != MEM)
2351 abort ();
2352 to_rtx = change_address (to_rtx, VOIDmode,
2353 gen_rtx (PLUS, Pmode, XEXP (to_rtx, 0),
2354 force_reg (Pmode, offset_rtx)));
2355 }
2356 if (volatilep)
2357 {
2358 if (GET_CODE (to_rtx) == MEM)
2359 MEM_VOLATILE_P (to_rtx) = 1;
2360#if 0 /* This was turned off because, when a field is volatile
2361 in an object which is not volatile, the object may be in a register,
2362 and then we would abort over here. */
2363 else
2364 abort ();
2365#endif
2366 }
2367
2368 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2369 (want_value
2370 /* Spurious cast makes HPUX compiler happy. */
2371 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2372 : VOIDmode),
2373 unsignedp,
2374 /* Required alignment of containing datum. */
2375 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
2376 int_size_in_bytes (TREE_TYPE (tem)));
2377 preserve_temp_slots (result);
2378 free_temp_slots ();
2379
2380 return result;
2381 }
2382
2383 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
2384 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
2385
2386 if (to_rtx == 0)
2387 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, 0);
2388
2389 /* In case we are returning the contents of an object which overlaps
2390 the place the value is being stored, use a safe function when copying
2391 a value through a pointer into a structure value return block. */
2392 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
2393 && current_function_returns_struct
2394 && !current_function_returns_pcc_struct)
2395 {
2396 rtx from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
2397 rtx size = expr_size (from);
2398
2399#ifdef TARGET_MEM_FUNCTIONS
2400 emit_library_call (memcpy_libfunc, 0,
2401 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
2402 XEXP (from_rtx, 0), Pmode,
2403 size, Pmode);
2404#else
2405 emit_library_call (bcopy_libfunc, 0,
2406 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
2407 XEXP (to_rtx, 0), Pmode,
2408 size, Pmode);
2409#endif
2410
2411 preserve_temp_slots (to_rtx);
2412 free_temp_slots ();
2413 return to_rtx;
2414 }
2415
2416 /* Compute FROM and store the value in the rtx we got. */
2417
2418 result = store_expr (from, to_rtx, want_value);
2419 preserve_temp_slots (result);
2420 free_temp_slots ();
2421 return result;
2422}
2423
2424/* Generate code for computing expression EXP,
2425 and storing the value into TARGET.
2426 Returns TARGET or an equivalent value.
2427 TARGET may contain a QUEUED rtx.
2428
2429 If SUGGEST_REG is nonzero, copy the value through a register
2430 and return that register, if that is possible.
2431
2432 If the value stored is a constant, we return the constant. */
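/* Usage sketch: expand_assignment above finishes with

     result = store_expr (from, to_rtx, want_value);

   which computes FROM directly into TO_RTX when that is safe; the
   returned rtx matters only when the caller wants the stored value. */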
2433
2434rtx
2435store_expr (exp, target, suggest_reg)
2436 register tree exp;
2437 register rtx target;
2438 int suggest_reg;
2439{
2440 register rtx temp;
2441 int dont_return_target = 0;
2442
2443 if (TREE_CODE (exp) == COMPOUND_EXPR)
2444 {
2445 /* Perform first part of compound expression, then assign from second
2446 part. */
2447 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
2448 emit_queue ();
2449 return store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2450 }
2451 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
2452 {
2453 /* For conditional expression, get safe form of the target. Then
2454 test the condition, doing the appropriate assignment on either
2455 side. This avoids the creation of unnecessary temporaries.
2456 For non-BLKmode, it is more efficient not to do this. */
2457
2458 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
2459
2460 emit_queue ();
2461 target = protect_from_queue (target, 1);
2462
2463 NO_DEFER_POP;
2464 jumpifnot (TREE_OPERAND (exp, 0), lab1);
2465 store_expr (TREE_OPERAND (exp, 1), target, suggest_reg);
2466 emit_queue ();
2467 emit_jump_insn (gen_jump (lab2));
2468 emit_barrier ();
2469 emit_label (lab1);
2470 store_expr (TREE_OPERAND (exp, 2), target, suggest_reg);
2471 emit_queue ();
2472 emit_label (lab2);
2473 OK_DEFER_POP;
2474 return target;
2475 }
2476 else if (suggest_reg && GET_CODE (target) == MEM
2477 && GET_MODE (target) != BLKmode)
2478 /* If target is in memory and caller wants value in a register instead,
2479 arrange that. Pass TARGET as target for expand_expr so that,
2480 if EXP is another assignment, SUGGEST_REG will be nonzero for it.
2481 We know expand_expr will not use the target in that case. */
2482 {
2483 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
2484 GET_MODE (target), 0);
2485 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
2486 temp = copy_to_reg (temp);
2487 dont_return_target = 1;
2488 }
2489 else if (queued_subexp_p (target))
2490 /* If target contains a postincrement, it is not safe
2491 to use as the returned value. It would access the wrong
2492 place by the time the queued increment gets output.
2493 So copy the value through a temporary and use that temp
2494 as the result. */
2495 {
2496 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
2497 {
2498 /* Expand EXP into a new pseudo. */
2499 temp = gen_reg_rtx (GET_MODE (target));
2500 temp = expand_expr (exp, temp, GET_MODE (target), 0);
2501 }
2502 else
2503 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
2504 dont_return_target = 1;
2505 }
2506 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
2507 /* If this is a scalar in a register that is stored in a wider mode
2508 than the declared mode, compute the result into its declared mode
2509 and then convert to the wider mode. Our value is the computed
2510 expression. */
2511 {
2512 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2513 convert_move (SUBREG_REG (target), temp,
2514 SUBREG_PROMOTED_UNSIGNED_P (target));
2515 return temp;
2516 }
2517 else
2518 {
2519 temp = expand_expr (exp, target, GET_MODE (target), 0);
2520 /* DO return TARGET if it's a specified hardware register.
2521 expand_return relies on this. */
2522 if (!(target && GET_CODE (target) == REG
2523 && REGNO (target) < FIRST_PSEUDO_REGISTER)
2524 && CONSTANT_P (temp))
2525 dont_return_target = 1;
2526 }
2527
2528 /* If value was not generated in the target, store it there.
2529 Convert the value to TARGET's type first if necessary. */
2530
2531 if (temp != target && TREE_CODE (exp) != ERROR_MARK)
2532 {
2533 target = protect_from_queue (target, 1);
2534 if (GET_MODE (temp) != GET_MODE (target)
2535 && GET_MODE (temp) != VOIDmode)
2536 {
2537 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2538 if (dont_return_target)
2539 {
2540 /* In this case, we will return TEMP,
2541 so make sure it has the proper mode.
2542 But don't forget to store the value into TARGET. */
2543 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
2544 emit_move_insn (target, temp);
2545 }
2546 else
2547 convert_move (target, temp, unsignedp);
2548 }
2549
2550 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
2551 {
2552 /* Handle copying a string constant into an array.
2553 The string constant may be shorter than the array.
2554 So copy just the string's actual length, and clear the rest. */
2555 rtx size;
2556
2557 /* Get the size of the data type of the string,
2558 which is actually the size of the target. */
2559 size = expr_size (exp);
2560 if (GET_CODE (size) == CONST_INT
2561 && INTVAL (size) < TREE_STRING_LENGTH (exp))
2562 emit_block_move (target, temp, size,
2563 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2564 else
2565 {
2566 /* Compute the size of the data to copy from the string. */
2567 tree copy_size
2568 = fold (build (MIN_EXPR, sizetype,
2569 size_binop (CEIL_DIV_EXPR,
2570 TYPE_SIZE (TREE_TYPE (exp)),
2571 size_int (BITS_PER_UNIT)),
2572 convert (sizetype,
2573 build_int_2 (TREE_STRING_LENGTH (exp), 0))));
2574 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
2575 VOIDmode, 0);
2576 rtx label = 0;
2577
2578 /* Copy that much. */
2579 emit_block_move (target, temp, copy_size_rtx,
2580 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2581
2582 /* Figure out how much is left in TARGET
2583 that we have to clear. */
2584 if (GET_CODE (copy_size_rtx) == CONST_INT)
2585 {
2586 temp = plus_constant (XEXP (target, 0),
2587 TREE_STRING_LENGTH (exp));
2588 size = plus_constant (size,
2589 - TREE_STRING_LENGTH (exp));
2590 }
2591 else
2592 {
2593 enum machine_mode size_mode = Pmode;
2594
2595 temp = force_reg (Pmode, XEXP (target, 0));
2596 temp = expand_binop (size_mode, add_optab, temp,
2597 copy_size_rtx, NULL_RTX, 0,
2598 OPTAB_LIB_WIDEN);
2599
2600 size = expand_binop (size_mode, sub_optab, size,
2601 copy_size_rtx, NULL_RTX, 0,
2602 OPTAB_LIB_WIDEN);
2603
2604 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
2605 GET_MODE (size), 0, 0);
2606 label = gen_label_rtx ();
2607 emit_jump_insn (gen_blt (label));
2608 }
2609
2610 if (size != const0_rtx)
2611 {
2612#ifdef TARGET_MEM_FUNCTIONS
2613 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
2614 temp, Pmode, const0_rtx, Pmode, size, Pmode);
2615#else
2616 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
2617 temp, Pmode, size, Pmode);
2618#endif
2619 }
2620 if (label)
2621 emit_label (label);
2622 }
2623 }
2624 else if (GET_MODE (temp) == BLKmode)
2625 emit_block_move (target, temp, expr_size (exp),
2626 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
2627 else
2628 emit_move_insn (target, temp);
2629 }
2630 if (dont_return_target)
2631 return temp;
2632 return target;
2633}
2634\f
2635/* Store the value of constructor EXP into the rtx TARGET.
2636 TARGET is either a REG or a MEM. */
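/* Illustrative example: for an initializer such as
     struct { int a, b; } s = { 1, 2 };
   the loop below makes one store_field call per constructor element,
   after clearing the whole object if any field is left out (a sketch
   of the behavior coded below, not additional semantics). */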
2637
2638static void
2639store_constructor (exp, target)
2640 tree exp;
2641 rtx target;
2642{
2643 tree type = TREE_TYPE (exp);
2644
2645 /* We know our target cannot conflict, since safe_from_p has been called. */
2646#if 0
2647 /* Don't try copying piece by piece into a hard register
2648 since that is vulnerable to being clobbered by EXP.
2649 Instead, construct in a pseudo register and then copy it all. */
2650 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
2651 {
2652 rtx temp = gen_reg_rtx (GET_MODE (target));
2653 store_constructor (exp, temp);
2654 emit_move_insn (target, temp);
2655 return;
2656 }
2657#endif
2658
2659 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE)
2660 {
2661 register tree elt;
2662
2663 /* Inform later passes that the whole union value is dead. */
2664 if (TREE_CODE (type) == UNION_TYPE)
2665 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2666
2667 /* If we are building a static constructor into a register,
2668 set the initial value as zero so we can fold the value into
2669 a constant. */
2670 else if (GET_CODE (target) == REG && TREE_STATIC (exp))
2671 emit_move_insn (target, const0_rtx);
2672
2673 /* If the constructor has fewer fields than the structure,
2674 clear the whole structure first. */
2675 else if (list_length (CONSTRUCTOR_ELTS (exp))
2676 != list_length (TYPE_FIELDS (type)))
2677 clear_storage (target, int_size_in_bytes (type));
2678 else
2679 /* Inform later passes that the old value is dead. */
2680 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2681
2682 /* Store each element of the constructor into
2683 the corresponding field of TARGET. */
2684
2685 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
2686 {
2687 register tree field = TREE_PURPOSE (elt);
2688 register enum machine_mode mode;
2689 int bitsize;
2690 int bitpos;
2691 int unsignedp;
2692
2693 /* Just ignore missing fields.
2694 We cleared the whole structure, above,
2695 if any fields are missing. */
2696 if (field == 0)
2697 continue;
2698
2699 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
2700 unsignedp = TREE_UNSIGNED (field);
2701 mode = DECL_MODE (field);
2702 if (DECL_BIT_FIELD (field))
2703 mode = VOIDmode;
2704
2705 if (TREE_CODE (DECL_FIELD_BITPOS (field)) != INTEGER_CST)
2706 /* ??? This case remains to be written. */
2707 abort ();
2708
2709 bitpos = TREE_INT_CST_LOW (DECL_FIELD_BITPOS (field));
2710
2711 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2712 /* The alignment of TARGET is
2713 at least what its type requires. */
2714 VOIDmode, 0,
2715 TYPE_ALIGN (type) / BITS_PER_UNIT,
2716 int_size_in_bytes (type));
2717 }
2718 }
2719 else if (TREE_CODE (type) == ARRAY_TYPE)
2720 {
2721 register tree elt;
2722 register int i;
2723 tree domain = TYPE_DOMAIN (type);
2724 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
2725 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
2726 tree elttype = TREE_TYPE (type);
2727
2728 /* If the constructor has fewer fields than the structure,
2729 clear the whole structure first. Similarly if this is a
2730 static constructor of a non-BLKmode object. */
2731
2732 if (list_length (CONSTRUCTOR_ELTS (exp)) < maxelt - minelt + 1
2733 || (GET_CODE (target) == REG && TREE_STATIC (exp)))
2734 clear_storage (target, maxelt - minelt + 1);
2735 else
2736 /* Inform later passes that the old value is dead. */
2737 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
2738
2739 /* Store each element of the constructor into
2740 the corresponding element of TARGET, determined
2741 by counting the elements. */
2742 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
2743 elt;
2744 elt = TREE_CHAIN (elt), i++)
2745 {
2746 register enum machine_mode mode;
2747 int bitsize;
2748 int bitpos;
2749 int unsignedp;
2750
2751 mode = TYPE_MODE (elttype);
2752 bitsize = GET_MODE_BITSIZE (mode);
2753 unsignedp = TREE_UNSIGNED (elttype);
2754
2755 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
2756
2757 store_field (target, bitsize, bitpos, mode, TREE_VALUE (elt),
2758 /* The alignment of TARGET is
2759 at least what its type requires. */
2760 VOIDmode, 0,
2761 TYPE_ALIGN (type) / BITS_PER_UNIT,
2762 int_size_in_bytes (type));
2763 }
2764 }
2765
2766 else
2767 abort ();
2768}
2769
2770/* Store the value of EXP (an expression tree)
2771 into a subfield of TARGET which has mode MODE and occupies
2772 BITSIZE bits, starting BITPOS bits from the start of TARGET.
2773 If MODE is VOIDmode, it means that we are storing into a bit-field.
2774
2775 If VALUE_MODE is VOIDmode, return nothing in particular.
2776 UNSIGNEDP is not used in this case.
2777
2778 Otherwise, return an rtx for the value stored. This rtx
2779 has mode VALUE_MODE if that is convenient to do.
2780 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
2781
2782 ALIGN is the alignment that TARGET is known to have, measured in bytes.
2783 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
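/* Illustrative example (hypothetical values): storing EXP into a 6-bit
   field that starts at bit 10 of an aligned structure would look like

     store_field (target, 6, 10, VOIDmode, exp, VOIDmode, 0,
                  align, total_size);

   where MODE == VOIDmode selects the bit-field path below. */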
2784
2785static rtx
2786store_field (target, bitsize, bitpos, mode, exp, value_mode,
2787 unsignedp, align, total_size)
2788 rtx target;
2789 int bitsize, bitpos;
2790 enum machine_mode mode;
2791 tree exp;
2792 enum machine_mode value_mode;
2793 int unsignedp;
2794 int align;
2795 int total_size;
2796{
2797 HOST_WIDE_INT width_mask = 0;
2798
2799 if (bitsize < HOST_BITS_PER_WIDE_INT)
2800 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
2801
2802 /* If we are storing into an unaligned field of an aligned union that is
2803 in a register, we may have the mode of TARGET being an integer mode but
2804 MODE == BLKmode. In that case, get an aligned object whose size and
2805 alignment are the same as TARGET and store TARGET into it (we can avoid
2806 the store if the field being stored is the entire width of TARGET). Then
2807 call ourselves recursively to store the field into a BLKmode version of
2808 that object. Finally, load from the object into TARGET. This is not
2809 very efficient in general, but should only be slightly more expensive
2810 than the otherwise-required unaligned accesses. Perhaps this can be
2811 cleaned up later. */
2812
2813 if (mode == BLKmode
2814 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
2815 {
2816 rtx object = assign_stack_temp (GET_MODE (target),
2817 GET_MODE_SIZE (GET_MODE (target)), 0);
2818 rtx blk_object = copy_rtx (object);
2819
2820 PUT_MODE (blk_object, BLKmode);
2821
2822 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
2823 emit_move_insn (object, target);
2824
2825 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
2826 align, total_size);
2827
2828 emit_move_insn (target, object);
2829
2830 return target;
2831 }
2832
2833 /* If the structure is in a register or if the component
2834 is a bit field, we cannot use addressing to access it.
2835 Use bit-field techniques or SUBREG to store in it. */
2836
2837 if (mode == VOIDmode
2838 || (mode != BLKmode && ! direct_store[(int) mode])
2839 || GET_CODE (target) == REG
2840 || GET_CODE (target) == SUBREG)
2841 {
2842 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
2843 /* Store the value in the bitfield. */
2844 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
2845 if (value_mode != VOIDmode)
2846 {
2847 /* The caller wants an rtx for the value. */
2848 /* If possible, avoid refetching from the bitfield itself. */
2849 if (width_mask != 0
2850 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
2851 {
2852 tree count;
2853 enum machine_mode tmode;
2854
2855 if (unsignedp)
2856 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
2857 tmode = GET_MODE (temp);
2858 if (tmode == VOIDmode)
2859 tmode = value_mode;
2860 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
2861 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
2862 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
2863 }
2864 return extract_bit_field (target, bitsize, bitpos, unsignedp,
2865 NULL_RTX, value_mode, 0, align,
2866 total_size);
2867 }
2868 return const0_rtx;
2869 }
2870 else
2871 {
2872 rtx addr = XEXP (target, 0);
2873 rtx to_rtx;
2874
2875 /* If a value is wanted, it must be the lhs;
2876 so make the address stable for multiple use. */
2877
2878 if (value_mode != VOIDmode && GET_CODE (addr) != REG
2879 && ! CONSTANT_ADDRESS_P (addr)
2880 /* A frame-pointer reference is already stable. */
2881 && ! (GET_CODE (addr) == PLUS
2882 && GET_CODE (XEXP (addr, 1)) == CONST_INT
2883 && (XEXP (addr, 0) == virtual_incoming_args_rtx
2884 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
2885 addr = copy_to_reg (addr);
2886
2887 /* Now build a reference to just the desired component. */
2888
2889 to_rtx = change_address (target, mode,
2890 plus_constant (addr, (bitpos / BITS_PER_UNIT)));
2891 MEM_IN_STRUCT_P (to_rtx) = 1;
2892
2893 return store_expr (exp, to_rtx, value_mode != VOIDmode);
2894 }
2895}
2896\f
2897/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
2898 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
2899 ARRAY_REFs at constant positions and find the ultimate containing object,
2900 which we return.
2901
2902 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
2903 bit position, and *PUNSIGNEDP to the signedness of the field.
2904 If the position of the field is variable, we store a tree
2905 giving the variable offset (in units) in *POFFSET.
2906 This offset is in addition to the bit position.
2907 If the position is not variable, we store 0 in *POFFSET.
2908
2909 If any of the extraction expressions is volatile,
2910 we store 1 in *PVOLATILEP. Otherwise we don't change that.
2911
2912 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
2913 is a mode that can be used to access the field. In that case, *PBITSIZE
2914 is redundant.
2915
2916 If the field describes a variable-sized object, *PMODE is set to
2917 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
2918 this case, but the address of the object can be found. */
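/* Illustrative example (hypothetical layout): for an access `s.f' where
   F is a non-bit-field member 4 bytes into S, the call used in
   expand_assignment above,

     tem = get_inner_reference (to, &bitsize, &bitpos, &offset,
                                &mode1, &unsignedp, &volatilep);

   returns the decl for S with *PBITPOS == 32, *POFFSET == 0, and
   *PMODE set to F's mode. */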
2919
2920tree
2921 get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode, punsignedp, pvolatilep)
2922 tree exp;
2923 int *pbitsize;
2924 int *pbitpos;
2925 tree *poffset;
2926 enum machine_mode *pmode;
2927 int *punsignedp;
2928 int *pvolatilep;
2929{
2930 tree size_tree = 0;
2931 enum machine_mode mode = VOIDmode;
2932 tree offset = 0;
2933
2934 if (TREE_CODE (exp) == COMPONENT_REF)
2935 {
2936 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
2937 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
2938 mode = DECL_MODE (TREE_OPERAND (exp, 1));
2939 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
2940 }
2941 else if (TREE_CODE (exp) == BIT_FIELD_REF)
2942 {
2943 size_tree = TREE_OPERAND (exp, 1);
2944 *punsignedp = TREE_UNSIGNED (exp);
2945 }
2946 else
2947 {
2948 mode = TYPE_MODE (TREE_TYPE (exp));
2949 *pbitsize = GET_MODE_BITSIZE (mode);
2950 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
2951 }
2952
2953 if (size_tree)
2954 {
2955 if (TREE_CODE (size_tree) != INTEGER_CST)
2956 mode = BLKmode, *pbitsize = -1;
2957 else
2958 *pbitsize = TREE_INT_CST_LOW (size_tree);
2959 }
2960
2961 /* Compute cumulative bit-offset for nested component-refs and array-refs,
2962 and find the ultimate containing object. */
2963
2964 *pbitpos = 0;
2965
2966 while (1)
2967 {
2968 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
2969 {
2970 tree pos = (TREE_CODE (exp) == COMPONENT_REF
2971 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
2972 : TREE_OPERAND (exp, 2));
2973
2974 if (TREE_CODE (pos) == PLUS_EXPR)
2975 {
2976 tree constant, var;
2977 if (TREE_CODE (TREE_OPERAND (pos, 0)) == INTEGER_CST)
2978 {
2979 constant = TREE_OPERAND (pos, 0);
2980 var = TREE_OPERAND (pos, 1);
2981 }
2982 else if (TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
2983 {
2984 constant = TREE_OPERAND (pos, 1);
2985 var = TREE_OPERAND (pos, 0);
2986 }
2987 else
2988 abort ();
2989 *pbitpos += TREE_INT_CST_LOW (constant);
2990 if (offset)
2991 offset = size_binop (PLUS_EXPR, offset,
2992 size_binop (FLOOR_DIV_EXPR, var,
2993 size_int (BITS_PER_UNIT)));
2994 else
2995 offset = size_binop (FLOOR_DIV_EXPR, var,
2996 size_int (BITS_PER_UNIT));
2997 }
2998 else if (TREE_CODE (pos) == INTEGER_CST)
2999 *pbitpos += TREE_INT_CST_LOW (pos);
3000 else
3001 {
3002 /* Assume here that the offset is a multiple of a unit.
3003 If not, there should be an explicitly added constant. */
3004 if (offset)
3005 offset = size_binop (PLUS_EXPR, offset,
3006 size_binop (FLOOR_DIV_EXPR, pos,
3007 size_int (BITS_PER_UNIT)));
3008 else
3009 offset = size_binop (FLOOR_DIV_EXPR, pos,
3010 size_int (BITS_PER_UNIT));
3011 }
3012 }
3013
3014 else if (TREE_CODE (exp) == ARRAY_REF
3015 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
3016 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
3017 {
3018 *pbitpos += (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
3019 * TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))));
3020 }
3021 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
3022 && ! ((TREE_CODE (exp) == NOP_EXPR
3023 || TREE_CODE (exp) == CONVERT_EXPR)
3024 && (TYPE_MODE (TREE_TYPE (exp))
3025 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
3026 break;
3027
3028 /* If any reference in the chain is volatile, the effect is volatile. */
3029 if (TREE_THIS_VOLATILE (exp))
3030 *pvolatilep = 1;
3031 exp = TREE_OPERAND (exp, 0);
3032 }
3033
3034 /* If this was a bit-field, see if there is a mode that allows direct
3035 access in case EXP is in memory. */
3036 if (mode == VOIDmode && *pbitpos % *pbitsize == 0)
3037 {
3038 mode = mode_for_size (*pbitsize, MODE_INT, 0);
3039 if (mode == BLKmode)
3040 mode = VOIDmode;
3041 }
3042
3043 *pmode = mode;
3044 *poffset = offset;
3045#if 0
3046 /* We aren't finished fixing the callers to really handle nonzero offset. */
3047 if (offset != 0)
3048 abort ();
3049#endif
3050
3051 return exp;
3052}
3053\f
3054/* Given an rtx VALUE that may contain additions and multiplications,
3055 return an equivalent value that just refers to a register or memory.
3056 This is done by generating instructions to perform the arithmetic
3057 and returning a pseudo-register containing the value.
3058
3059 The returned value may be a REG, SUBREG, MEM or constant. */
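/* Illustrative sketch (hypothetical operands): given
   VALUE == (plus:SI (reg:SI 100) (const_int 4)), the call

     force_operand (value, NULL_RTX);

   emits an add insn and returns a pseudo register holding the sum,
   while a VALUE that is already a REG, MEM or constant comes back
   unchanged. */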
3060
3061rtx
3062force_operand (value, target)
3063 rtx value, target;
3064{
3065 register optab binoptab = 0;
3066 /* Use a temporary to force order of execution of calls to
3067 `force_operand'. */
3068 rtx tmp;
3069 register rtx op2;
3070 /* Use subtarget as the target for operand 0 of a binary operation. */
3071 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3072
3073 if (GET_CODE (value) == PLUS)
3074 binoptab = add_optab;
3075 else if (GET_CODE (value) == MINUS)
3076 binoptab = sub_optab;
3077 else if (GET_CODE (value) == MULT)
3078 {
3079 op2 = XEXP (value, 1);
3080 if (!CONSTANT_P (op2)
3081 && !(GET_CODE (op2) == REG && op2 != subtarget))
3082 subtarget = 0;
3083 tmp = force_operand (XEXP (value, 0), subtarget);
3084 return expand_mult (GET_MODE (value), tmp,
3085 force_operand (op2, NULL_RTX),
3086 target, 0);
3087 }
3088
3089 if (binoptab)
3090 {
3091 op2 = XEXP (value, 1);
3092 if (!CONSTANT_P (op2)
3093 && !(GET_CODE (op2) == REG && op2 != subtarget))
3094 subtarget = 0;
3095 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
3096 {
3097 binoptab = add_optab;
3098 op2 = negate_rtx (GET_MODE (value), op2);
3099 }
3100
3101 /* Check for an addition with OP2 a constant integer and our first
3102 operand a PLUS of a virtual register and something else. In that
3103 case, we want to emit the sum of the virtual register and the
3104 constant first and then add the other value. This allows virtual
3105 register instantiation to simply modify the constant rather than
3106 creating another one around this addition. */
3107 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
3108 && GET_CODE (XEXP (value, 0)) == PLUS
3109 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
3110 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
3111 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
3112 {
3113 rtx temp = expand_binop (GET_MODE (value), binoptab,
3114 XEXP (XEXP (value, 0), 0), op2,
3115 subtarget, 0, OPTAB_LIB_WIDEN);
3116 return expand_binop (GET_MODE (value), binoptab, temp,
3117 force_operand (XEXP (XEXP (value, 0), 1), 0),
3118 target, 0, OPTAB_LIB_WIDEN);
3119 }
3120
3121 tmp = force_operand (XEXP (value, 0), subtarget);
3122 return expand_binop (GET_MODE (value), binoptab, tmp,
3123 force_operand (op2, NULL_RTX),
3124 target, 0, OPTAB_LIB_WIDEN);
3125 /* We give UNSIGNEDP = 0 to expand_binop
3126 because the only operations we are expanding here are signed ones. */
3127 }
3128 return value;
3129}
3130\f
3131/* Subroutine of expand_expr:
3132 save the non-copied parts (LIST) of an expr (LHS), and return a list
3133 which can restore these values to their previous values,
3134 should something modify their storage. */
3135
3136static tree
3137save_noncopied_parts (lhs, list)
3138 tree lhs;
3139 tree list;
3140{
3141 tree tail;
3142 tree parts = 0;
3143
3144 for (tail = list; tail; tail = TREE_CHAIN (tail))
3145 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3146 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
3147 else
3148 {
3149 tree part = TREE_VALUE (tail);
3150 tree part_type = TREE_TYPE (part);
3151 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
3152 rtx target = assign_stack_temp (TYPE_MODE (part_type),
3153 int_size_in_bytes (part_type), 0);
3154 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
3155 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
3156 parts = tree_cons (to_be_saved,
3157 build (RTL_EXPR, part_type, NULL_TREE,
3158 (tree) target),
3159 parts);
3160 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
3161 }
3162 return parts;
3163}
3164
3165/* Subroutine of expand_expr:
3166 record the non-copied parts (LIST) of an expr (LHS), and return a list
3167 which specifies the initial values of these parts. */
3168
3169static tree
3170init_noncopied_parts (lhs, list)
3171 tree lhs;
3172 tree list;
3173{
3174 tree tail;
3175 tree parts = 0;
3176
3177 for (tail = list; tail; tail = TREE_CHAIN (tail))
3178 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
3179 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
3180 else
3181 {
3182 tree part = TREE_VALUE (tail);
3183 tree part_type = TREE_TYPE (part);
3184 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
3185 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
3186 }
3187 return parts;
3188}
3189
3190/* Subroutine of expand_expr: return nonzero iff there is no way that
3191 EXP can reference X, which is being modified. */
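/* Illustrative use (hypothetical caller): code that wants to build a
   value directly into TARGET can first check

     if (safe_from_p (target, exp))
       ... expand EXP in place ...

   store_constructor above relies on such a check having been made. */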
3192
3193static int
3194safe_from_p (x, exp)
3195 rtx x;
3196 tree exp;
3197{
3198 rtx exp_rtl = 0;
3199 int i, nops;
3200
3201 if (x == 0)
3202 return 1;
3203
3204 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
3205 find the underlying pseudo. */
3206 if (GET_CODE (x) == SUBREG)
3207 {
3208 x = SUBREG_REG (x);
3209 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3210 return 0;
3211 }
3212
3213 /* If X is a location in the outgoing argument area, it is always safe. */
3214 if (GET_CODE (x) == MEM
3215 && (XEXP (x, 0) == virtual_outgoing_args_rtx
3216 || (GET_CODE (XEXP (x, 0)) == PLUS
3217 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
3218 return 1;
3219
3220 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
3221 {
3222 case 'd':
3223 exp_rtl = DECL_RTL (exp);
3224 break;
3225
3226 case 'c':
3227 return 1;
3228
3229 case 'x':
3230 if (TREE_CODE (exp) == TREE_LIST)
3231 return ((TREE_VALUE (exp) == 0
3232 || safe_from_p (x, TREE_VALUE (exp)))
3233 && (TREE_CHAIN (exp) == 0
3234 || safe_from_p (x, TREE_CHAIN (exp))));
3235 else
3236 return 0;
3237
3238 case '1':
3239 return safe_from_p (x, TREE_OPERAND (exp, 0));
3240
3241 case '2':
3242 case '<':
3243 return (safe_from_p (x, TREE_OPERAND (exp, 0))
3244 && safe_from_p (x, TREE_OPERAND (exp, 1)));
3245
3246 case 'e':
3247 case 'r':
3248 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
3249 the expression. If it is set, we conflict iff we are that rtx or
3250 both are in memory. Otherwise, we check all operands of the
3251 expression recursively. */
3252
3253 switch (TREE_CODE (exp))
3254 {
3255 case ADDR_EXPR:
3256 return staticp (TREE_OPERAND (exp, 0));
3257
3258 case INDIRECT_REF:
3259 if (GET_CODE (x) == MEM)
3260 return 0;
3261 break;
3262
3263 case CALL_EXPR:
3264 exp_rtl = CALL_EXPR_RTL (exp);
3265 if (exp_rtl == 0)
3266 {
3267 /* Assume that the call will clobber all hard registers and
3268 all of memory. */
3269 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
3270 || GET_CODE (x) == MEM)
3271 return 0;
3272 }
3273
3274 break;
3275
3276 case RTL_EXPR:
3277 exp_rtl = RTL_EXPR_RTL (exp);
3278 if (exp_rtl == 0)
3279 /* We don't know what this can modify. */
3280 return 0;
3281
3282 break;
3283
3284 case WITH_CLEANUP_EXPR:
3285 exp_rtl = RTL_EXPR_RTL (exp);
3286 break;
3287
3288 case SAVE_EXPR:
3289 exp_rtl = SAVE_EXPR_RTL (exp);
3290 break;
3291
3292 case BIND_EXPR:
3293 /* The only operand we look at is operand 1. The rest aren't
3294 part of the expression. */
3295 return safe_from_p (x, TREE_OPERAND (exp, 1));
3296
bbf6f052
RK
3297 case METHOD_CALL_EXPR:
3298 /* This takes a rtx argument, but shouldn't appear here. */
3299 abort ();
3300 }
3301
3302 /* If we have an rtx, we do not need to scan our operands. */
3303 if (exp_rtl)
3304 break;
3305
3306 nops = tree_code_length[(int) TREE_CODE (exp)];
3307 for (i = 0; i < nops; i++)
3308 if (TREE_OPERAND (exp, i) != 0
3309 && ! safe_from_p (x, TREE_OPERAND (exp, i)))
3310 return 0;
3311 }
3312
3313 /* If we have an rtl, find any enclosed object. Then see if we conflict
3314 with it. */
3315 if (exp_rtl)
3316 {
3317 if (GET_CODE (exp_rtl) == SUBREG)
3318 {
3319 exp_rtl = SUBREG_REG (exp_rtl);
3320 if (GET_CODE (exp_rtl) == REG
3321 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
3322 return 0;
3323 }
3324
3325 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
3326 are memory and EXP is not readonly. */
3327 return ! (rtx_equal_p (x, exp_rtl)
3328 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
3329 && ! TREE_READONLY (exp)));
3330 }
3331
3332 /* If we reach here, it is safe. */
3333 return 1;
3334}
3335
3336/* Subroutine of expand_expr: return nonzero iff EXP is an
3337 expression whose type is statically determinable. */
3338
3339static int
3340fixed_type_p (exp)
3341 tree exp;
3342{
3343 if (TREE_CODE (exp) == PARM_DECL
3344 || TREE_CODE (exp) == VAR_DECL
3345 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
3346 || TREE_CODE (exp) == COMPONENT_REF
3347 || TREE_CODE (exp) == ARRAY_REF)
3348 return 1;
3349 return 0;
3350}
3351\f
3352/* expand_expr: generate code for computing expression EXP.
3353 An rtx for the computed value is returned. The value is never null.
3354 In the case of a void EXP, const0_rtx is returned.
3355
3356 The value may be stored in TARGET if TARGET is nonzero.
3357 TARGET is just a suggestion; callers must assume that
3358 the rtx returned may not be the same as TARGET.
3359
3360 If TARGET is CONST0_RTX, it means that the value will be ignored.
3361
3362 If TMODE is not VOIDmode, it suggests generating the
3363 result in mode TMODE. But this is done only when convenient.
3364 Otherwise, TMODE is ignored and the value is generated in its natural mode.
3365 TMODE is just a suggestion; callers must assume that
3366 the rtx returned may not have mode TMODE.
3367
3368 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
3369 with a constant address even if that address is not normally legitimate.
3370 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
3371
3372 If MODIFIER is EXPAND_SUM then when EXP is an addition
3373 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
3374 or a nest of (PLUS ...) and (MINUS ...) where the terms are
3375 products as above, or REG or MEM, or constant.
3376 Ordinarily in such cases we would output mul or add instructions
3377 and then return a pseudo reg containing the sum.
3378
3379 EXPAND_INITIALIZER is much like EXPAND_SUM except that
3380 it also marks a label as absolutely required (it can't be dead).
3381 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
3382 This is used for outputting expressions used in initializers. */
3383
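/* Illustrative note: the commonest pattern in this file is

     temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   i.e. no suggested target, no suggested mode and no modifier; the
   EXPAND_SUM and EXPAND_INITIALIZER callers instead accept the looser
   PLUS/MULT forms described above. */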
3384rtx
3385expand_expr (exp, target, tmode, modifier)
3386 register tree exp;
3387 rtx target;
3388 enum machine_mode tmode;
3389 enum expand_modifier modifier;
3390{
3391 register rtx op0, op1, temp;
3392 tree type = TREE_TYPE (exp);
3393 int unsignedp = TREE_UNSIGNED (type);
3394 register enum machine_mode mode = TYPE_MODE (type);
3395 register enum tree_code code = TREE_CODE (exp);
3396 optab this_optab;
3397 /* Use subtarget as the target for operand 0 of a binary operation. */
3398 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
3399 rtx original_target = target;
3400 int ignore = target == const0_rtx;
3401 tree context;
3402
3403 /* Don't use hard regs as subtargets, because the combiner
3404 can only handle pseudo regs. */
3405 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
3406 subtarget = 0;
3407 /* Avoid subtargets inside loops,
3408 since they hide some invariant expressions. */
3409 if (preserve_subexpressions_p ())
3410 subtarget = 0;
3411
3412 if (ignore) target = 0, original_target = 0;
3413
3414 /* If will do cse, generate all results into pseudo registers
3415 since 1) that allows cse to find more things
3416 and 2) otherwise cse could produce an insn the machine
3417 cannot support. */
3418
3419 if (! cse_not_expected && mode != BLKmode && target
3420 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
3421 target = subtarget;
3422
3423 /* Ensure we reference a volatile object even if value is ignored. */
3424 if (ignore && TREE_THIS_VOLATILE (exp)
3425 && mode != VOIDmode && mode != BLKmode)
3426 {
3427 target = gen_reg_rtx (mode);
3428 temp = expand_expr (exp, target, VOIDmode, modifier);
3429 if (temp != target)
3430 emit_move_insn (target, temp);
3431 return target;
3432 }
3433
3434 switch (code)
3435 {
3436 case LABEL_DECL:
3437 {
3438 tree function = decl_function_context (exp);
3439 /* Handle using a label in a containing function. */
3440 if (function != current_function_decl && function != 0)
3441 {
3442 struct function *p = find_function_data (function);
3443 /* Allocate in the memory associated with the function
3444 that the label is in. */
3445 push_obstacks (p->function_obstack,
3446 p->function_maybepermanent_obstack);
3447
3448 p->forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3449 label_rtx (exp), p->forced_labels);
3450 pop_obstacks ();
3451 }
3452 else if (modifier == EXPAND_INITIALIZER)
3453 forced_labels = gen_rtx (EXPR_LIST, VOIDmode,
3454 label_rtx (exp), forced_labels);
3455 temp = gen_rtx (MEM, FUNCTION_MODE,
3456 gen_rtx (LABEL_REF, Pmode, label_rtx (exp)));
3457 if (function != current_function_decl && function != 0)
3458 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
3459 return temp;
3460 }
3461
3462 case PARM_DECL:
3463 if (DECL_RTL (exp) == 0)
3464 {
3465 error_with_decl (exp, "prior parameter's size depends on `%s'");
3466 return CONST0_RTX (mode);
3467 }
3468
3469 case FUNCTION_DECL:
3470 case VAR_DECL:
3471 case RESULT_DECL:
3472 if (DECL_RTL (exp) == 0)
3473 abort ();
3474 /* Ensure variable marked as used
3475 even if it doesn't go through a parser. */
3476 TREE_USED (exp) = 1;
3477 /* Handle variables inherited from containing functions. */
3478 context = decl_function_context (exp);
3479
3480 /* We treat inline_function_decl as an alias for the current function
3481 because that is the inline function whose vars, types, etc.
3482 are being merged into the current function.
3483 See expand_inline_function. */
3484 if (context != 0 && context != current_function_decl
3485 && context != inline_function_decl
3486 /* If var is static, we don't need a static chain to access it. */
3487 && ! (GET_CODE (DECL_RTL (exp)) == MEM
3488 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
3489 {
3490 rtx addr;
3491
3492 /* Mark as non-local and addressable. */
3493 DECL_NONLOCAL (exp) = 1;
3494 mark_addressable (exp);
3495 if (GET_CODE (DECL_RTL (exp)) != MEM)
3496 abort ();
3497 addr = XEXP (DECL_RTL (exp), 0);
3498 if (GET_CODE (addr) == MEM)
3499 addr = gen_rtx (MEM, Pmode, fix_lexical_addr (XEXP (addr, 0), exp));
3500 else
3501 addr = fix_lexical_addr (addr, exp);
3502 return change_address (DECL_RTL (exp), mode, addr);
3503 }
3504
bbf6f052
RK
3505 /* This is the case of an array whose size is to be determined
3506 from its initializer, while the initializer is still being parsed.
3507 See expand_decl. */
3508 if (GET_CODE (DECL_RTL (exp)) == MEM
3509 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
3510 return change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
3511 XEXP (DECL_RTL (exp), 0));
3512 if (GET_CODE (DECL_RTL (exp)) == MEM
3513 && modifier != EXPAND_CONST_ADDRESS
3514 && modifier != EXPAND_SUM
3515 && modifier != EXPAND_INITIALIZER)
3516 {
3517 /* DECL_RTL probably contains a constant address.
3518 On RISC machines where a constant address isn't valid,
3519 make some insns to get that address into a register. */
3520 if (!memory_address_p (DECL_MODE (exp), XEXP (DECL_RTL (exp), 0))
3521 || (flag_force_addr
3522 && CONSTANT_ADDRESS_P (XEXP (DECL_RTL (exp), 0))))
3523 return change_address (DECL_RTL (exp), VOIDmode,
3524 copy_rtx (XEXP (DECL_RTL (exp), 0)));
3525 }
1499e0a8
RK
3526
3527 /* If the mode of DECL_RTL does not match that of the decl, it
3528 must be a promoted value. We return a SUBREG of the wanted mode,
3529 but mark it so that we know that it was already extended. */
3530
3531 if (GET_CODE (DECL_RTL (exp)) == REG
3532 && GET_MODE (DECL_RTL (exp)) != mode)
3533 {
3534 enum machine_mode decl_mode = DECL_MODE (exp);
3535
3536 /* Get the signedness used for this variable. Ensure we get the
3537 same mode we got when the variable was declared. */
3538
3539 PROMOTE_MODE (decl_mode, unsignedp, type);
3540
3541 if (decl_mode != GET_MODE (DECL_RTL (exp)))
3542 abort ();
3543
3544 temp = gen_rtx (SUBREG, mode, DECL_RTL (exp), 0);
3545 SUBREG_PROMOTED_VAR_P (temp) = 1;
3546 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3547 return temp;
3548 }
3549
bbf6f052
RK
3550 return DECL_RTL (exp);
3551
3552 case INTEGER_CST:
3553 return immed_double_const (TREE_INT_CST_LOW (exp),
3554 TREE_INT_CST_HIGH (exp),
3555 mode);
3556
3557 case CONST_DECL:
3558 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, 0);
3559
3560 case REAL_CST:
3561 /* If optimized, generate immediate CONST_DOUBLE
3562 which will be turned into memory by reload if necessary.
3563
3564 We used to force a register so that loop.c could see it. But
3565 this does not allow gen_* patterns to perform optimizations with
3566 the constants. It also produces two insns in cases like "x = 1.0;".
3567 On most machines, floating-point constants are not permitted in
3568 many insns, so we'd end up copying it to a register in any case.
3569
3570 Now, we do the copying in expand_binop, if appropriate. */
3571 return immed_real_const (exp);
3572
3573 case COMPLEX_CST:
3574 case STRING_CST:
3575 if (! TREE_CST_RTL (exp))
3576 output_constant_def (exp);
3577
3578 /* TREE_CST_RTL probably contains a constant address.
3579 On RISC machines where a constant address isn't valid,
3580 make some insns to get that address into a register. */
3581 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
3582 && modifier != EXPAND_CONST_ADDRESS
3583 && modifier != EXPAND_INITIALIZER
3584 && modifier != EXPAND_SUM
3585 && !memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0)))
3586 return change_address (TREE_CST_RTL (exp), VOIDmode,
3587 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
3588 return TREE_CST_RTL (exp);
3589
3590 case SAVE_EXPR:
3591 context = decl_function_context (exp);
3592 /* We treat inline_function_decl as an alias for the current function
3593 because that is the inline function whose vars, types, etc.
3594 are being merged into the current function.
3595 See expand_inline_function. */
3596 if (context == current_function_decl || context == inline_function_decl)
3597 context = 0;
3598
3599 /* If this is non-local, handle it. */
3600 if (context)
3601 {
3602 temp = SAVE_EXPR_RTL (exp);
3603 if (temp && GET_CODE (temp) == REG)
3604 {
3605 put_var_into_stack (exp);
3606 temp = SAVE_EXPR_RTL (exp);
3607 }
3608 if (temp == 0 || GET_CODE (temp) != MEM)
3609 abort ();
3610 return change_address (temp, mode,
3611 fix_lexical_addr (XEXP (temp, 0), exp));
3612 }
3613 if (SAVE_EXPR_RTL (exp) == 0)
3614 {
3615 if (mode == BLKmode)
3616 temp
3617 = assign_stack_temp (mode,
3618 int_size_in_bytes (TREE_TYPE (exp)), 0);
3619 else
1499e0a8
RK
3620 {
3621 enum machine_mode var_mode = mode;
3622
3623 if (TREE_CODE (type) == INTEGER_TYPE
3624 || TREE_CODE (type) == ENUMERAL_TYPE
3625 || TREE_CODE (type) == BOOLEAN_TYPE
3626 || TREE_CODE (type) == CHAR_TYPE
3627 || TREE_CODE (type) == REAL_TYPE
3628 || TREE_CODE (type) == POINTER_TYPE
3629 || TREE_CODE (type) == OFFSET_TYPE)
3630 {
3631 PROMOTE_MODE (var_mode, unsignedp, type);
3632 }
3633
3634 temp = gen_reg_rtx (var_mode);
3635 }
3636
bbf6f052
RK
3637 SAVE_EXPR_RTL (exp) = temp;
3638 store_expr (TREE_OPERAND (exp, 0), temp, 0);
3639 if (!optimize && GET_CODE (temp) == REG)
3640 save_expr_regs = gen_rtx (EXPR_LIST, VOIDmode, temp,
3641 save_expr_regs);
3642 }
1499e0a8
RK
3643
3644 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
3645 must be a promoted value. We return a SUBREG of the wanted mode,
3646 but mark it so that we know that it was already extended. Note
3647 that `unsignedp' was modified above in this case. */
3648
3649 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
3650 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
3651 {
3652 temp = gen_rtx (SUBREG, mode, SAVE_EXPR_RTL (exp), 0);
3653 SUBREG_PROMOTED_VAR_P (temp) = 1;
3654 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
3655 return temp;
3656 }
3657
bbf6f052
RK
3658 return SAVE_EXPR_RTL (exp);
3659
3660 case EXIT_EXPR:
3661 /* Exit the current loop if the body-expression is true. */
3662 {
3663 rtx label = gen_label_rtx ();
906c4e36
RK
3664 do_jump (TREE_OPERAND (exp, 0), label, NULL_RTX);
3665 expand_exit_loop (NULL_PTR);
bbf6f052
RK
3666 emit_label (label);
3667 }
3668 return const0_rtx;
3669
3670 case LOOP_EXPR:
3671 expand_start_loop (1);
3672 expand_expr_stmt (TREE_OPERAND (exp, 0));
3673 expand_end_loop ();
3674
3675 return const0_rtx;
3676
3677 case BIND_EXPR:
3678 {
3679 tree vars = TREE_OPERAND (exp, 0);
3680 int vars_need_expansion = 0;
3681
3682 /* Need to open a binding contour here because
3683 if there are any cleanups they must be contained here. */
3684 expand_start_bindings (0);
3685
2df53c0b
RS
3686 /* Mark the corresponding BLOCK for output in its proper place. */
3687 if (TREE_OPERAND (exp, 2) != 0
3688 && ! TREE_USED (TREE_OPERAND (exp, 2)))
3689 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
3690
3691 /* If VARS have not yet been expanded, expand them now. */
3692 while (vars)
3693 {
3694 if (DECL_RTL (vars) == 0)
3695 {
3696 vars_need_expansion = 1;
3697 expand_decl (vars);
3698 }
3699 expand_decl_init (vars);
3700 vars = TREE_CHAIN (vars);
3701 }
3702
3703 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
3704
3705 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
3706
3707 return temp;
3708 }
3709
3710 case RTL_EXPR:
3711 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
3712 abort ();
3713 emit_insns (RTL_EXPR_SEQUENCE (exp));
3714 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
3715 return RTL_EXPR_RTL (exp);
3716
3717 case CONSTRUCTOR:
4af3895e
JVA
3718 /* All elts simple constants => refer to a constant in memory. But
3719 if this is a non-BLKmode mode, let it store a field at a time
3720 since that should make a CONST_INT or CONST_DOUBLE when we
3721 fold. */
3722 if (TREE_STATIC (exp) && (mode == BLKmode || TREE_ADDRESSABLE (exp)))
bbf6f052
RK
3723 {
3724 rtx constructor = output_constant_def (exp);
b552441b
RS
3725 if (modifier != EXPAND_CONST_ADDRESS
3726 && modifier != EXPAND_INITIALIZER
3727 && modifier != EXPAND_SUM
3728 && !memory_address_p (GET_MODE (constructor),
3729 XEXP (constructor, 0)))
bbf6f052
RK
3730 constructor = change_address (constructor, VOIDmode,
3731 XEXP (constructor, 0));
3732 return constructor;
3733 }
3734
3735 if (ignore)
3736 {
3737 tree elt;
3738 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3739 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
3740 return const0_rtx;
3741 }
3742 else
3743 {
3744 if (target == 0 || ! safe_from_p (target, exp))
3745 {
3746 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
3747 target = gen_reg_rtx (mode);
3748 else
3749 {
3b94d087
RS
3750 enum tree_code c = TREE_CODE (type);
3751 target
3752 = assign_stack_temp (mode, int_size_in_bytes (type), 0);
3753 if (c == RECORD_TYPE || c == UNION_TYPE || c == ARRAY_TYPE)
3754 MEM_IN_STRUCT_P (target) = 1;
bbf6f052
RK
3755 }
3756 }
3757 store_constructor (exp, target);
3758 return target;
3759 }
3760
3761 case INDIRECT_REF:
3762 {
3763 tree exp1 = TREE_OPERAND (exp, 0);
3764 tree exp2;
3765
3766 /* A SAVE_EXPR as the address in an INDIRECT_REF is generated
3767 for *PTR += ANYTHING where PTR is put inside the SAVE_EXPR.
3768 This code has the same general effect as simply doing
3769 expand_expr on the save expr, except that the expression PTR
3770 is computed for use as a memory address. This means different
3771 code, suitable for indexing, may be generated. */
3772 if (TREE_CODE (exp1) == SAVE_EXPR
3773 && SAVE_EXPR_RTL (exp1) == 0
3774 && TREE_CODE (exp2 = TREE_OPERAND (exp1, 0)) != ERROR_MARK
3775 && TYPE_MODE (TREE_TYPE (exp1)) == Pmode
3776 && TYPE_MODE (TREE_TYPE (exp2)) == Pmode)
3777 {
906c4e36
RK
3778 temp = expand_expr (TREE_OPERAND (exp1, 0), NULL_RTX,
3779 VOIDmode, EXPAND_SUM);
bbf6f052
RK
3780 op0 = memory_address (mode, temp);
3781 op0 = copy_all_regs (op0);
3782 SAVE_EXPR_RTL (exp1) = op0;
3783 }
3784 else
3785 {
906c4e36 3786 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052
RK
3787 op0 = memory_address (mode, op0);
3788 }
8c8a8e34
JW
3789
3790 temp = gen_rtx (MEM, mode, op0);
3791 /* If address was computed by addition,
3792 mark this as an element of an aggregate. */
3793 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
3794 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
3795 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
3796 || TREE_CODE (TREE_TYPE (exp)) == ARRAY_TYPE
3797 || TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3798 || TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
3799 || (TREE_CODE (exp1) == ADDR_EXPR
3800 && (exp2 = TREE_OPERAND (exp1, 0))
3801 && (TREE_CODE (TREE_TYPE (exp2)) == ARRAY_TYPE
3802 || TREE_CODE (TREE_TYPE (exp2)) == RECORD_TYPE
3803 || TREE_CODE (TREE_TYPE (exp2)) == UNION_TYPE)))
3804 MEM_IN_STRUCT_P (temp) = 1;
f94cc92f 3805 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
bbf6f052
RK
3806#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
3807 a location is accessed through a pointer to const does not mean
3808 that the value there can never change. */
8c8a8e34 3809 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
bbf6f052 3810#endif
8c8a8e34
JW
3811 return temp;
3812 }
bbf6f052
RK
3813
3814 case ARRAY_REF:
3815 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
3816 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
3817 {
3818 /* Nonconstant array index or nonconstant element size.
3819 Generate the tree for *(&array+index) and expand that,
3820 except do it in a language-independent way
3821 and don't complain about non-lvalue arrays.
3822 `mark_addressable' should already have been called
3823 for any array for which this case will be reached. */
3824
3825 /* Don't forget the const or volatile flag from the array element. */
3826 tree variant_type = build_type_variant (type,
3827 TREE_READONLY (exp),
3828 TREE_THIS_VOLATILE (exp));
3829 tree array_adr = build1 (ADDR_EXPR, build_pointer_type (variant_type),
3830 TREE_OPERAND (exp, 0));
3831 tree index = TREE_OPERAND (exp, 1);
3832 tree elt;
3833
3834 /* Convert the integer argument to a type the same size as a pointer
3835 so the multiply won't overflow spuriously. */
3836 if (TYPE_PRECISION (TREE_TYPE (index)) != POINTER_SIZE)
3837 index = convert (type_for_size (POINTER_SIZE, 0), index);
3838
3839 /* Don't think the address has side effects
3840 just because the array does.
3841 (In some cases the address might have side effects,
3842 and we fail to record that fact here. However, it should not
3843 matter, since expand_expr should not care.) */
3844 TREE_SIDE_EFFECTS (array_adr) = 0;
3845
3846 elt = build1 (INDIRECT_REF, type,
3847 fold (build (PLUS_EXPR, TYPE_POINTER_TO (variant_type),
3848 array_adr,
3849 fold (build (MULT_EXPR,
3850 TYPE_POINTER_TO (variant_type),
3851 index, size_in_bytes (type))))));
3852
3853 /* Volatility, etc., of new expression is same as old expression. */
3854 TREE_SIDE_EFFECTS (elt) = TREE_SIDE_EFFECTS (exp);
3855 TREE_THIS_VOLATILE (elt) = TREE_THIS_VOLATILE (exp);
3856 TREE_READONLY (elt) = TREE_READONLY (exp);
3857
3858 return expand_expr (elt, target, tmode, modifier);
3859 }
3860
3861 /* Fold an expression like: "foo"[2].
3862 This is not done in fold so it won't happen inside &. */
3863 {
3864 int i;
3865 tree arg0 = TREE_OPERAND (exp, 0);
3866 tree arg1 = TREE_OPERAND (exp, 1);
3867
3868 if (TREE_CODE (arg0) == STRING_CST
3869 && TREE_CODE (arg1) == INTEGER_CST
3870 && !TREE_INT_CST_HIGH (arg1)
3871 && (i = TREE_INT_CST_LOW (arg1)) < TREE_STRING_LENGTH (arg0))
3872 {
3873 if (TREE_TYPE (TREE_TYPE (arg0)) == integer_type_node)
3874 {
3875 exp = build_int_2 (((int *)TREE_STRING_POINTER (arg0))[i], 0);
3876 TREE_TYPE (exp) = integer_type_node;
3877 return expand_expr (exp, target, tmode, modifier);
3878 }
3879 if (TREE_TYPE (TREE_TYPE (arg0)) == char_type_node)
3880 {
3881 exp = build_int_2 (TREE_STRING_POINTER (arg0)[i], 0);
3882 TREE_TYPE (exp) = integer_type_node;
3883 return expand_expr (convert (TREE_TYPE (TREE_TYPE (arg0)), exp), target, tmode, modifier);
3884 }
3885 }
3886 }
3887
3888 /* If this is a constant index into a constant array,
4af3895e
JVA
3889 just get the value from the array. Handle both cases: when
3890 we have an explicit constructor and when our operand is a variable
3891 that was declared const. */
3892
3893 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
3894 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
3895 {
3896 tree index = fold (TREE_OPERAND (exp, 1));
3897 if (TREE_CODE (index) == INTEGER_CST
3898 && TREE_INT_CST_HIGH (index) == 0)
3899 {
3900 int i = TREE_INT_CST_LOW (index);
3901 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
3902
3903 while (elem && i--)
3904 elem = TREE_CHAIN (elem);
3905 if (elem)
3906 return expand_expr (fold (TREE_VALUE (elem)), target,
3907 tmode, modifier);
3908 }
3909 }
3910
3911 else if (TREE_READONLY (TREE_OPERAND (exp, 0))
3912 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
3913 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == ARRAY_TYPE
3914 && TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
3915 && DECL_INITIAL (TREE_OPERAND (exp, 0))
3916 && optimize >= 1
3917 && (TREE_CODE (DECL_INITIAL (TREE_OPERAND (exp, 0)))
3918 != ERROR_MARK))
bbf6f052
RK
3919 {
3920 tree index = fold (TREE_OPERAND (exp, 1));
8c8a8e34
JW
3921 if (TREE_CODE (index) == INTEGER_CST
3922 && TREE_INT_CST_HIGH (index) == 0)
bbf6f052
RK
3923 {
3924 int i = TREE_INT_CST_LOW (index);
8c8a8e34 3925 tree init = DECL_INITIAL (TREE_OPERAND (exp, 0));
bbf6f052 3926
8c8a8e34
JW
3927 if (TREE_CODE (init) == CONSTRUCTOR)
3928 {
3929 tree elem = CONSTRUCTOR_ELTS (init);
3930
3931 while (elem && i--)
3932 elem = TREE_CHAIN (elem);
3933 if (elem)
3934 return expand_expr (fold (TREE_VALUE (elem)), target,
3935 tmode, modifier);
3936 }
3937 else if (TREE_CODE (init) == STRING_CST
3938 && i < TREE_STRING_LENGTH (init))
3939 {
906c4e36 3940 temp = GEN_INT (TREE_STRING_POINTER (init)[i]);
8c8a8e34
JW
3941 return convert_to_mode (mode, temp, 0);
3942 }
bbf6f052
RK
3943 }
3944 }
3945 /* Treat array-ref with constant index as a component-ref. */
3946
3947 case COMPONENT_REF:
3948 case BIT_FIELD_REF:
4af3895e
JVA
3949 /* If the operand is a CONSTRUCTOR, we can just extract the
3950 appropriate field if it is present. */
3951 if (code != ARRAY_REF
3952 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
3953 {
3954 tree elt;
3955
3956 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
3957 elt = TREE_CHAIN (elt))
3958 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
3959 return expand_expr (TREE_VALUE (elt), target, tmode, modifier);
3960 }
3961
bbf6f052
RK
3962 {
3963 enum machine_mode mode1;
3964 int bitsize;
3965 int bitpos;
7bb0943f 3966 tree offset;
bbf6f052 3967 int volatilep = 0;
7bb0943f 3968 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
bbf6f052
RK
3969 &mode1, &unsignedp, &volatilep);
3970
3971 /* In some cases, we will be offsetting OP0's address by a constant.
3972 So get it as a sum, if possible. If we will be using it
3973 directly in an insn, we validate it. */
906c4e36 3974 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052 3975
8c8a8e34
JW
3976 /* If this is a constant, put it into a register if it is a
3977 legitimate constant and memory if it isn't. */
3978 if (CONSTANT_P (op0))
3979 {
3980 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
3981 if (LEGITIMATE_CONSTANT_P (op0))
3982 op0 = force_reg (mode, op0);
3983 else
3984 op0 = validize_mem (force_const_mem (mode, op0));
3985 }
3986
7bb0943f
RS
3987 if (offset != 0)
3988 {
906c4e36 3989 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
3990
3991 if (GET_CODE (op0) != MEM)
3992 abort ();
3993 op0 = change_address (op0, VOIDmode,
3994 gen_rtx (PLUS, Pmode, XEXP (op0, 0),
3995 force_reg (Pmode, offset_rtx)));
3996 }
3997
bbf6f052
RK
3998 /* Don't forget about volatility even if this is a bitfield. */
3999 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
4000 {
4001 op0 = copy_rtx (op0);
4002 MEM_VOLATILE_P (op0) = 1;
4003 }
4004
4005 if (mode1 == VOIDmode
0bba3f6f
RK
4006 || (mode1 != BLKmode && ! direct_load[(int) mode1]
4007 && modifier != EXPAND_CONST_ADDRESS
4008 && modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
bbf6f052
RK
4009 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG)
4010 {
4011 /* In cases where an aligned union has an unaligned object
4012 as a field, we might be extracting a BLKmode value from
4013 an integer-mode (e.g., SImode) object. Handle this case
4014 by doing the extract into an object as wide as the field
4015 (which we know to be the width of a basic mode), then
4016 storing into memory, and changing the mode to BLKmode. */
4017 enum machine_mode ext_mode = mode;
4018
4019 if (ext_mode == BLKmode)
4020 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
4021
4022 if (ext_mode == BLKmode)
4023 abort ();
4024
4025 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
4026 unsignedp, target, ext_mode, ext_mode,
4027 TYPE_ALIGN (TREE_TYPE (tem)) / BITS_PER_UNIT,
4028 int_size_in_bytes (TREE_TYPE (tem)));
4029 if (mode == BLKmode)
4030 {
4031 rtx new = assign_stack_temp (ext_mode,
4032 bitsize / BITS_PER_UNIT, 0);
4033
4034 emit_move_insn (new, op0);
4035 op0 = copy_rtx (new);
4036 PUT_MODE (op0, BLKmode);
4037 }
4038
4039 return op0;
4040 }
4041
4042 /* Get a reference to just this component. */
4043 if (modifier == EXPAND_CONST_ADDRESS
4044 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4045 op0 = gen_rtx (MEM, mode1, plus_constant (XEXP (op0, 0),
4046 (bitpos / BITS_PER_UNIT)));
4047 else
4048 op0 = change_address (op0, mode1,
4049 plus_constant (XEXP (op0, 0),
4050 (bitpos / BITS_PER_UNIT)));
4051 MEM_IN_STRUCT_P (op0) = 1;
4052 MEM_VOLATILE_P (op0) |= volatilep;
4053 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode)
4054 return op0;
4055 if (target == 0)
4056 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4057 convert_move (target, op0, unsignedp);
4058 return target;
4059 }
4060
4061 case OFFSET_REF:
4062 {
4063 tree base = build_unary_op (ADDR_EXPR, TREE_OPERAND (exp, 0), 0);
4064 tree addr = build (PLUS_EXPR, type, base, TREE_OPERAND (exp, 1));
906c4e36 4065 op0 = expand_expr (addr, NULL_RTX, VOIDmode, EXPAND_SUM);
bbf6f052
RK
4066 temp = gen_rtx (MEM, mode, memory_address (mode, op0));
4067 MEM_IN_STRUCT_P (temp) = 1;
f94cc92f 4068 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp);
bbf6f052
RK
4069#if 0 /* It is incorrect to set RTX_UNCHANGING_P here, because the fact that
4070 a location is accessed through a pointer to const does not mean
4071 that the value there can never change. */
4072 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp);
4073#endif
4074 return temp;
4075 }
4076
4077 /* Intended for a reference to a buffer of a file-object in Pascal.
4078 But it's not certain that a special tree code will really be
4079 necessary for these. INDIRECT_REF might work for them. */
4080 case BUFFER_REF:
4081 abort ();
4082
7308a047
RS
4083 /* IN_EXPR: Inlined pascal set IN expression.
4084
4085 Algorithm:
4086 rlo = set_low - (set_low%bits_per_word);
4087 the_word = set [ (index - rlo)/bits_per_word ];
4088 bit_index = index % bits_per_word;
4089 bitmask = 1 << bit_index;
4090 return !!(the_word & bitmask); */
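     For example (illustrative numbers, not from the original): with
     set_low = 3, index = 10, and 8 bits per unit,
     rlo = 3 - (3 % 8) = 0, the_word = set [(10 - 0) / 8] = set [1],
     bit_index = 10 % 8 = 2, and bitmask = 1 << 2, so the test reads
     bit 2 of the set's second byte. */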
4091 case IN_EXPR:
4092 preexpand_calls (exp);
4093 {
4094 tree set = TREE_OPERAND (exp, 0);
4095 tree index = TREE_OPERAND (exp, 1);
4096 tree set_type = TREE_TYPE (set);
4097
4098 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
4099 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
4100
4101 rtx index_val;
4102 rtx lo_r;
4103 rtx hi_r;
4104 rtx rlow;
4105 rtx diff, quo, rem, addr, bit, result;
4106 rtx setval, setaddr;
4107 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
4108
4109 if (target == 0)
4110 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
4111
4112 /* If domain is empty, answer is no. */
4113 if (tree_int_cst_lt (set_high_bound, set_low_bound))
4114 return const0_rtx;
4115
4116 index_val = expand_expr (index, NULL_RTX, VOIDmode, 0);
4117 lo_r = expand_expr (set_low_bound, NULL_RTX, VOIDmode, 0);
4118 hi_r = expand_expr (set_high_bound, NULL_RTX, VOIDmode, 0);
4119 setval = expand_expr (set, NULL_RTX, VOIDmode, 0);
4120 setaddr = XEXP (setval, 0);
4121
4122 /* Compare index against bounds, if they are constant. */
4123 if (GET_CODE (index_val) == CONST_INT
4124 && GET_CODE (lo_r) == CONST_INT)
4125 {
4126 if (INTVAL (index_val) < INTVAL (lo_r))
4127 return const0_rtx;
4128 }
4129
4130 if (GET_CODE (index_val) == CONST_INT
4131 && GET_CODE (hi_r) == CONST_INT)
4132 {
4133 if (INTVAL (hi_r) < INTVAL (index_val))
4134 return const0_rtx;
4135 }
4136
4137 /* If we get here, we have to generate the code for both cases
4138 (in range and out of range). */
4139
4140 op0 = gen_label_rtx ();
4141 op1 = gen_label_rtx ();
4142
4143 if (! (GET_CODE (index_val) == CONST_INT
4144 && GET_CODE (lo_r) == CONST_INT))
4145 {
4146 emit_cmp_insn (index_val, lo_r, LT, 0, GET_MODE (index_val), 0, 0);
4147 emit_jump_insn (gen_blt (op1));
4148 }
4149
4150 if (! (GET_CODE (index_val) == CONST_INT
4151 && GET_CODE (hi_r) == CONST_INT))
4152 {
4153 emit_cmp_insn (index_val, hi_r, GT, 0, GET_MODE (index_val), 0, 0);
4154 emit_jump_insn (gen_bgt (op1));
4155 }
4156
4157 /* Calculate the element number of bit zero in the first word
4158 of the set. */
4159 if (GET_CODE (lo_r) == CONST_INT)
4160 rlow = gen_rtx (CONST_INT, VOIDmode,
4161 INTVAL (lo_r) & ~ (BITS_PER_UNIT - 1));
4162 else
4163 rlow = expand_binop (index_mode, and_optab,
4164 lo_r, gen_rtx (CONST_INT, VOIDmode,
4165 ~ (BITS_PER_UNIT - 1)),
4166 0, 0, OPTAB_LIB_WIDEN);
4167
4168 diff = expand_binop (index_mode, sub_optab,
4169 index_val, rlow, 0, 0, OPTAB_LIB_WIDEN);
4170
4171 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
4172 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4173 0, 0);
4174 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
4175 gen_rtx (CONST_INT, VOIDmode, BITS_PER_UNIT),
4176 0, 0);
4177 addr = memory_address (byte_mode,
4178 expand_binop (index_mode, add_optab,
4179 diff, setaddr, 0, 0,
4179 OPTAB_LIB_WIDEN));
4180 /* Extract the bit we want to examine */
4181 bit = expand_shift (RSHIFT_EXPR, byte_mode,
4182 gen_rtx (MEM, byte_mode, addr), rem, 0, 1);
4183 result = expand_binop (SImode, and_optab, bit, const1_rtx, target,
4184 1, OPTAB_LIB_WIDEN);
4185 emit_move_insn (target, result);
4186
4187 /* Output the code to handle the out-of-range case. */
4188 emit_jump (op0);
4189 emit_label (op1);
4190 emit_move_insn (target, const0_rtx);
4191 emit_label (op0);
4192 return target;
4193 }
4194
bbf6f052
RK
4195 case WITH_CLEANUP_EXPR:
4196 if (RTL_EXPR_RTL (exp) == 0)
4197 {
4198 RTL_EXPR_RTL (exp)
4199 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
906c4e36
RK
4200 cleanups_this_call
4201 = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2), cleanups_this_call);
bbf6f052
RK
4202 /* That's it for this cleanup. */
4203 TREE_OPERAND (exp, 2) = 0;
4204 }
4205 return RTL_EXPR_RTL (exp);
4206
4207 case CALL_EXPR:
4208 /* Check for a built-in function. */
4209 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
4210 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == FUNCTION_DECL
4211 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4212 return expand_builtin (exp, target, subtarget, tmode, ignore);
4213 /* If this call was expanded already by preexpand_calls,
4214 just return the result we got. */
4215 if (CALL_EXPR_RTL (exp) != 0)
4216 return CALL_EXPR_RTL (exp);
8129842c 4217 return expand_call (exp, target, ignore);
bbf6f052
RK
4218
4219 case NON_LVALUE_EXPR:
4220 case NOP_EXPR:
4221 case CONVERT_EXPR:
4222 case REFERENCE_EXPR:
4223 if (TREE_CODE (type) == VOID_TYPE || ignore)
4224 {
4225 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
4226 return const0_rtx;
4227 }
4228 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4229 return expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, modifier);
4230 if (TREE_CODE (type) == UNION_TYPE)
4231 {
4232 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
4233 if (target == 0)
4234 {
4235 if (mode == BLKmode)
4236 {
4237 if (TYPE_SIZE (type) == 0
4238 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4239 abort ();
4240 target = assign_stack_temp (BLKmode,
4241 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4242 + BITS_PER_UNIT - 1)
4243 / BITS_PER_UNIT, 0);
4244 }
4245 else
4246 target = gen_reg_rtx (mode);
4247 }
4248 if (GET_CODE (target) == MEM)
4249 /* Store data into beginning of memory target. */
4250 store_expr (TREE_OPERAND (exp, 0),
1499e0a8
RK
4251 change_address (target, TYPE_MODE (valtype), 0), 0);
4252
bbf6f052
RK
4253 else if (GET_CODE (target) == REG)
4254 /* Store this field into a union of the proper type. */
4255 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
4256 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
4257 VOIDmode, 0, 1,
4258 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
4259 else
4260 abort ();
4261
4262 /* Return the entire union. */
4263 return target;
4264 }
1499e0a8 4265 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
4266 if (GET_MODE (op0) == mode)
4267 return op0;
4268 /* If arg is a constant integer being extended from a narrower mode,
4269 we must really truncate to get the extended bits right. Otherwise
4270 (unsigned long) (unsigned char) ("\377"[0])
4271 would come out as ffffffff. */
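 /* Concretely (a sketch of the masking below, assuming a target with
	 signed 8-bit chars): "\377"[0] is the char value -1, i.e. all
	 ones as a HOST_WIDE_INT; masking it with
	 ((HOST_WIDE_INT) 1 << 8) - 1 leaves 0xff, the value an
	 (unsigned char) must contribute before the widening.  */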
4272 if (GET_MODE (op0) == VOIDmode
4273 && (GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
4274 < GET_MODE_BITSIZE (mode)))
4275 {
4276 /* MODE must be narrower than HOST_BITS_PER_WIDE_INT. */
4277 int width = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
4278
4279 if (width < HOST_BITS_PER_WIDE_INT)
4280 {
4281 HOST_WIDE_INT val = (GET_CODE (op0) == CONST_INT ? INTVAL (op0)
4282 : CONST_DOUBLE_LOW (op0));
4283 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4284 || !(val & ((HOST_WIDE_INT) 1 << (width - 1))))
4285 val &= ((HOST_WIDE_INT) 1 << width) - 1;
4286 else
4287 val |= ~(((HOST_WIDE_INT) 1 << width) - 1);
4288
4289 op0 = GEN_INT (val);
4290 }
4291 else
4292 {
4293 op0 = (simplify_unary_operation
4294 ((TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
4295 ? ZERO_EXTEND : SIGN_EXTEND),
4296 mode, op0,
4297 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))));
4298 if (op0 == 0)
4299 abort ();
4300 }
4301 }
4302 if (GET_MODE (op0) == VOIDmode)
bbf6f052 4303 return op0;
26fcb35a
RS
4304 if (modifier == EXPAND_INITIALIZER)
4305 return gen_rtx (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
bbf6f052
RK
4306 if (flag_force_mem && GET_CODE (op0) == MEM)
4307 op0 = copy_to_reg (op0);
4308
4309 if (target == 0)
4310 return convert_to_mode (mode, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4311 else
4312 convert_move (target, op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4313 return target;
4314
4315 case PLUS_EXPR:
4316 /* We come here from MINUS_EXPR when the second operand is a constant. */
4317 plus_expr:
4318 this_optab = add_optab;
4319
4320 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
4321 something else, make sure we add the register to the constant and
4322 then to the other thing. This case can occur during strength
4323 reduction and doing it this way will produce better code if the
4324 frame pointer or argument pointer is eliminated.
4325
4326 fold-const.c will ensure that the constant is always in the inner
4327 PLUS_EXPR, so the only case we need to do anything about is if
4328 sp, ap, or fp is our second argument, in which case we must swap
4329 the innermost first argument and our second argument. */
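 /* E.g. (illustrative) (A + 8) + fp is rearranged here to
	 (fp + 8) + A, so the pointer register and the constant combine
	 first and fold cleanly if fp is later eliminated to an offset
	 from another register.  */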
4330
4331 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
4332 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
4333 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
4334 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
4335 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
4336 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
4337 {
4338 tree t = TREE_OPERAND (exp, 1);
4339
4340 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
4341 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
4342 }
4343
4344 /* If the result is to be Pmode and we are adding an integer to
4345 something, we might be forming a constant. So try to use
4346 plus_constant. If it produces a sum and we can't accept it,
4347 use force_operand. This allows P = &ARR[const] to generate
4348 efficient code on machines where a SYMBOL_REF is not a valid
4349 address.
4350
4351 If this is an EXPAND_SUM call, always return the sum. */
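 /* For instance (a sketch): for P = &ARR[5] with 4-byte elements,
	 plus_constant can fold the whole address into the single
	 constant ARR+20; if that is not a valid address on the target,
	 force_operand loads it into a register rather than emitting an
	 explicit add.  */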
4352 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
906c4e36 4353 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
bbf6f052
RK
4354 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4355 || mode == Pmode))
4356 {
4357 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
4358 EXPAND_SUM);
4359 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
4360 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4361 op1 = force_operand (op1, target);
4362 return op1;
4363 }
4364
4365 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4366 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
4367 && (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
4368 || mode == Pmode))
4369 {
4370 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
4371 EXPAND_SUM);
4372 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
4373 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4374 op0 = force_operand (op0, target);
4375 return op0;
4376 }
4377
4378 /* No sense saving up arithmetic to be done
4379 if it's all in the wrong mode to form part of an address.
4380 And force_operand won't know whether to sign-extend or
4381 zero-extend. */
4382 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
4383 || mode != Pmode) goto binop;
4384
4385 preexpand_calls (exp);
4386 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4387 subtarget = 0;
4388
4389 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
906c4e36 4390 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
bbf6f052
RK
4391
4392 /* Make sure any term that's a sum with a constant comes last. */
4393 if (GET_CODE (op0) == PLUS
4394 && CONSTANT_P (XEXP (op0, 1)))
4395 {
4396 temp = op0;
4397 op0 = op1;
4398 op1 = temp;
4399 }
4400 /* If adding to a sum including a constant,
4401 associate it to put the constant outside. */
4402 if (GET_CODE (op1) == PLUS
4403 && CONSTANT_P (XEXP (op1, 1)))
4404 {
4405 rtx constant_term = const0_rtx;
4406
4407 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
4408 if (temp != 0)
4409 op0 = temp;
6f90e075
JW
4410 /* Ensure that MULT comes first if there is one. */
4411 else if (GET_CODE (op0) == MULT)
4412 op0 = gen_rtx (PLUS, mode, op0, XEXP (op1, 0));
bbf6f052
RK
4413 else
4414 op0 = gen_rtx (PLUS, mode, XEXP (op1, 0), op0);
4415
4416 /* Let's also eliminate constants from op0 if possible. */
4417 op0 = eliminate_constant_term (op0, &constant_term);
4418
4419 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
4420 their sum should be a constant. Form it into OP1, since the
4421 result we want will then be OP0 + OP1. */
4422
4423 temp = simplify_binary_operation (PLUS, mode, constant_term,
4424 XEXP (op1, 1));
4425 if (temp != 0)
4426 op1 = temp;
4427 else
4428 op1 = gen_rtx (PLUS, mode, constant_term, XEXP (op1, 1));
4429 }
4430
4431 /* Put a constant term last and put a multiplication first. */
4432 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
4433 temp = op1, op1 = op0, op0 = temp;
4434
4435 temp = simplify_binary_operation (PLUS, mode, op0, op1);
4436 return temp ? temp : gen_rtx (PLUS, mode, op0, op1);
4437
4438 case MINUS_EXPR:
4439 /* Handle difference of two symbolic constants,
4440 for the sake of an initializer. */
4441 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
4442 && really_constant_p (TREE_OPERAND (exp, 0))
4443 && really_constant_p (TREE_OPERAND (exp, 1)))
4444 {
906c4e36
RK
4445 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
4446 VOIDmode, modifier);
4447 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4448 VOIDmode, modifier);
bbf6f052
RK
4449 return gen_rtx (MINUS, mode, op0, op1);
4450 }
4451 /* Convert A - const to A + (-const). */
4452 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
4453 {
4454 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0),
4455 fold (build1 (NEGATE_EXPR, type,
4456 TREE_OPERAND (exp, 1))));
4457 goto plus_expr;
4458 }
4459 this_optab = sub_optab;
4460 goto binop;
4461
4462 case MULT_EXPR:
4463 preexpand_calls (exp);
4464 /* If first operand is constant, swap them.
4465 Thus the following special case checks need only
4466 check the second operand. */
4467 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
4468 {
4469 register tree t1 = TREE_OPERAND (exp, 0);
4470 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
4471 TREE_OPERAND (exp, 1) = t1;
4472 }
4473
4474 /* Attempt to return something suitable for generating an
4475 indexed address, for machines that support that. */
4476
4477 if (modifier == EXPAND_SUM && mode == Pmode
4478 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 4479 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
4480 {
4481 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, EXPAND_SUM);
4482
4483 /* Apply distributive law if OP0 is x+c. */
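	 /* E.g. (illustrative) (x + 4) * 3 yields (x * 3) + 12 here,
	    keeping the constant outermost where it can take part in
	    address arithmetic.  */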
4484 if (GET_CODE (op0) == PLUS
4485 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
4486 return gen_rtx (PLUS, mode,
4487 gen_rtx (MULT, mode, XEXP (op0, 0),
906c4e36
RK
4488 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
4489 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
4490 * INTVAL (XEXP (op0, 1))));
bbf6f052
RK
4491
4492 if (GET_CODE (op0) != REG)
906c4e36 4493 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
4494 if (GET_CODE (op0) != REG)
4495 op0 = copy_to_mode_reg (mode, op0);
4496
4497 return gen_rtx (MULT, mode, op0,
906c4e36 4498 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
4499 }
4500
4501 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4502 subtarget = 0;
4503
4504 /* Check for multiplying things that have been extended
4505 from a narrower type. If this machine supports multiplying
4506 in that narrower type with a result in the desired type,
4507 do it that way, and avoid the explicit type-conversion. */
4508 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
4509 && TREE_CODE (type) == INTEGER_TYPE
4510 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4511 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
4512 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
4513 && int_fits_type_p (TREE_OPERAND (exp, 1),
4514 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4515 /* Don't use a widening multiply if a shift will do. */
4516 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 4517 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
4518 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
4519 ||
4520 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
4521 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4522 ==
4523 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
4524 /* If both operands are extended, they must either both
4525 be zero-extended or both be sign-extended. */
4526 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
4527 ==
4528 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
4529 {
4530 enum machine_mode innermode
4531 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
4532 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
4533 ? umul_widen_optab : smul_widen_optab);
4534 if (mode == GET_MODE_WIDER_MODE (innermode)
4535 && this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
4536 {
4537 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
906c4e36 4538 NULL_RTX, VOIDmode, 0);
bbf6f052 4539 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
906c4e36
RK
4540 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
4541 VOIDmode, 0);
bbf6f052
RK
4542 else
4543 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
906c4e36 4544 NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4545 goto binop2;
4546 }
4547 }
4548 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 4549 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4550 return expand_mult (mode, op0, op1, target, unsignedp);
4551
4552 case TRUNC_DIV_EXPR:
4553 case FLOOR_DIV_EXPR:
4554 case CEIL_DIV_EXPR:
4555 case ROUND_DIV_EXPR:
4556 case EXACT_DIV_EXPR:
4557 preexpand_calls (exp);
4558 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4559 subtarget = 0;
4560 /* Possible optimization: compute the dividend with EXPAND_SUM
4561 then if the divisor is constant can optimize the case
4562 where some terms of the dividend have coeffs divisible by it. */
4563 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 4564 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4565 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
4566
4567 case RDIV_EXPR:
4568 this_optab = flodiv_optab;
4569 goto binop;
4570
4571 case TRUNC_MOD_EXPR:
4572 case FLOOR_MOD_EXPR:
4573 case CEIL_MOD_EXPR:
4574 case ROUND_MOD_EXPR:
4575 preexpand_calls (exp);
4576 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4577 subtarget = 0;
4578 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 4579 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4580 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
4581
4582 case FIX_ROUND_EXPR:
4583 case FIX_FLOOR_EXPR:
4584 case FIX_CEIL_EXPR:
4585 abort (); /* Not used for C. */
4586
4587 case FIX_TRUNC_EXPR:
906c4e36 4588 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4589 if (target == 0)
4590 target = gen_reg_rtx (mode);
4591 expand_fix (target, op0, unsignedp);
4592 return target;
4593
4594 case FLOAT_EXPR:
906c4e36 4595 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4596 if (target == 0)
4597 target = gen_reg_rtx (mode);
4598 /* expand_float can't figure out what to do if FROM has VOIDmode.
4599 So give it the correct mode. With -O, cse will optimize this. */
4600 if (GET_MODE (op0) == VOIDmode)
4601 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
4602 op0);
4603 expand_float (target, op0,
4604 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
4605 return target;
4606
4607 case NEGATE_EXPR:
4608 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4609 temp = expand_unop (mode, neg_optab, op0, target, 0);
4610 if (temp == 0)
4611 abort ();
4612 return temp;
4613
4614 case ABS_EXPR:
4615 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4616
2d7050fd
RS
4617 /* Handle complex values specially. */
4618 {
4619 enum machine_mode opmode
4620 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
4621
4622 if (GET_MODE_CLASS (opmode) == MODE_COMPLEX_INT
4623 || GET_MODE_CLASS (opmode) == MODE_COMPLEX_FLOAT)
4624 return expand_complex_abs (opmode, op0, target, unsignedp);
4625 }
4626
bbf6f052
RK
4627 /* Unsigned abs is simply the operand. Testing here means we don't
4628 risk generating incorrect code below. */
4629 if (TREE_UNSIGNED (type))
4630 return op0;
4631
4632 /* First try to do it with a special abs instruction. */
4633 temp = expand_unop (mode, abs_optab, op0, target, 0);
4634 if (temp != 0)
4635 return temp;
4636
4637 /* If this machine has expensive jumps, we can do integer absolute
4638 value of X as (((signed) x >> (W-1)) ^ x) - ((signed) x >> (W-1)),
4639 where W is the width of MODE. */
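   /* Checking the identity (illustrative): for W = 32 and x = -5,
	 t = x >> 31 = -1 (all one bits); (t ^ x) = ~x = 4; and
	 4 - (-1) = 5.  For x >= 0, t = 0 and the expression is just x.  */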
4640
4641 if (GET_MODE_CLASS (mode) == MODE_INT && BRANCH_COST >= 2)
4642 {
4643 rtx extended = expand_shift (RSHIFT_EXPR, mode, op0,
4644 size_int (GET_MODE_BITSIZE (mode) - 1),
906c4e36 4645 NULL_RTX, 0);
bbf6f052
RK
4646
4647 temp = expand_binop (mode, xor_optab, extended, op0, target, 0,
4648 OPTAB_LIB_WIDEN);
4649 if (temp != 0)
4650 temp = expand_binop (mode, sub_optab, temp, extended, target, 0,
4651 OPTAB_LIB_WIDEN);
4652
4653 if (temp != 0)
4654 return temp;
4655 }
4656
4657 /* If that does not win, use conditional jump and negate. */
4658 target = original_target;
4659 temp = gen_label_rtx ();
4660 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 0))
4661 || (GET_CODE (target) == REG
4662 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4663 target = gen_reg_rtx (mode);
4664 emit_move_insn (target, op0);
4665 emit_cmp_insn (target,
4666 expand_expr (convert (type, integer_zero_node),
906c4e36
RK
4667 NULL_RTX, VOIDmode, 0),
4668 GE, NULL_RTX, mode, 0, 0);
bbf6f052
RK
4669 NO_DEFER_POP;
4670 emit_jump_insn (gen_bge (temp));
4671 op0 = expand_unop (mode, neg_optab, target, target, 0);
4672 if (op0 != target)
4673 emit_move_insn (target, op0);
4674 emit_label (temp);
4675 OK_DEFER_POP;
4676 return target;
4677
4678 case MAX_EXPR:
4679 case MIN_EXPR:
4680 target = original_target;
4681 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
4682 || (GET_CODE (target) == REG
4683 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4684 target = gen_reg_rtx (mode);
906c4e36 4685 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4686 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4687
4688 /* First try to do it with a special MIN or MAX instruction.
4689 If that does not win, use a conditional jump to select the proper
4690 value. */
4691 this_optab = (TREE_UNSIGNED (type)
4692 ? (code == MIN_EXPR ? umin_optab : umax_optab)
4693 : (code == MIN_EXPR ? smin_optab : smax_optab));
4694
4695 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
4696 OPTAB_WIDEN);
4697 if (temp != 0)
4698 return temp;
4699
4700 if (target != op0)
4701 emit_move_insn (target, op0);
4702 op0 = gen_label_rtx ();
4703 if (code == MAX_EXPR)
4704 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
906c4e36
RK
4705 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
4706 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
bbf6f052
RK
4707 else
4708 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
906c4e36
RK
4709 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
4710 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
bbf6f052
RK
4711 if (temp == const0_rtx)
4712 emit_move_insn (target, op1);
4713 else if (temp != const_true_rtx)
4714 {
4715 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
4716 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
4717 else
4718 abort ();
4719 emit_move_insn (target, op1);
4720 }
4721 emit_label (op0);
4722 return target;
4723
4724/* ??? Can optimize when the operand of this is a bitwise operation,
4725 by using a different bitwise operation. */
4726 case BIT_NOT_EXPR:
4727 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4728 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
4729 if (temp == 0)
4730 abort ();
4731 return temp;
4732
4733 case FFS_EXPR:
4734 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4735 temp = expand_unop (mode, ffs_optab, op0, target, 1);
4736 if (temp == 0)
4737 abort ();
4738 return temp;
4739
4740/* ??? Can optimize bitwise operations with one arg constant.
4741 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
4742 and (a bitwise1 b) bitwise2 b (etc)
4743 but that is probably not worthwhile. */
4744
4745/* BIT_AND_EXPR is for bitwise anding.
4746 TRUTH_AND_EXPR is for anding two boolean values
4747 when we want in all cases to compute both of them.
4748 In general it is fastest to do TRUTH_AND_EXPR by
4749 computing both operands as actual zero-or-1 values
4750 and then bitwise anding. In cases where there cannot
4751 be any side effects, better code would be made by
4752 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR;
4753 but the question is how to recognize those cases. */
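/* E.g. (illustrative) TRUTH_AND_EXPR of (a < b) and (c < d) is compiled
   as the bitwise AND of two store-flag (0-or-1) results, evaluating
   both comparisons unconditionally, where TRUTH_ANDIF_EXPR would
   instead branch around the second comparison.  */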
4754
4755 case TRUTH_AND_EXPR:
4756 case BIT_AND_EXPR:
4757 this_optab = and_optab;
4758 goto binop;
4759
4760/* See comment above about TRUTH_AND_EXPR; it applies here too. */
4761 case TRUTH_OR_EXPR:
4762 case BIT_IOR_EXPR:
4763 this_optab = ior_optab;
4764 goto binop;
4765
874726a8 4766 case TRUTH_XOR_EXPR:
bbf6f052
RK
4767 case BIT_XOR_EXPR:
4768 this_optab = xor_optab;
4769 goto binop;
4770
4771 case LSHIFT_EXPR:
4772 case RSHIFT_EXPR:
4773 case LROTATE_EXPR:
4774 case RROTATE_EXPR:
4775 preexpand_calls (exp);
4776 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
4777 subtarget = 0;
4778 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
4779 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
4780 unsignedp);
4781
4782/* Could determine the answer when only additive constants differ.
4783 Also, the addition of one can be handled by changing the condition. */
4784 case LT_EXPR:
4785 case LE_EXPR:
4786 case GT_EXPR:
4787 case GE_EXPR:
4788 case EQ_EXPR:
4789 case NE_EXPR:
4790 preexpand_calls (exp);
4791 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
4792 if (temp != 0)
4793 return temp;
4794 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
4795 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
4796 && original_target
4797 && GET_CODE (original_target) == REG
4798 && (GET_MODE (original_target)
4799 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
4800 {
4801 temp = expand_expr (TREE_OPERAND (exp, 0), original_target, VOIDmode, 0);
4802 if (temp != original_target)
4803 temp = copy_to_reg (temp);
4804 op1 = gen_label_rtx ();
906c4e36 4805 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
bbf6f052
RK
4806 GET_MODE (temp), unsignedp, 0);
4807 emit_jump_insn (gen_beq (op1));
4808 emit_move_insn (temp, const1_rtx);
4809 emit_label (op1);
4810 return temp;
4811 }
4812 /* If no set-flag instruction, must generate a conditional
4813 store into a temporary variable. Drop through
4814 and handle this like && and ||. */
4815
4816 case TRUTH_ANDIF_EXPR:
4817 case TRUTH_ORIF_EXPR:
4818 if (target == 0 || ! safe_from_p (target, exp)
4819 /* Make sure we don't have a hard reg (such as function's return
4820 value) live across basic blocks, if not optimizing. */
4821 || (!optimize && GET_CODE (target) == REG
4822 && REGNO (target) < FIRST_PSEUDO_REGISTER))
4823 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
4824 emit_clr_insn (target);
4825 op1 = gen_label_rtx ();
4826 jumpifnot (exp, op1);
4827 emit_0_to_1_insn (target);
4828 emit_label (op1);
4829 return target;
4830
4831 case TRUTH_NOT_EXPR:
4832 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
4833 /* The parser is careful to generate TRUTH_NOT_EXPR
4834 only with operands that are always zero or one. */
906c4e36 4835 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
4836 target, 1, OPTAB_LIB_WIDEN);
4837 if (temp == 0)
4838 abort ();
4839 return temp;
4840
4841 case COMPOUND_EXPR:
4842 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4843 emit_queue ();
4844 return expand_expr (TREE_OPERAND (exp, 1),
4845 (ignore ? const0_rtx : target),
4846 VOIDmode, 0);
4847
4848 case COND_EXPR:
4849 {
4850 /* Note that COND_EXPRs whose type is a structure or union
4851 are required to be constructed to contain assignments to
4852 a temporary variable, so that we can evaluate them here
4853 for side effect only. If type is void, we must do likewise. */
4854
4855 /* If an arm of the branch requires a cleanup,
4856 only that cleanup is performed. */
4857
4858 tree singleton = 0;
4859 tree binary_op = 0, unary_op = 0;
4860 tree old_cleanups = cleanups_this_call;
4861 cleanups_this_call = 0;
4862
4863 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
4864 convert it to our mode, if necessary. */
4865 if (integer_onep (TREE_OPERAND (exp, 1))
4866 && integer_zerop (TREE_OPERAND (exp, 2))
4867 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4868 {
4869 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
4870 if (GET_MODE (op0) == mode)
4871 return op0;
4872 if (target == 0)
4873 target = gen_reg_rtx (mode);
4874 convert_move (target, op0, unsignedp);
4875 return target;
4876 }
4877
4878 /* If we are not to produce a result, we have no target. Otherwise,
4879 if a target was specified use it; it will not be used as an
4880 intermediate target unless it is safe. If no target, use a
4881 temporary. */
4882
4883 if (mode == VOIDmode || ignore)
4884 temp = 0;
4885 else if (original_target
4886 && safe_from_p (original_target, TREE_OPERAND (exp, 0)))
4887 temp = original_target;
4888 else if (mode == BLKmode)
4889 {
4890 if (TYPE_SIZE (type) == 0
4891 || TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST)
4892 abort ();
4893 temp = assign_stack_temp (BLKmode,
4894 (TREE_INT_CST_LOW (TYPE_SIZE (type))
4895 + BITS_PER_UNIT - 1)
4896 / BITS_PER_UNIT, 0);
4897 }
4898 else
4899 temp = gen_reg_rtx (mode);
4900
4901 /* Check for X ? A + B : A. If we have this, we can copy
4902 A to the output and conditionally add B. Similarly for unary
4903 operations. Don't do this if X has side-effects because
4904 those side effects might affect A or B and the "?" operation is
4905 a sequence point in ANSI. (We test for side effects later.) */
4906
4907 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
4908 && operand_equal_p (TREE_OPERAND (exp, 2),
4909 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4910 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
4911 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
4912 && operand_equal_p (TREE_OPERAND (exp, 1),
4913 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4914 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
4915 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
4916 && operand_equal_p (TREE_OPERAND (exp, 2),
4917 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
4918 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
4919 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
4920 && operand_equal_p (TREE_OPERAND (exp, 1),
4921 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
4922 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
4923
4924 /* If we had X ? A + 1 : A and we can do the test of X as a store-flag
4925 operation, do this as A + (X != 0). Similarly for other simple
4926 binary operators. */
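	 /* E.g. (illustrative) y = (a < b ? n + 1 : n) is computed as
	    n + (a < b), using the comparison's 0-or-1 store-flag value
	    in place of a conditional jump; with BIT_IOR_EXPR it would
	    be n | (a < b).  */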
4927 if (singleton && binary_op
4928 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
4929 && (TREE_CODE (binary_op) == PLUS_EXPR
4930 || TREE_CODE (binary_op) == MINUS_EXPR
4931 || TREE_CODE (binary_op) == BIT_IOR_EXPR
4932 || TREE_CODE (binary_op) == BIT_XOR_EXPR
4933 || TREE_CODE (binary_op) == BIT_AND_EXPR)
4934 && integer_onep (TREE_OPERAND (binary_op, 1))
4935 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
4936 {
4937 rtx result;
4938 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
4939 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
4940 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
4941 : TREE_CODE (binary_op) == BIT_XOR_EXPR ? xor_optab
4942 : and_optab);
4943
4944 /* If we had X ? A : A + 1, do this as A + (X == 0).
4945
4946 We have to invert the truth value here and then put it
4947 back later if do_store_flag fails. We cannot simply copy
4948 TREE_OPERAND (exp, 0) to another variable and modify that
4949 because invert_truthvalue can modify the tree pointed to
4950 by its argument. */
4951 if (singleton == TREE_OPERAND (exp, 1))
4952 TREE_OPERAND (exp, 0)
4953 = invert_truthvalue (TREE_OPERAND (exp, 0));
4954
4955 result = do_store_flag (TREE_OPERAND (exp, 0),
906c4e36
RK
4956 (safe_from_p (temp, singleton)
4957 ? temp : NULL_RTX),
bbf6f052
RK
4958 mode, BRANCH_COST <= 1);
4959
4960 if (result)
4961 {
906c4e36 4962 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4963 return expand_binop (mode, boptab, op1, result, temp,
4964 unsignedp, OPTAB_LIB_WIDEN);
4965 }
4966 else if (singleton == TREE_OPERAND (exp, 1))
4967 TREE_OPERAND (exp, 0)
4968 = invert_truthvalue (TREE_OPERAND (exp, 0));
4969 }
4970
4971 NO_DEFER_POP;
4972 op0 = gen_label_rtx ();
4973
4974 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
4975 {
4976 if (temp != 0)
4977 {
4978 /* If the target conflicts with the other operand of the
4979 binary op, we can't use it. Also, we can't use the target
4980 if it is a hard register, because evaluating the condition
4981 might clobber it. */
4982 if ((binary_op
4983 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
4984 || (GET_CODE (temp) == REG
4985 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
4986 temp = gen_reg_rtx (mode);
4987 store_expr (singleton, temp, 0);
4988 }
4989 else
906c4e36
RK
4990 expand_expr (singleton,
4991 ignore ? const1_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
4992 if (cleanups_this_call)
4993 {
4994 sorry ("aggregate value in COND_EXPR");
4995 cleanups_this_call = 0;
4996 }
4997 if (singleton == TREE_OPERAND (exp, 1))
4998 jumpif (TREE_OPERAND (exp, 0), op0);
4999 else
5000 jumpifnot (TREE_OPERAND (exp, 0), op0);
5001
5002 if (binary_op && temp == 0)
5003 /* Just touch the other operand. */
5004 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 5005 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5006 else if (binary_op)
5007 store_expr (build (TREE_CODE (binary_op), type,
5008 make_tree (type, temp),
5009 TREE_OPERAND (binary_op, 1)),
5010 temp, 0);
5011 else
5012 store_expr (build1 (TREE_CODE (unary_op), type,
5013 make_tree (type, temp)),
5014 temp, 0);
5015 op1 = op0;
5016 }
5017#if 0
5018 /* This is now done in jump.c and is better done there because it
5019 produces shorter register lifetimes. */
5020
5021 /* Check for both possibilities either constants or variables
5022 in registers (but not the same as the target!). If so, can
5023 save branches by assigning one, branching, and assigning the
5024 other. */
5025 else if (temp && GET_MODE (temp) != BLKmode
5026 && (TREE_CONSTANT (TREE_OPERAND (exp, 1))
5027 || ((TREE_CODE (TREE_OPERAND (exp, 1)) == PARM_DECL
5028 || TREE_CODE (TREE_OPERAND (exp, 1)) == VAR_DECL)
5029 && DECL_RTL (TREE_OPERAND (exp, 1))
5030 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 1))) == REG
5031 && DECL_RTL (TREE_OPERAND (exp, 1)) != temp))
5032 && (TREE_CONSTANT (TREE_OPERAND (exp, 2))
5033 || ((TREE_CODE (TREE_OPERAND (exp, 2)) == PARM_DECL
5034 || TREE_CODE (TREE_OPERAND (exp, 2)) == VAR_DECL)
5035 && DECL_RTL (TREE_OPERAND (exp, 2))
5036 && GET_CODE (DECL_RTL (TREE_OPERAND (exp, 2))) == REG
5037 && DECL_RTL (TREE_OPERAND (exp, 2)) != temp)))
5038 {
5039 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5040 temp = gen_reg_rtx (mode);
5041 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5042 jumpifnot (TREE_OPERAND (exp, 0), op0);
5043 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5044 op1 = op0;
5045 }
5046#endif
5047 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
5048 comparison operator. If we have one of these cases, set the
5049 output to A, branch on A (cse will merge these two references),
5050 then set the output to FOO. */
5051 else if (temp
5052 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5053 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5054 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5055 TREE_OPERAND (exp, 1), 0)
5056 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5057 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
5058 {
5059 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5060 temp = gen_reg_rtx (mode);
5061 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5062 jumpif (TREE_OPERAND (exp, 0), op0);
5063 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5064 op1 = op0;
5065 }
5066 else if (temp
5067 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
5068 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
5069 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
5070 TREE_OPERAND (exp, 2), 0)
5071 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
5072 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
5073 {
5074 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
5075 temp = gen_reg_rtx (mode);
5076 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5077 jumpifnot (TREE_OPERAND (exp, 0), op0);
5078 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5079 op1 = op0;
5080 }
5081 else
5082 {
5083 op1 = gen_label_rtx ();
5084 jumpifnot (TREE_OPERAND (exp, 0), op0);
5085 if (temp != 0)
5086 store_expr (TREE_OPERAND (exp, 1), temp, 0);
5087 else
906c4e36
RK
5088 expand_expr (TREE_OPERAND (exp, 1),
5089 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5090 if (cleanups_this_call)
5091 {
5092 sorry ("aggregate value in COND_EXPR");
5093 cleanups_this_call = 0;
5094 }
5095
5096 emit_queue ();
5097 emit_jump_insn (gen_jump (op1));
5098 emit_barrier ();
5099 emit_label (op0);
5100 if (temp != 0)
5101 store_expr (TREE_OPERAND (exp, 2), temp, 0);
5102 else
906c4e36
RK
5103 expand_expr (TREE_OPERAND (exp, 2),
5104 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5105 }
5106
5107 if (cleanups_this_call)
5108 {
5109 sorry ("aggregate value in COND_EXPR");
5110 cleanups_this_call = 0;
5111 }
5112
5113 emit_queue ();
5114 emit_label (op1);
5115 OK_DEFER_POP;
5116 cleanups_this_call = old_cleanups;
5117 return temp;
5118 }
5119
5120 case TARGET_EXPR:
5121 {
5122 /* Something needs to be initialized, but we didn't know
5123 where that thing was when building the tree. For example,
5124 it could be the return value of a function, or a parameter
5125 to a function which is laid down in the stack, or a temporary
5126 variable which must be passed by reference.
5127
5128 We guarantee that the expression will either be constructed
5129 or copied into our original target. */
5130
5131 tree slot = TREE_OPERAND (exp, 0);
5c062816 5132 tree exp1;
bbf6f052
RK
5133
5134 if (TREE_CODE (slot) != VAR_DECL)
5135 abort ();
5136
5137 if (target == 0)
5138 {
5139 if (DECL_RTL (slot) != 0)
ac993f4f
MS
5140 {
5141 target = DECL_RTL (slot);
5c062816 5142 /* If we have already expanded the slot, don't do
ac993f4f 5143 it again. (mrs) */
5c062816
MS
5144 if (TREE_OPERAND (exp, 1) == NULL_TREE)
5145 return target;
ac993f4f 5146 }
bbf6f052
RK
5147 else
5148 {
5149 target = assign_stack_temp (mode, int_size_in_bytes (type), 0);
5150 /* All temp slots at this level must not conflict. */
5151 preserve_temp_slots (target);
5152 DECL_RTL (slot) = target;
5153 }
5154
5155#if 0
ac993f4f
MS
5156 /* I bet this needs to be done, and I bet that it needs to
5157 be above, inside the else clause. The reason is
5158 simple: how else is it going to get cleaned up? (mrs)
5159
5160 The reason it probably did not work before, and was
5161 commented out, is that this was re-expanding already-
5162 expanded target_exprs (target == 0 and DECL_RTL (slot)
5163 != 0), also cleaning them up many times as well. :-( */
5164
bbf6f052
RK
5165 /* Since SLOT is not known to the called function
5166 to belong to its stack frame, we must build an explicit
5167 cleanup. This case occurs when we must build up a reference
5168 to pass the reference as an argument. In this case,
5169 it is very likely that such a reference need not be
5170 built here. */
5171
5172 if (TREE_OPERAND (exp, 2) == 0)
5173 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
5174 if (TREE_OPERAND (exp, 2))
906c4e36
RK
5175 cleanups_this_call = tree_cons (NULL_TREE, TREE_OPERAND (exp, 2),
5176 cleanups_this_call);
bbf6f052
RK
5177#endif
5178 }
5179 else
5180 {
5181 /* This case does occur when expanding a parameter which
5182 needs to be constructed on the stack. The target
5183 is the actual stack address that we want to initialize.
5184 The function we call will perform the cleanup in this case. */
5185
5186 DECL_RTL (slot) = target;
5187 }
5188
5c062816
MS
5189 exp1 = TREE_OPERAND (exp, 1);
5190 /* Mark it as expanded. */
5191 TREE_OPERAND (exp, 1) = NULL_TREE;
5192
5193 return expand_expr (exp1, target, tmode, modifier);
bbf6f052
RK
5194 }
5195
5196 case INIT_EXPR:
5197 {
5198 tree lhs = TREE_OPERAND (exp, 0);
5199 tree rhs = TREE_OPERAND (exp, 1);
5200 tree noncopied_parts = 0;
5201 tree lhs_type = TREE_TYPE (lhs);
5202
5203 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5204 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
5205 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
5206 TYPE_NONCOPIED_PARTS (lhs_type));
5207 while (noncopied_parts != 0)
5208 {
5209 expand_assignment (TREE_VALUE (noncopied_parts),
5210 TREE_PURPOSE (noncopied_parts), 0, 0);
5211 noncopied_parts = TREE_CHAIN (noncopied_parts);
5212 }
5213 return temp;
5214 }
5215
5216 case MODIFY_EXPR:
5217 {
5218 /* If lhs is complex, expand calls in rhs before computing it.
5219 That's so we don't compute a pointer and save it over a call.
5220 If lhs is simple, compute it first so we can give it as a
5221 target if the rhs is just a call. This avoids an extra temp and copy,
5222 and that prevents a partial-subsumption which makes bad code.
5223 Actually we could treat component_ref's of vars like vars. */
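	/* An illustrative case (not in the original source): for
	   `a[i] = f ();' the call to f is expanded before the address
	   of a[i], so no pointer need be saved across the call; for
	   plain `v = f ();' V itself can serve as f's return target,
	   avoiding the extra temp and copy.  */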
5224
5225 tree lhs = TREE_OPERAND (exp, 0);
5226 tree rhs = TREE_OPERAND (exp, 1);
5227 tree noncopied_parts = 0;
5228 tree lhs_type = TREE_TYPE (lhs);
5229
5230 temp = 0;
5231
5232 if (TREE_CODE (lhs) != VAR_DECL
5233 && TREE_CODE (lhs) != RESULT_DECL
5234 && TREE_CODE (lhs) != PARM_DECL)
5235 preexpand_calls (exp);
5236
5237 /* Check for |= or &= of a bitfield of size 1 into another bitfield
5238 of size 1. In this case (unless we need the result of the
5239 assignment) we can do this more efficiently with a
5240 test followed by an assignment, if necessary.
5241
5242 ??? At this point, we can't get a BIT_FIELD_REF here. But if
5243 things change so we do, this code should be enhanced to
5244 support it. */
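	/* Worked example (illustrative, not in the original source):
	   for `s.a |= s.b;' with two 1-bit fields and the result
	   ignored, the code below emits the equivalent of
	       if (s.b) s.a = 1;
	   instead of a load/or/store of the destination bitfield.  */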
5245 if (ignore
5246 && TREE_CODE (lhs) == COMPONENT_REF
5247 && (TREE_CODE (rhs) == BIT_IOR_EXPR
5248 || TREE_CODE (rhs) == BIT_AND_EXPR)
5249 && TREE_OPERAND (rhs, 0) == lhs
5250 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
5251 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
5252 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
5253 {
5254 rtx label = gen_label_rtx ();
5255
5256 do_jump (TREE_OPERAND (rhs, 1),
5257 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
5258 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
5259 expand_assignment (lhs, convert (TREE_TYPE (rhs),
5260 (TREE_CODE (rhs) == BIT_IOR_EXPR
5261 ? integer_one_node
5262 : integer_zero_node)),
5263 0, 0);
e7c33f54 5264 do_pending_stack_adjust ();
bbf6f052
RK
5265 emit_label (label);
5266 return const0_rtx;
5267 }
5268
5269 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
5270 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
5271 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
5272 TYPE_NONCOPIED_PARTS (lhs_type));
5273
5274 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
5275 while (noncopied_parts != 0)
5276 {
5277 expand_assignment (TREE_PURPOSE (noncopied_parts),
5278 TREE_VALUE (noncopied_parts), 0, 0);
5279 noncopied_parts = TREE_CHAIN (noncopied_parts);
5280 }
5281 return temp;
5282 }
5283
5284 case PREINCREMENT_EXPR:
5285 case PREDECREMENT_EXPR:
5286 return expand_increment (exp, 0);
5287
5288 case POSTINCREMENT_EXPR:
5289 case POSTDECREMENT_EXPR:
5290 /* Faster to treat as pre-increment if result is not used. */
5291 return expand_increment (exp, ! ignore);
5292
5293 case ADDR_EXPR:
5294 /* Are we taking the address of a nested function? */
5295 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
5296 && decl_function_context (TREE_OPERAND (exp, 0)) != 0)
5297 {
5298 op0 = trampoline_address (TREE_OPERAND (exp, 0));
5299 op0 = force_operand (op0, target);
5300 }
5301 else
5302 {
906c4e36 5303 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
bbf6f052
RK
5304 (modifier == EXPAND_INITIALIZER
5305 ? modifier : EXPAND_CONST_ADDRESS));
5306 if (GET_CODE (op0) != MEM)
5307 abort ();
5308
5309 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
5310 return XEXP (op0, 0);
5311 op0 = force_operand (XEXP (op0, 0), target);
5312 }
5313 if (flag_force_addr && GET_CODE (op0) != REG)
5314 return force_reg (Pmode, op0);
5315 return op0;
5316
5317 case ENTRY_VALUE_EXPR:
5318 abort ();
5319
7308a047
RS
5320 /* COMPLEX type for Extended Pascal & Fortran */
5321 case COMPLEX_EXPR:
5322 {
5323 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5324
5325 rtx prev;
5326
5327 /* Get the rtx of the operands. */
5328 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5329 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
5330
5331 if (! target)
5332 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5333
5334 prev = get_last_insn ();
5335
5336 /* Tell flow that the whole of the destination is being set. */
5337 if (GET_CODE (target) == REG)
5338 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5339
5340 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
5341 emit_move_insn (gen_realpart (mode, target), op0);
5342 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047
RS
5343
5344 /* Complex construction should appear as a single unit. */
5345 group_insns (prev);
5346
5347 return target;
5348 }
5349
5350 case REALPART_EXPR:
2d7050fd
RS
5351 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5352 return gen_realpart (mode, op0);
7308a047
RS
5353
5354 case IMAGPART_EXPR:
2d7050fd
RS
5355 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5356 return gen_imagpart (mode, op0);
7308a047
RS
5357
5358 case CONJ_EXPR:
5359 {
5360 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
5361 rtx imag_t;
5362 rtx prev;
5363
5364 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
5365
5366 if (! target)
5367 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5368
5369 prev = get_last_insn ();
5370
5371 /* Tell flow that the whole of the destination is being set. */
5372 if (GET_CODE (target) == REG)
5373 emit_insn (gen_rtx (CLOBBER, VOIDmode, target));
5374
5375 /* Store the realpart and the negated imagpart to target. */
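	/* That is, conj(a + b*i) == a - b*i.  */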
2d7050fd 5376 emit_move_insn (gen_realpart (mode, target), gen_realpart (mode, op0));
7308a047 5377
2d7050fd 5378 imag_t = gen_imagpart (mode, target);
7308a047 5379 temp = expand_unop (mode, neg_optab,
2d7050fd 5380 gen_imagpart (mode, op0), imag_t, 0);
7308a047
RS
5381 if (temp != imag_t)
5382 emit_move_insn (imag_t, temp);
5383
5384 /* Conjugate should appear as a single unit. */
5385 group_insns (prev);
5386
5387 return target;
5388 }
5389
bbf6f052
RK
5390 case ERROR_MARK:
5391 return const0_rtx;
5392
5393 default:
5394 return (*lang_expand_expr) (exp, target, tmode, modifier);
5395 }
5396
5397 /* Here to do an ordinary binary operator, generating an instruction
5398 from the optab already placed in `this_optab'. */
5399 binop:
5400 preexpand_calls (exp);
5401 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
5402 subtarget = 0;
5403 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 5404 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5405 binop2:
5406 temp = expand_binop (mode, this_optab, op0, op1, target,
5407 unsignedp, OPTAB_LIB_WIDEN);
5408 if (temp == 0)
5409 abort ();
5410 return temp;
5411}
5412\f
e87b4f3f
RS
5413/* Return the alignment in bits of EXP, a pointer valued expression.
5414 But don't return more than MAX_ALIGN no matter what.
bbf6f052
RK
5415 The alignment returned is, by default, the alignment of the thing that
5416 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
5417
5418 Otherwise, look at the expression to see if we can do better, i.e., if the
5419 expression is actually pointing at an object whose alignment is tighter. */
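/* Illustrative example (not in the original source): for
   `(char *) &d' where D is a double, the ADDR_EXPR case below
   consults DECL_ALIGN (d), which is tighter than the byte alignment
   the char * type alone would promise.  */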
5420
5421static int
5422get_pointer_alignment (exp, max_align)
5423 tree exp;
5424 unsigned max_align;
5425{
5426 unsigned align, inner;
5427
5428 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5429 return 0;
5430
5431 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5432 align = MIN (align, max_align);
5433
5434 while (1)
5435 {
5436 switch (TREE_CODE (exp))
5437 {
5438 case NOP_EXPR:
5439 case CONVERT_EXPR:
5440 case NON_LVALUE_EXPR:
5441 exp = TREE_OPERAND (exp, 0);
5442 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
5443 return align;
5444 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
5445 inner = MIN (inner, max_align);
5446 align = MAX (align, inner);
5447 break;
5448
5449 case PLUS_EXPR:
5450 /* If sum of pointer + int, restrict our maximum alignment to that
5451 imposed by the integer. If not, we can't do any better than
5452 ALIGN. */
5453 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
5454 return align;
5455
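	  /* Worked example (illustrative): with MAX_ALIGN of 32 bits
	     and a byte offset of 2 (16 bits), the loop below halves
	     MAX_ALIGN until 16 & (MAX_ALIGN - 1) == 0, so at best
	     16-bit alignment can be claimed.  */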
e87b4f3f
RS
5456 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
5457 & (max_align - 1))
5458 != 0)
bbf6f052
RK
5459 max_align >>= 1;
5460
5461 exp = TREE_OPERAND (exp, 0);
5462 break;
5463
5464 case ADDR_EXPR:
5465 /* See what we are pointing at and look at its alignment. */
5466 exp = TREE_OPERAND (exp, 0);
e7c33f54
RK
5467 if (TREE_CODE (exp) == FUNCTION_DECL)
5468 align = MAX (align, FUNCTION_BOUNDARY);
5469 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
bbf6f052
RK
5470 align = MAX (align, DECL_ALIGN (exp));
5471#ifdef CONSTANT_ALIGNMENT
5472 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
5473 align = CONSTANT_ALIGNMENT (exp, align);
5474#endif
5475 return MIN (align, max_align);
5476
5477 default:
5478 return align;
5479 }
5480 }
5481}
5482\f
5483/* Return the tree node and offset if a given argument corresponds to
5484 a string constant. */
5485
5486static tree
5487string_constant (arg, ptr_offset)
5488 tree arg;
5489 tree *ptr_offset;
5490{
5491 STRIP_NOPS (arg);
5492
5493 if (TREE_CODE (arg) == ADDR_EXPR
5494 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
5495 {
5496 *ptr_offset = integer_zero_node;
5497 return TREE_OPERAND (arg, 0);
5498 }
5499 else if (TREE_CODE (arg) == PLUS_EXPR)
5500 {
5501 tree arg0 = TREE_OPERAND (arg, 0);
5502 tree arg1 = TREE_OPERAND (arg, 1);
5503
5504 STRIP_NOPS (arg0);
5505 STRIP_NOPS (arg1);
5506
5507 if (TREE_CODE (arg0) == ADDR_EXPR
5508 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
5509 {
5510 *ptr_offset = arg1;
5511 return TREE_OPERAND (arg0, 0);
5512 }
5513 else if (TREE_CODE (arg1) == ADDR_EXPR
5514 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
5515 {
5516 *ptr_offset = arg0;
5517 return TREE_OPERAND (arg1, 0);
5518 }
5519 }
5520
5521 return 0;
5522}
5523
5524/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
5525 way, because the string could contain a zero byte in the middle.
5526 TREE_STRING_LENGTH is the size of the character array, not the string.
5527
5528 Unfortunately, string_constant can't access the values of const char
5529 arrays with initializers, so neither can we here. */
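/* Illustrative example (not in the original source): for the
   constant "foo\0bar", c_strlen returns 3, while TREE_STRING_LENGTH
   is 8, the size of the whole character array including the
   embedded and trailing nulls.  */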
5530
5531static tree
5532c_strlen (src)
5533 tree src;
5534{
5535 tree offset_node;
5536 int offset, max;
5537 char *ptr;
5538
5539 src = string_constant (src, &offset_node);
5540 if (src == 0)
5541 return 0;
5542 max = TREE_STRING_LENGTH (src);
5543 ptr = TREE_STRING_POINTER (src);
5544 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
5545 {
5546 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
5547 compute the offset to the following null if we don't know where to
5548 start searching for it. */
5549 int i;
5550 for (i = 0; i < max; i++)
5551 if (ptr[i] == 0)
5552 return 0;
5553 /* We don't know the starting offset, but we do know that the string
5554 has no internal zero bytes. We can assume that the offset falls
5555 within the bounds of the string; otherwise, the programmer deserves
5556 what he gets. Subtract the offset from the length of the string,
5557 and return that. */
5558 /* This would perhaps not be valid if we were dealing with named
5559 arrays in addition to literal string constants. */
5560 return size_binop (MINUS_EXPR, size_int (max), offset_node);
5561 }
5562
5563 /* We have a known offset into the string. Start searching there for
5564 a null character. */
5565 if (offset_node == 0)
5566 offset = 0;
5567 else
5568 {
5569 /* Did we get a long long offset? If so, punt. */
5570 if (TREE_INT_CST_HIGH (offset_node) != 0)
5571 return 0;
5572 offset = TREE_INT_CST_LOW (offset_node);
5573 }
5574 /* If the offset is known to be out of bounds, warn, and call strlen at
5575 runtime. */
5576 if (offset < 0 || offset > max)
5577 {
5578 warning ("offset outside bounds of constant string");
5579 return 0;
5580 }
5581 /* Use strlen to search for the first zero byte. Since any strings
5582 constructed with build_string will have nulls appended, we win even
5583 if we get handed something like (char[4])"abcd".
5584
5585 Since OFFSET is our starting index into the string, no further
5586 calculation is needed. */
5587 return size_int (strlen (ptr + offset));
5588}
5589\f
5590/* Expand an expression EXP that calls a built-in function,
5591 with result going to TARGET if that's convenient
5592 (and in mode MODE if that's convenient).
5593 SUBTARGET may be used as the target for computing one of EXP's operands.
5594 IGNORE is nonzero if the value is to be ignored. */
5595
5596static rtx
5597expand_builtin (exp, target, subtarget, mode, ignore)
5598 tree exp;
5599 rtx target;
5600 rtx subtarget;
5601 enum machine_mode mode;
5602 int ignore;
5603{
5604 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
5605 tree arglist = TREE_OPERAND (exp, 1);
5606 rtx op0;
60bac6ea 5607 rtx lab1, insns;
bbf6f052 5608 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
1bbddf11 5609 optab builtin_optab;
bbf6f052
RK
5610
5611 switch (DECL_FUNCTION_CODE (fndecl))
5612 {
5613 case BUILT_IN_ABS:
5614 case BUILT_IN_LABS:
5615 case BUILT_IN_FABS:
5616 /* build_function_call changes these into ABS_EXPR. */
5617 abort ();
5618
1bbddf11
JVA
5619 case BUILT_IN_SIN:
5620 case BUILT_IN_COS:
e87b4f3f
RS
5621 case BUILT_IN_FSQRT:
5622 /* If not optimizing, call the library function. */
8c8a8e34 5623 if (! optimize)
e87b4f3f
RS
5624 break;
5625
5626 if (arglist == 0
19deaec9 5627 /* Arg could be wrong type if user redeclared this fcn wrong. */
e87b4f3f 5628 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
19deaec9 5629 return CONST0_RTX (TYPE_MODE (TREE_TYPE (exp)));
e87b4f3f 5630
db0e6d01
RS
5631 /* Stabilize and compute the argument. */
5632 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
5633 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
5634 {
5635 exp = copy_node (exp);
5636 arglist = copy_node (arglist);
5637 TREE_OPERAND (exp, 1) = arglist;
5638 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
5639 }
e87b4f3f 5640 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
e7c33f54
RK
5641
5642 /* Make a suitable register to place result in. */
5643 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
5644
c1f7c223 5645 emit_queue ();
8c8a8e34 5646 start_sequence ();
e7c33f54 5647
1bbddf11
JVA
5648 switch (DECL_FUNCTION_CODE (fndecl))
5649 {
5650 case BUILT_IN_SIN:
5651 builtin_optab = sin_optab; break;
5652 case BUILT_IN_COS:
5653 builtin_optab = cos_optab; break;
5654 case BUILT_IN_FSQRT:
5655 builtin_optab = sqrt_optab; break;
5656 default:
5657 abort ();
5658 }
5659
5660 /* Compute into TARGET.
e87b4f3f
RS
5661 Set TARGET to wherever the result comes back. */
5662 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
1bbddf11 5663 builtin_optab, op0, target, 0);
e7c33f54
RK
5664
5665 /* If we were unable to expand via the builtin, stop the
5666 sequence (without outputting the insns) and break, causing
5667 a call to the library function. */
e87b4f3f 5668 if (target == 0)
e7c33f54 5669 {
8c8a8e34 5670 end_sequence ();
e7c33f54
RK
5671 break;
5672 }
e87b4f3f 5673
60bac6ea
RS
5674 /* Check the results by default. But if flag_fast_math is turned on,
5675 then assume sqrt will always be called with valid arguments. */
5676
5677 if (! flag_fast_math)
5678 {
1bbddf11 5679 /* Don't define the builtin FP instructions
60bac6ea
RS
5680 if your machine is not IEEE. */
5681 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
5682 abort ();
5683
5684 lab1 = gen_label_rtx ();
5685
5686 /* Test the result; if it is NaN, set errno=EDOM because
5687 the argument was not in the domain. */
5688 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
5689 emit_jump_insn (gen_beq (lab1));
5690
5691#if TARGET_EDOM
5692 {
5693#ifdef GEN_ERRNO_RTX
5694 rtx errno_rtx = GEN_ERRNO_RTX;
5695#else
5696 rtx errno_rtx
5697 = gen_rtx (MEM, word_mode, gen_rtx (SYMBOL_REF, Pmode, "*errno"));
5698#endif
5699
5700 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
5701 }
5702#else
5703 /* We can't set errno=EDOM directly; let the library call do it.
5704 Pop the arguments right away in case the call gets deleted. */
5705 NO_DEFER_POP;
5706 expand_call (exp, target, 0);
5707 OK_DEFER_POP;
5708#endif
5709
5710 emit_label (lab1);
5711 }
e87b4f3f 5712
e7c33f54 5713 /* Output the entire sequence. */
8c8a8e34
JW
5714 insns = get_insns ();
5715 end_sequence ();
5716 emit_insns (insns);
e7c33f54
RK
5717
5718 return target;
5719
bbf6f052
RK
5720 case BUILT_IN_SAVEREGS:
5721 /* Don't do __builtin_saveregs more than once in a function.
5722 Save the result of the first call and reuse it. */
5723 if (saveregs_value != 0)
5724 return saveregs_value;
5725 {
5726 /* When this function is called, it means that registers must be
5727 saved on entry to this function. So we migrate the
5728 call to the first insn of this function. */
5729 rtx temp;
5730 rtx seq;
5731 rtx valreg, saved_valreg;
5732
5733 /* Now really call the function. `expand_call' does not call
5734 expand_builtin, so there is no danger of infinite recursion here. */
5735 start_sequence ();
5736
5737#ifdef EXPAND_BUILTIN_SAVEREGS
5738 /* Do whatever the machine needs done in this case. */
5739 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
5740#else
5741 /* The register where the function returns its value
5742 is likely to have something else in it, such as an argument.
5743 So preserve that register around the call. */
5744 if (value_mode != VOIDmode)
5745 {
5746 valreg = hard_libcall_value (value_mode);
5747 saved_valreg = gen_reg_rtx (value_mode);
5748 emit_move_insn (saved_valreg, valreg);
5749 }
5750
5751 /* Generate the call, putting the value in a pseudo. */
5752 temp = expand_call (exp, target, ignore);
5753
5754 if (value_mode != VOIDmode)
5755 emit_move_insn (valreg, saved_valreg);
5756#endif
5757
5758 seq = get_insns ();
5759 end_sequence ();
5760
5761 saveregs_value = temp;
5762
5763 /* This won't work inside a SEQUENCE--it really has to be
5764 at the start of the function. */
5765 if (in_sequence_p ())
5766 {
5767 /* Better to do this than to crash. */
5768 error ("`va_start' used within `({...})'");
5769 return temp;
5770 }
5771
5772 /* Put the sequence after the NOTE that starts the function. */
5773 emit_insns_before (seq, NEXT_INSN (get_insns ()));
5774 return temp;
5775 }
5776
5777 /* __builtin_args_info (N) returns word N of the arg space info
5778 for the current function. The number and meanings of words
5779 are controlled by the definition of CUMULATIVE_ARGS. */
5780 case BUILT_IN_ARGS_INFO:
5781 {
5782 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
5783 int i;
5784 int *word_ptr = (int *) &current_function_args_info;
5785 tree type, elts, result;
5786
5787 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
5788 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
5789 __FILE__, __LINE__);
5790
5791 if (arglist != 0)
5792 {
5793 tree arg = TREE_VALUE (arglist);
5794 if (TREE_CODE (arg) != INTEGER_CST)
5795 error ("argument of __builtin_args_info must be constant");
5796 else
5797 {
5798 int wordnum = TREE_INT_CST_LOW (arg);
5799
5800 if (wordnum < 0 || wordnum >= nwords)
5801 error ("argument of __builtin_args_info out of range");
5802 else
906c4e36 5803 return GEN_INT (word_ptr[wordnum]);
bbf6f052
RK
5804 }
5805 }
5806 else
5807 error ("missing argument in __builtin_args_info");
5808
5809 return const0_rtx;
5810
5811#if 0
5812 for (i = 0; i < nwords; i++)
5813 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
5814
5815 type = build_array_type (integer_type_node,
5816 build_index_type (build_int_2 (nwords, 0)));
5817 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
5818 TREE_CONSTANT (result) = 1;
5819 TREE_STATIC (result) = 1;
5820 result = build (INDIRECT_REF, build_pointer_type (type), result);
5821 TREE_CONSTANT (result) = 1;
906c4e36 5822 return expand_expr (result, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5823#endif
5824 }
5825
5826 /* Return the address of the first anonymous stack arg. */
5827 case BUILT_IN_NEXT_ARG:
5828 {
5829 tree fntype = TREE_TYPE (current_function_decl);
5830 if (!(TYPE_ARG_TYPES (fntype) != 0
5831 && (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
5832 != void_type_node)))
5833 {
5834 error ("`va_start' used in function with fixed args");
5835 return const0_rtx;
5836 }
5837 }
5838
5839 return expand_binop (Pmode, add_optab,
5840 current_function_internal_arg_pointer,
5841 current_function_arg_offset_rtx,
906c4e36 5842 NULL_RTX, 0, OPTAB_LIB_WIDEN);
bbf6f052
RK
5843
5844 case BUILT_IN_CLASSIFY_TYPE:
5845 if (arglist != 0)
5846 {
5847 tree type = TREE_TYPE (TREE_VALUE (arglist));
5848 enum tree_code code = TREE_CODE (type);
5849 if (code == VOID_TYPE)
906c4e36 5850 return GEN_INT (void_type_class);
bbf6f052 5851 if (code == INTEGER_TYPE)
906c4e36 5852 return GEN_INT (integer_type_class);
bbf6f052 5853 if (code == CHAR_TYPE)
906c4e36 5854 return GEN_INT (char_type_class);
bbf6f052 5855 if (code == ENUMERAL_TYPE)
906c4e36 5856 return GEN_INT (enumeral_type_class);
bbf6f052 5857 if (code == BOOLEAN_TYPE)
906c4e36 5858 return GEN_INT (boolean_type_class);
bbf6f052 5859 if (code == POINTER_TYPE)
906c4e36 5860 return GEN_INT (pointer_type_class);
bbf6f052 5861 if (code == REFERENCE_TYPE)
906c4e36 5862 return GEN_INT (reference_type_class);
bbf6f052 5863 if (code == OFFSET_TYPE)
906c4e36 5864 return GEN_INT (offset_type_class);
bbf6f052 5865 if (code == REAL_TYPE)
906c4e36 5866 return GEN_INT (real_type_class);
bbf6f052 5867 if (code == COMPLEX_TYPE)
906c4e36 5868 return GEN_INT (complex_type_class);
bbf6f052 5869 if (code == FUNCTION_TYPE)
906c4e36 5870 return GEN_INT (function_type_class);
bbf6f052 5871 if (code == METHOD_TYPE)
906c4e36 5872 return GEN_INT (method_type_class);
bbf6f052 5873 if (code == RECORD_TYPE)
906c4e36 5874 return GEN_INT (record_type_class);
bbf6f052 5875 if (code == UNION_TYPE)
906c4e36 5876 return GEN_INT (union_type_class);
bbf6f052 5877 if (code == ARRAY_TYPE)
906c4e36 5878 return GEN_INT (array_type_class);
bbf6f052 5879 if (code == STRING_TYPE)
906c4e36 5880 return GEN_INT (string_type_class);
bbf6f052 5881 if (code == SET_TYPE)
906c4e36 5882 return GEN_INT (set_type_class);
bbf6f052 5883 if (code == FILE_TYPE)
906c4e36 5884 return GEN_INT (file_type_class);
bbf6f052 5885 if (code == LANG_TYPE)
906c4e36 5886 return GEN_INT (lang_type_class);
bbf6f052 5887 }
906c4e36 5888 return GEN_INT (no_type_class);
bbf6f052
RK
5889
5890 case BUILT_IN_CONSTANT_P:
5891 if (arglist == 0)
5892 return const0_rtx;
5893 else
cda0ec81 5894 return (TREE_CODE_CLASS (TREE_CODE (TREE_VALUE (arglist))) == 'c'
bbf6f052
RK
5895 ? const1_rtx : const0_rtx);
5896
5897 case BUILT_IN_FRAME_ADDRESS:
5898 /* The argument must be a nonnegative integer constant.
5899 It counts the number of frames to scan up the stack.
5900 The value is the address of that frame. */
5901 case BUILT_IN_RETURN_ADDRESS:
5902 /* The argument must be a nonnegative integer constant.
5903 It counts the number of frames to scan up the stack.
5904 The value is the return address saved in that frame. */
5905 if (arglist == 0)
5906 /* Warning about missing arg was already issued. */
5907 return const0_rtx;
5908 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST)
5909 {
5910 error ("invalid arg to __builtin_return_address");
5911 return const0_rtx;
5912 }
5913 else if (tree_int_cst_lt (TREE_VALUE (arglist), integer_zero_node))
5914 {
5915 error ("invalid arg to __builtin_return_address");
5916 return const0_rtx;
5917 }
5918 else
5919 {
5920 int count = TREE_INT_CST_LOW (TREE_VALUE (arglist));
5921 rtx tem = frame_pointer_rtx;
5922 int i;
5923
5924 /* Scan back COUNT frames to the specified frame. */
5925 for (i = 0; i < count; i++)
5926 {
5927 /* Assume the dynamic chain pointer is in the word that
5928 the frame address points to, unless otherwise specified. */
5929#ifdef DYNAMIC_CHAIN_ADDRESS
5930 tem = DYNAMIC_CHAIN_ADDRESS (tem);
5931#endif
5932 tem = memory_address (Pmode, tem);
5933 tem = copy_to_reg (gen_rtx (MEM, Pmode, tem));
5934 }
5935
5936 /* For __builtin_frame_address, return what we've got. */
5937 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
5938 return tem;
5939
5940 /* For __builtin_return_address,
5941 Get the return address from that frame. */
5942#ifdef RETURN_ADDR_RTX
5943 return RETURN_ADDR_RTX (count, tem);
5944#else
5945 tem = memory_address (Pmode,
5946 plus_constant (tem, GET_MODE_SIZE (Pmode)));
5947 return copy_to_reg (gen_rtx (MEM, Pmode, tem));
5948#endif
5949 }
5950
5951 case BUILT_IN_ALLOCA:
5952 if (arglist == 0
5953 /* Arg could be non-integer if user redeclared this fcn wrong. */
5954 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5955 return const0_rtx;
5956 current_function_calls_alloca = 1;
5957 /* Compute the argument. */
906c4e36 5958 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
5959
5960 /* Allocate the desired space. */
8c8a8e34 5961 target = allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
bbf6f052
RK
5962
5963 /* Record the new stack level for nonlocal gotos. */
6dc42e49 5964 if (nonlocal_goto_handler_slot != 0)
906c4e36 5965 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
bbf6f052
RK
5966 return target;
5967
5968 case BUILT_IN_FFS:
5969 /* If not optimizing, call the library function. */
5970 if (!optimize)
5971 break;
5972
5973 if (arglist == 0
5974 /* Arg could be non-integer if user redeclared this fcn wrong. */
5975 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
5976 return const0_rtx;
5977
5978 /* Compute the argument. */
5979 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
5980 /* Compute ffs, into TARGET if possible.
5981 Set TARGET to wherever the result comes back. */
5982 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
5983 ffs_optab, op0, target, 1);
5984 if (target == 0)
5985 abort ();
5986 return target;
5987
5988 case BUILT_IN_STRLEN:
5989 /* If not optimizing, call the library function. */
5990 if (!optimize)
5991 break;
5992
5993 if (arglist == 0
5994 /* Arg could be non-pointer if user redeclared this fcn wrong. */
5995 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
5996 return const0_rtx;
5997 else
5998 {
e7c33f54
RK
5999 tree src = TREE_VALUE (arglist);
6000 tree len = c_strlen (src);
bbf6f052 6001
e7c33f54
RK
6002 int align
6003 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6004
6005 rtx result, src_rtx, char_rtx;
6006 enum machine_mode insn_mode = value_mode, char_mode;
6007 enum insn_code icode;
6008
6009 /* If the length is known, just return it. */
6010 if (len != 0)
6011 return expand_expr (len, target, mode, 0);
6012
6013 /* If SRC is not a pointer type, don't do this operation inline. */
6014 if (align == 0)
6015 break;
6016
6017 /* Call a function if we can't compute strlen in the right mode. */
6018
6019 while (insn_mode != VOIDmode)
6020 {
6021 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
6022 if (icode != CODE_FOR_nothing)
6023 break;
6024
6025 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
6026 }
6027 if (insn_mode == VOIDmode)
bbf6f052 6028 break;
e7c33f54
RK
6029
6030 /* Make a place to write the result of the instruction. */
6031 result = target;
6032 if (! (result != 0
6033 && GET_CODE (result) == REG
6034 && GET_MODE (result) == insn_mode
6035 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6036 result = gen_reg_rtx (insn_mode);
6037
4d613828 6038 /* Make sure the operands are acceptable to the predicates. */
e7c33f54 6039
4d613828 6040 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
e7c33f54
RK
6041 result = gen_reg_rtx (insn_mode);
6042
6043 src_rtx = memory_address (BLKmode,
906c4e36 6044 expand_expr (src, NULL_RTX, Pmode,
e7c33f54 6045 EXPAND_NORMAL));
4d613828 6046 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
e7c33f54
RK
6047 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
6048
6049 char_rtx = const0_rtx;
4d613828
RS
6050 char_mode = insn_operand_mode[(int)icode][2];
6051 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
e7c33f54
RK
6052 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
6053
6054 emit_insn (GEN_FCN (icode) (result,
6055 gen_rtx (MEM, BLKmode, src_rtx),
906c4e36 6056 char_rtx, GEN_INT (align)));
e7c33f54
RK
6057
6058 /* Return the value in the proper mode for this function. */
6059 if (GET_MODE (result) == value_mode)
6060 return result;
6061 else if (target != 0)
6062 {
6063 convert_move (target, result, 0);
6064 return target;
6065 }
6066 else
6067 return convert_to_mode (value_mode, result, 0);
bbf6f052
RK
6068 }
6069
6070 case BUILT_IN_STRCPY:
6071 /* If not optimizing, call the library function. */
6072 if (!optimize)
6073 break;
6074
6075 if (arglist == 0
6076 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6077 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6078 || TREE_CHAIN (arglist) == 0
6079 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6080 return const0_rtx;
6081 else
6082 {
6083 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
6084
6085 if (len == 0)
6086 break;
6087
6088 len = size_binop (PLUS_EXPR, len, integer_one_node);
6089
906c4e36 6090 chainon (arglist, build_tree_list (NULL_TREE, len));
bbf6f052
RK
6091 }
6092
6093 /* Drops in. */
6094 case BUILT_IN_MEMCPY:
6095 /* If not optimizing, call the library function. */
6096 if (!optimize)
6097 break;
6098
6099 if (arglist == 0
6100 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6101 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6102 || TREE_CHAIN (arglist) == 0
6103 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6104 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6105 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6106 return const0_rtx;
6107 else
6108 {
6109 tree dest = TREE_VALUE (arglist);
6110 tree src = TREE_VALUE (TREE_CHAIN (arglist));
6111 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6112
6113 int src_align
6114 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6115 int dest_align
6116 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6117 rtx dest_rtx;
6118
6119 /* If either SRC or DEST is not a pointer type, don't do
6120 this operation in-line. */
6121 if (src_align == 0 || dest_align == 0)
6122 {
6123 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
6124 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6125 break;
6126 }
6127
906c4e36 6128 dest_rtx = expand_expr (dest, NULL_RTX, Pmode, EXPAND_NORMAL);
bbf6f052
RK
6129
6130 /* Copy word part most expediently. */
6131 emit_block_move (gen_rtx (MEM, BLKmode,
6132 memory_address (BLKmode, dest_rtx)),
6133 gen_rtx (MEM, BLKmode,
6134 memory_address (BLKmode,
906c4e36
RK
6135 expand_expr (src, NULL_RTX,
6136 Pmode,
bbf6f052 6137 EXPAND_NORMAL))),
906c4e36 6138 expand_expr (len, NULL_RTX, VOIDmode, 0),
bbf6f052
RK
6139 MIN (src_align, dest_align));
6140 return dest_rtx;
6141 }
6142
6143/* These comparison functions need an instruction that returns an actual
6144 index. An ordinary compare that just sets the condition codes
6145 is not enough. */
6146#ifdef HAVE_cmpstrsi
6147 case BUILT_IN_STRCMP:
6148 /* If not optimizing, call the library function. */
6149 if (!optimize)
6150 break;
6151
6152 if (arglist == 0
6153 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6154 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6155 || TREE_CHAIN (arglist) == 0
6156 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
6157 return const0_rtx;
6158 else if (!HAVE_cmpstrsi)
6159 break;
6160 {
6161 tree arg1 = TREE_VALUE (arglist);
6162 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6163 tree offset;
6164 tree len, len2;
6165
6166 len = c_strlen (arg1);
6167 if (len)
6168 len = size_binop (PLUS_EXPR, integer_one_node, len);
6169 len2 = c_strlen (arg2);
6170 if (len2)
6171 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
6172
6173 /* If we don't have a constant length for the first, use the length
6174 of the second, if we know it. We don't require a constant for
6175 this case; some cost analysis could be done if both are available
6176 but neither is constant. For now, assume they're equally cheap.
6177
6178 If both strings have constant lengths, use the smaller. This
6179 could arise if optimization results in strcpy being called with
6180 two fixed strings, or if the code was machine-generated. We should
6181 add some code to the `memcmp' handler below to deal with such
6182 situations, someday. */
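	/* E.g. (illustrative, not in the original source): for
	   strcmp ("ab", s) the length becomes 3, since comparing
	   through the terminating null of "ab" always suffices to
	   decide the result.  */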
6183 if (!len || TREE_CODE (len) != INTEGER_CST)
6184 {
6185 if (len2)
6186 len = len2;
6187 else if (len == 0)
6188 break;
6189 }
6190 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
6191 {
6192 if (tree_int_cst_lt (len2, len))
6193 len = len2;
6194 }
6195
906c4e36 6196 chainon (arglist, build_tree_list (NULL_TREE, len));
bbf6f052
RK
6197 }
6198
6199 /* Drops in. */
6200 case BUILT_IN_MEMCMP:
6201 /* If not optimizing, call the library function. */
6202 if (!optimize)
6203 break;
6204
6205 if (arglist == 0
6206 /* Arg could be non-pointer if user redeclared this fcn wrong. */
6207 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
6208 || TREE_CHAIN (arglist) == 0
6209 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
6210 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
6211 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
6212 return const0_rtx;
6213 else if (!HAVE_cmpstrsi)
6214 break;
6215 {
6216 tree arg1 = TREE_VALUE (arglist);
6217 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
6218 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
6219 rtx result;
6220
6221 int arg1_align
6222 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6223 int arg2_align
6224 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
6225 enum machine_mode insn_mode
6226 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
6227
6228 /* If we don't have POINTER_TYPE, call the function. */
6229 if (arg1_align == 0 || arg2_align == 0)
6230 {
6231 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
6232 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
6233 break;
6234 }
6235
6236 /* Make a place to write the result of the instruction. */
6237 result = target;
6238 if (! (result != 0
6239 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
6240 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
6241 result = gen_reg_rtx (insn_mode);
6242
6243 emit_insn (gen_cmpstrsi (result,
6244 gen_rtx (MEM, BLKmode,
906c4e36
RK
6245 expand_expr (arg1, NULL_RTX, Pmode,
6246 EXPAND_NORMAL)),
bbf6f052 6247 gen_rtx (MEM, BLKmode,
906c4e36
RK
6248 expand_expr (arg2, NULL_RTX, Pmode,
6249 EXPAND_NORMAL)),
6250 expand_expr (len, NULL_RTX, VOIDmode, 0),
6251 GEN_INT (MIN (arg1_align, arg2_align))));
bbf6f052
RK
6252
6253 /* Return the value in the proper mode for this function. */
6254 mode = TYPE_MODE (TREE_TYPE (exp));
6255 if (GET_MODE (result) == mode)
6256 return result;
6257 else if (target != 0)
6258 {
6259 convert_move (target, result, 0);
6260 return target;
6261 }
6262 else
6263 return convert_to_mode (mode, result, 0);
6264 }
6265#else
6266 case BUILT_IN_STRCMP:
6267 case BUILT_IN_MEMCMP:
6268 break;
6269#endif
6270
6271 default: /* just do library call, if unknown builtin */
6272 error ("built-in function %s not currently supported",
6273 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
6274 }
6275
6276 /* The switch statement above can drop through to cause the function
6277 to be called normally. */
6278
6279 return expand_call (exp, target, ignore);
6280}
6281\f
6282/* Expand code for a post- or pre- increment or decrement
6283 and return the RTX for the result.
6284 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
6285
6286static rtx
6287expand_increment (exp, post)
6288 register tree exp;
6289 int post;
6290{
6291 register rtx op0, op1;
6292 register rtx temp, value;
6293 register tree incremented = TREE_OPERAND (exp, 0);
6294 optab this_optab = add_optab;
6295 int icode;
6296 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
6297 int op0_is_copy = 0;
6298
6299 /* Stabilize any component ref that might need to be
6300 evaluated more than once below. */
6301 if (TREE_CODE (incremented) == BIT_FIELD_REF
6302 || (TREE_CODE (incremented) == COMPONENT_REF
6303 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
6304 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
6305 incremented = stabilize_reference (incremented);
6306
6307 /* Compute the operands as RTX.
6308 Note whether OP0 is the actual lvalue or a copy of it:
94a58076 6309 I believe it is a copy iff it is a register or subreg
1499e0a8
RK
6310 and insns were generated in computing it. */
6311
bbf6f052 6312 temp = get_last_insn ();
906c4e36 6313 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
1499e0a8
RK
6314
6315 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
6316 in place but instead must do sign- or zero-extension during assignment,
6317 so we copy it into a new register and let the code below use it as
6318 a copy.
6319
6320 Note that we can safely modify this SUBREG since it is known not to be
6321 shared (it was made by the expand_expr call above). */
6322
6323 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
6324 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
6325
94a58076
RS
6326 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
6327 && temp != get_last_insn ());
906c4e36 6328 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6329
6330 /* Decide whether incrementing or decrementing. */
6331 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
6332 || TREE_CODE (exp) == PREDECREMENT_EXPR)
6333 this_optab = sub_optab;
6334
6335 /* If OP0 is not the actual lvalue, but rather a copy in a register,
6336 then we cannot just increment OP0. We must
6337 therefore contrive to increment the original value.
6338 Then we can return OP0 since it is a copy of the old value. */
6339 if (op0_is_copy)
6340 {
6341 /* This is the easiest way to increment the value wherever it is.
6342 Problems with multiple evaluation of INCREMENTED
6343 are prevented because either (1) it is a component_ref,
6344 in which case it was stabilized above, or (2) it is an array_ref
6345 with constant index in an array in a register, which is
6346 safe to reevaluate. */
6347 tree newexp = build ((this_optab == add_optab
6348 ? PLUS_EXPR : MINUS_EXPR),
6349 TREE_TYPE (exp),
6350 incremented,
6351 TREE_OPERAND (exp, 1));
6352 temp = expand_assignment (incremented, newexp, ! post, 0);
6353 return post ? op0 : temp;
6354 }
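  /* Illustrative case (not in the original source): for `b.f++' on a
     bitfield, OP0 is a register copy, so the code above rebuilds the
     tree as `b.f = b.f + 1' and lets expand_assignment store the sum
     back through the stabilized reference, while OP0 keeps the old
     value for the postincrement result.  */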
6355
6356 /* Convert decrement by a constant into a negative increment. */
6357 if (this_optab == sub_optab
6358 && GET_CODE (op1) == CONST_INT)
6359 {
906c4e36 6360 op1 = GEN_INT (- INTVAL (op1));
bbf6f052
RK
6361 this_optab = add_optab;
6362 }
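  /* E.g. (illustrative): `x -= 4' has just become `x += -4', so only
     add_optab need be considered from here on.  */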
6363
6364 if (post)
6365 {
6366 /* We have a true reference to the value in OP0.
6367 If there is an insn to add or subtract in this mode, queue it. */
6368
6369#if 0 /* Turned off to avoid making extra insn for indexed memref. */
6370 op0 = stabilize (op0);
6371#endif
6372
6373 icode = (int) this_optab->handlers[(int) mode].insn_code;
6374 if (icode != (int) CODE_FOR_nothing
6375 /* Make sure that OP0 is valid for operands 0 and 1
6376 of the insn we want to queue. */
6377 && (*insn_operand_predicate[icode][0]) (op0, mode)
6378 && (*insn_operand_predicate[icode][1]) (op0, mode))
6379 {
6380 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
6381 op1 = force_reg (mode, op1);
6382
6383 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
6384 }
6385 }
6386
6387 /* Preincrement, or we can't increment with one simple insn. */
6388 if (post)
6389 /* Save a copy of the value before inc or dec, to return it later. */
6390 temp = value = copy_to_reg (op0);
6391 else
6392 /* Arrange to return the incremented value. */
6393 /* Copy the rtx because expand_binop will protect from the queue,
6394 and the results of that would be invalid for us to return
6395 if our caller does emit_queue before using our result. */
6396 temp = copy_rtx (value = op0);
6397
6398 /* Increment however we can. */
6399 op1 = expand_binop (mode, this_optab, value, op1, op0,
6400 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
6401 /* Make sure the value is stored into OP0. */
6402 if (op1 != op0)
6403 emit_move_insn (op0, op1);
6404
6405 return temp;
6406}
6407\f
6408/* Expand all function calls contained within EXP, innermost ones first.
6409 But don't look within expressions that have sequence points.
6410 For each CALL_EXPR, record the rtx for its value
6411 in the CALL_EXPR_RTL field. */
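/* Illustrative example (not in the original source): in
   `x + f (g ())' the call to f (and, while expanding its argument,
   the call to g) is expanded before the addition, and each result
   rtx is cached in CALL_EXPR_RTL so the call is not expanded again
   when the sum itself is expanded.  */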
6412
6413static void
6414preexpand_calls (exp)
6415 tree exp;
6416{
6417 register int nops, i;
6418 int type = TREE_CODE_CLASS (TREE_CODE (exp));
6419
6420 if (! do_preexpand_calls)
6421 return;
6422
6423 /* Only expressions and references can contain calls. */
6424
6425 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
6426 return;
6427
6428 switch (TREE_CODE (exp))
6429 {
6430 case CALL_EXPR:
6431 /* Do nothing if already expanded. */
6432 if (CALL_EXPR_RTL (exp) != 0)
6433 return;
6434
6435 /* Do nothing to built-in functions. */
6436 if (TREE_CODE (TREE_OPERAND (exp, 0)) != ADDR_EXPR
6437 || TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != FUNCTION_DECL
6438 || ! DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
906c4e36 6439 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
bbf6f052
RK
6440 return;
6441
6442 case COMPOUND_EXPR:
6443 case COND_EXPR:
6444 case TRUTH_ANDIF_EXPR:
6445 case TRUTH_ORIF_EXPR:
6446 /* If we find one of these, then we can be sure
6447 the adjust will be done for it (since it makes jumps).
6448 Do it now, so that if this is inside an argument
6449 of a function, we don't get the stack adjustment
6450 after some other args have already been pushed. */
6451 do_pending_stack_adjust ();
6452 return;
6453
6454 case BLOCK:
6455 case RTL_EXPR:
6456 case WITH_CLEANUP_EXPR:
6457 return;
6458
6459 case SAVE_EXPR:
6460 if (SAVE_EXPR_RTL (exp) != 0)
6461 return;
6462 }
6463
6464 nops = tree_code_length[(int) TREE_CODE (exp)];
6465 for (i = 0; i < nops; i++)
6466 if (TREE_OPERAND (exp, i) != 0)
6467 {
6468 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
6469 if (type == 'e' || type == '<' || type == '1' || type == '2'
6470 || type == 'r')
6471 preexpand_calls (TREE_OPERAND (exp, i));
6472 }
6473}
6474\f
6475/* At the start of a function, record that we have no previously-pushed
6476 arguments waiting to be popped. */
6477
6478void
6479init_pending_stack_adjust ()
6480{
6481 pending_stack_adjust = 0;
6482}
6483
6484 /* When exiting from a function, if safe, clear out any pending stack adjust
6485 so the adjustment won't get done. */
6486
6487void
6488clear_pending_stack_adjust ()
6489{
6490#ifdef EXIT_IGNORE_STACK
6491 if (! flag_omit_frame_pointer && EXIT_IGNORE_STACK
81feeecb 6492 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
bbf6f052
RK
6493 && ! flag_inline_functions)
6494 pending_stack_adjust = 0;
6495#endif
6496}
6497
6498/* Pop any previously-pushed arguments that have not been popped yet. */
6499
6500void
6501do_pending_stack_adjust ()
6502{
6503 if (inhibit_defer_pop == 0)
6504 {
6505 if (pending_stack_adjust != 0)
906c4e36 6506 adjust_stack (GEN_INT (pending_stack_adjust));
bbf6f052
RK
6507 pending_stack_adjust = 0;
6508 }
6509}
6510
6511/* Expand all cleanups up to OLD_CLEANUPS.
6512 Needed here, and also for language-dependent calls. */
6513
6514void
6515expand_cleanups_to (old_cleanups)
6516 tree old_cleanups;
6517{
6518 while (cleanups_this_call != old_cleanups)
6519 {
906c4e36 6520 expand_expr (TREE_VALUE (cleanups_this_call), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6521 cleanups_this_call = TREE_CHAIN (cleanups_this_call);
6522 }
6523}
6524\f
6525/* Expand conditional expressions. */
6526
6527/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
6528 LABEL is an rtx of code CODE_LABEL, in this function and all the
6529 functions here. */
6530
6531void
6532jumpifnot (exp, label)
6533 tree exp;
6534 rtx label;
6535{
906c4e36 6536 do_jump (exp, label, NULL_RTX);
bbf6f052
RK
6537}
6538
6539/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
6540
6541void
6542jumpif (exp, label)
6543 tree exp;
6544 rtx label;
6545{
906c4e36 6546 do_jump (exp, NULL_RTX, label);
bbf6f052
RK
6547}
6548
6549/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
6550 the result is zero, or IF_TRUE_LABEL if the result is one.
6551 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
6552 meaning fall through in that case.
6553
e7c33f54
RK
6554 do_jump always does any pending stack adjust except when it does not
6555 actually perform a jump. An example where there is no jump
6556 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
6557
bbf6f052
RK
6558 This function is responsible for optimizing cases such as
6559 &&, || and comparison operators in EXP. */
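/* For instance (illustrative, not in the original source): for
   `a && b' the TRUTH_ANDIF_EXPR case below jumps straight to the
   false label once A is zero, so no 0/1 value is ever materialized
   for the conjunction.  */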
6560
6561void
6562do_jump (exp, if_false_label, if_true_label)
6563 tree exp;
6564 rtx if_false_label, if_true_label;
6565{
6566 register enum tree_code code = TREE_CODE (exp);
6567 /* Some cases need to create a label to jump to
6568 in order to properly fall through.
6569 These cases set DROP_THROUGH_LABEL nonzero. */
6570 rtx drop_through_label = 0;
6571 rtx temp;
6572 rtx comparison = 0;
6573 int i;
6574 tree type;
6575
6576 emit_queue ();
6577
6578 switch (code)
6579 {
6580 case ERROR_MARK:
6581 break;
6582
6583 case INTEGER_CST:
6584 temp = integer_zerop (exp) ? if_false_label : if_true_label;
6585 if (temp)
6586 emit_jump (temp);
6587 break;
6588
6589#if 0
6590 /* This is not true with #pragma weak */
6591 case ADDR_EXPR:
6592 /* The address of something can never be zero. */
6593 if (if_true_label)
6594 emit_jump (if_true_label);
6595 break;
6596#endif
6597
6598 case NOP_EXPR:
6599 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
6600 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
6601 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
6602 goto normal;
6603 case CONVERT_EXPR:
6604 /* If we are narrowing the operand, we have to do the compare in the
6605 narrower mode. */
6606 if ((TYPE_PRECISION (TREE_TYPE (exp))
6607 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6608 goto normal;
6609 case NON_LVALUE_EXPR:
6610 case REFERENCE_EXPR:
6611 case ABS_EXPR:
6612 case NEGATE_EXPR:
6613 case LROTATE_EXPR:
6614 case RROTATE_EXPR:
6615 /* These cannot change zero->non-zero or vice versa. */
6616 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
6617 break;
6618
6619#if 0
6620 /* This is never fewer insns than evaluating the PLUS_EXPR followed by
6621 a test and can be longer if the test is eliminated. */
6622 case PLUS_EXPR:
6623 /* Reduce to minus. */
6624 exp = build (MINUS_EXPR, TREE_TYPE (exp),
6625 TREE_OPERAND (exp, 0),
6626 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
6627 TREE_OPERAND (exp, 1))));
6628 /* Process as MINUS. */
6629#endif
6630
6631 case MINUS_EXPR:
6632 /* Non-zero iff operands of minus differ. */
6633 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
6634 TREE_OPERAND (exp, 0),
6635 TREE_OPERAND (exp, 1)),
6636 NE, NE);
6637 break;
6638
6639 case BIT_AND_EXPR:
6640 /* If we are AND'ing with a small constant, do this comparison in the
6641 smallest type that fits. If the machine doesn't have comparisons
6642 that small, it will be converted back to the wider comparison.
6643 This helps if we are testing the sign bit of a narrower object.
6644 combine can't do this for us because it can't know whether a
6645 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
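      /* Example (illustrative, not in the original source): for
	 `if (c & 0x80)' I is 7 below, so the test is redone in an
	 8-bit type, where it amounts to a sign-bit test on a byte.  */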
6646
08af8e09
RK
6647 if (! SLOW_BYTE_ACCESS
6648 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 6649 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
bbf6f052
RK
6650 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
6651 && (type = type_for_size (i + 1, 1)) != 0
08af8e09
RK
6652 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
6653 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
6654 != CODE_FOR_nothing))
bbf6f052
RK
6655 {
6656 do_jump (convert (type, exp), if_false_label, if_true_label);
6657 break;
6658 }
6659 goto normal;
6660
    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
	if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
	if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      free_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
      {
	int bitsize, bitpos, unsignedp;
	enum machine_mode mode;
	tree type;
	tree offset;
	int volatilep = 0;

	/* Get description of this reference.  We don't actually care
	   about the underlying object here.  */
	get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep);

	type = type_for_size (bitsize, unsignedp);
	if (! SLOW_BYTE_ACCESS
	    && type != 0 && bitsize >= 0
	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
		!= CODE_FOR_nothing))
	  {
	    do_jump (convert (type, exp), if_false_label, if_true_label);
	    break;
	  }
	goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
	  && integer_zerop (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
	       && integer_onep (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
	{
	  register rtx label1 = gen_label_rtx ();
	  drop_through_label = gen_label_rtx ();
	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
	  /* Now the THEN-expression.  */
	  do_jump (TREE_OPERAND (exp, 1),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  /* In case the do_jump just above never jumps.  */
	  do_pending_stack_adjust ();
	  emit_label (label1);
	  /* Now the ELSE-expression.  */
	  do_jump (TREE_OPERAND (exp, 2),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	}
      break;

    case EQ_EXPR:
      if (integer_zerop (TREE_OPERAND (exp, 1)))
	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
		== MODE_INT)
	       && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_equality (exp, if_false_label, if_true_label);
      else
	comparison = compare (exp, EQ, EQ);
      break;

    case NE_EXPR:
      if (integer_zerop (TREE_OPERAND (exp, 1)))
	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      else if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
		== MODE_INT)
	       && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_equality (exp, if_true_label, if_false_label);
      else
	comparison = compare (exp, NE, NE);
      break;

    case LT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
	comparison = compare (exp, LT, LTU);
      break;

    case LE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
	comparison = compare (exp, LE, LEU);
      break;

    case GT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
	comparison = compare (exp, GT, GTU);
      break;

    case GE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
	comparison = compare (exp, GE, GEU);
      break;

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
	comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
	comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	       && !can_compare_p (GET_MODE (temp)))
	/* Note swapping the labels gives us not-equal.  */
	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
				       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				       GET_MODE (temp), NULL_RTX, 0);
      else
	abort ();
    }

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
	emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  free_temp_slots ();

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
\f
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */
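
/* Illustrative sketch, assuming a 32-bit word size and a signed DImode
   comparison A > B: the loop below emits, in effect,

       if (A.high > B.high) goto if_true_label;    -- signed, high word
       if (A.high != B.high) goto if_false_label;  -- A.high < B.high here
       if (A.low > B.low) goto if_true_label;      -- unsigned, low word
       goto if_false_label;

   Only the high-order word is compared with the signedness of the type;
   every lower word is compared unsigned.  */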

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but the high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
			       (unsignedp || i > 0) ? GTU : GT,
			       unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_true_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */
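
/* Illustrative sketch, again assuming a 32-bit word size and DImode
   operands: equality is tested one word at a time, in effect

       if (A.low != B.low) goto if_false_label;
       if (A.high != B.high) goto if_false_label;
       goto if_true_label;

   so any unequal word sends control to the false label, and only
   complete equality reaches the true label.  */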

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
				   operand_subword_force (op1, i, mode),
				   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
				   word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
							  GET_MODE (op0)),
				   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
	emit_jump (if_false_label);
      else if (comp != const0_rtx)
	do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
	abort ();

      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = PREV_INSN (get_last_insn ());
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
	 what is generated.  If more than one insn is a branch, or if the
	 branch is not the last insn written, abort.  If we can't invert
	 the branch, make a true label, redirect this jump to that,
	 emit a jump to the false label and define the true label.  */
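
      /* Illustrative sketch of that fallback (label names are informal):

	     bCC true_label        -- the original, un-invertible branch
	     jmp if_false_label
	 true_label:               -- control falls through here

	 so a failing comparison still reaches IF_FALSE_LABEL, and a
	 holding comparison falls through.  */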

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
	emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
	abort ();

      /* Here we get the insn before what was just emitted.
	 On some machines, emitting the branch can discard
	 the previous compare insn and emit a replacement.  */
      if (prev == 0)
	/* If there's only one preceding insn...  */
	insn = get_insns ();
      else
	insn = NEXT_INSN (prev);

      for (insn = NEXT_INSN (insn); insn; insn = NEXT_INSN (insn))
	if (GET_CODE (insn) == JUMP_INSN)
	  {
	    if (branch)
	      abort ();
	    branch = insn;
	  }

      if (branch != get_last_insn ())
	abort ();

      if (! invert_jump (branch, if_false_label))
	{
	  if_true_label = gen_label_rtx ();
	  redirect_jump (branch, if_true_label);
	  emit_jump (if_false_label);
	  emit_label (if_true_label);
	}
    }
}
\f
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */
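
/* For instance (a sketch of typical usage, taken from the LT_EXPR case
   above): a less-than tree comparison is expanded as

       comparison = compare (exp, LT, LTU);

   and compare itself picks LT or LTU according to whether the operand
   type is signed or unsigned.  */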

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}

/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is nonzero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  /* If one operand is constant, make it the second one.  */

  if (GET_CODE (op0) == CONST_INT || GET_CODE (op0) == CONST_DOUBLE)
    {
      rtx tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT)
    return simplify_relational_operation (code, mode, op0, op1);

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx (code, VOIDmode, cc0_rtx, const0_rtx);
}
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is nonzero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
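
/* Illustrative sketch of that set/jump/set fallback, for
   TARGET = (A < B) and INVERT clear (label name is informal):

       target = 1;              -- assume the comparison holds
       bLT label                -- branch if it really does
       target = 0;              -- otherwise store the failure value
   label:

   With INVERT set, the two constants are simply exchanged.  */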

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label, pattern, jump_pat;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
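
  /* For example (integer operands assumed; the signed codes are shown),
     the conversions performed below include

	 x < 1    becomes  x <= 0
	 x <= -1  becomes  x < 0
	 x > -1   becomes  x >= 0
	 x >= 1   becomes  x > 0

     so the single-bit and sign-bit shortcuts further down only have to
     recognize comparisons against zero.  */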

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LTU : LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GEU : GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
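
  /* A worked example (illustrative only): for (x & 0x8) != 0 the bit
     number is exact_log2 (0x8) == 3, so the code below computes

	 (x >> 3) & 1

     and for (x & 0x8) == 0 it additionally XORs that result with 1.
     The final AND with 1 is omitted when the tested bit is already the
     sign bit, i.e. when bitnum == TYPE_PRECISION (type) - 1.  */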

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1))
      && TYPE_PRECISION (type) <= HOST_BITS_PER_WIDE_INT)
    {
      int bitnum = exact_log2 (INTVAL (expand_expr (TREE_OPERAND (arg0, 1),
						    NULL_RTX, VOIDmode, 0)));

      if (subtarget == 0 || GET_CODE (subtarget) != REG
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, TREE_OPERAND (arg0, 0)))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (arg0, 0), subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
			    size_int (bitnum), target, 1);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, 1);

      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, target);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, target, 0,
			    OPTAB_LIB_WIDEN);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (target == 0 || GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
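
/* In outline (informal pseudo-C, not the exact rtl), the code emitted
   below does

       if ((unsigned) index > range)
	 goto default_label;
       pc = *(table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE));

   where the table entry is loaded in CASE_VECTOR_MODE and handed to
   the target's tablejump pattern.  */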

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
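
  /* A concrete example (illustrative only): for case values 3..10,
     INDEX arrives here as x - 3 and RANGE is 7.  If x < 3, the
     subtraction wrapped around, so the unsigned value of x - 3 is huge
     and exceeds 7; if x > 10, then x - 3 > 7 directly.  Either way, a
     single unsigned comparison against RANGE catches both out-of-range
     directions.  */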

  emit_cmp_insn (range, index, LTU, NULL_RTX, mode, 0, 0);
  emit_jump_insn (gen_bltu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = memory_address_noforce
    (CASE_VECTOR_MODE,
     gen_rtx (PLUS, Pmode,
	      gen_rtx (MULT, Pmode, index,
		       GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
	      gen_rtx (LABEL_REF, Pmode, table_label)));
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx (MEM, CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

#ifndef CASE_VECTOR_PC_RELATIVE
  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! flag_pic)
    emit_barrier ();
#endif
}

#endif /* HAVE_tablejump */