]> gcc.gnu.org Git - gcc.git/blame - gcc/expr.c
Makefile.in: Remove all bytecode support.
[gcc.git] / gcc / expr.c
CommitLineData
bbf6f052 1/* Convert tree expression to rtl instructions, for GNU compiler.
c85f7c16 2 Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.
bbf6f052
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
940d9d63
RK
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
bbf6f052
RK
20
21
22#include "config.h"
e9a25f70 23#include <stdio.h>
ca695ac9 24#include "machmode.h"
bbf6f052
RK
25#include "rtl.h"
26#include "tree.h"
ca695ac9 27#include "obstack.h"
bbf6f052 28#include "flags.h"
bf76bb5a 29#include "regs.h"
4ed67205 30#include "hard-reg-set.h"
3d195391 31#include "except.h"
bbf6f052
RK
32#include "function.h"
33#include "insn-flags.h"
34#include "insn-codes.h"
35#include "expr.h"
36#include "insn-config.h"
37#include "recog.h"
38#include "output.h"
bbf6f052 39#include "typeclass.h"
ca55abae 40#include "defaults.h"
bbf6f052
RK
41
42#define CEIL(x,y) (((x) + (y) - 1) / (y))
43
44/* Decide whether a function's arguments should be processed
bbc8a071
RK
45 from first to last or from last to first.
46
47 They should if the stack and args grow in opposite directions, but
48 only if we have push insns. */
bbf6f052 49
bbf6f052 50#ifdef PUSH_ROUNDING
bbc8a071 51
3319a347 52#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
bbf6f052
RK
53#define PUSH_ARGS_REVERSED /* If it's last to first */
54#endif
bbc8a071 55
bbf6f052
RK
56#endif
57
58#ifndef STACK_PUSH_CODE
59#ifdef STACK_GROWS_DOWNWARD
60#define STACK_PUSH_CODE PRE_DEC
61#else
62#define STACK_PUSH_CODE PRE_INC
63#endif
64#endif
65
66/* Like STACK_BOUNDARY but in units of bytes, not bits. */
67#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
68
18543a22
ILT
69/* Assume that case vectors are not pc-relative. */
70#ifndef CASE_VECTOR_PC_RELATIVE
71#define CASE_VECTOR_PC_RELATIVE 0
72#endif
73
bbf6f052
RK
74/* If this is nonzero, we do not bother generating VOLATILE
75 around volatile memory references, and we are willing to
76 output indirect addresses. If cse is to follow, we reject
77 indirect addresses so a useful potential cse is generated;
78 if it is used only once, instruction combination will produce
79 the same indirect address eventually. */
80int cse_not_expected;
81
82/* Nonzero to generate code for all the subroutines within an
83 expression before generating the upper levels of the expression.
84 Nowadays this is never zero. */
85int do_preexpand_calls = 1;
86
87/* Number of units that we should eventually pop off the stack.
88 These are the arguments to function calls that have already returned. */
89int pending_stack_adjust;
90
91/* Nonzero means stack pops must not be deferred, and deferred stack
92 pops must not be output. It is nonzero inside a function call,
93 inside a conditional expression, inside a statement expression,
94 and in other cases as well. */
95int inhibit_defer_pop;
96
d93d4205
MS
97/* When temporaries are created by TARGET_EXPRs, they are created at
98 this level of temp_slot_level, so that they can remain allocated
99 until no longer needed. CLEANUP_POINT_EXPRs define the lifetime
100 of TARGET_EXPRs. */
101int target_temp_slot_level;
102
bbf6f052
RK
103/* Nonzero means __builtin_saveregs has already been done in this function.
104 The value is the pseudoreg containing the value __builtin_saveregs
105 returned. */
106static rtx saveregs_value;
107
dcf76fff
TW
108/* Similarly for __builtin_apply_args. */
109static rtx apply_args_value;
110
956d6950
JL
111/* Don't check memory usage, since code is being emitted to check a memory
112 usage. Used when flag_check_memory_usage is true, to avoid infinite
113 recursion. */
114static int in_check_memory_usage;
115
4969d05d
RK
116/* This structure is used by move_by_pieces to describe the move to
117 be performed. */
4969d05d
RK
118struct move_by_pieces
119{
120 rtx to;
121 rtx to_addr;
122 int autinc_to;
123 int explicit_inc_to;
e9cf6a97 124 int to_struct;
4969d05d
RK
125 rtx from;
126 rtx from_addr;
127 int autinc_from;
128 int explicit_inc_from;
e9cf6a97 129 int from_struct;
4969d05d
RK
130 int len;
131 int offset;
132 int reverse;
133};
134
9de08200
RK
135/* This structure is used by clear_by_pieces to describe the clear to
136 be performed. */
137
138struct clear_by_pieces
139{
140 rtx to;
141 rtx to_addr;
142 int autinc_to;
143 int explicit_inc_to;
144 int to_struct;
145 int len;
146 int offset;
147 int reverse;
148};
149
c02bd5d9
JB
150/* Used to generate bytecodes: keep track of size of local variables,
151 as well as depth of arithmetic stack. (Notice that variables are
152 stored on the machine's stack, not the arithmetic stack.) */
153
921b3427 154static rtx get_push_address PROTO ((int));
186f92ce 155extern int local_vars_size;
c02bd5d9
JB
156extern int stack_depth;
157extern int max_stack_depth;
292b1216 158extern struct obstack permanent_obstack;
4ed67205 159extern rtx arg_pointer_save_area;
c02bd5d9 160
4969d05d
RK
161static rtx enqueue_insn PROTO((rtx, rtx));
162static int queued_subexp_p PROTO((rtx));
163static void init_queue PROTO((void));
164static void move_by_pieces PROTO((rtx, rtx, int, int));
165static int move_by_pieces_ninsns PROTO((unsigned int, int));
eae4b970 166static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
4969d05d 167 struct move_by_pieces *));
9de08200 168static void clear_by_pieces PROTO((rtx, int, int));
eae4b970 169static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
9de08200
RK
170 struct clear_by_pieces *));
171static int is_zeros_p PROTO((tree));
172static int mostly_zeros_p PROTO((tree));
e1a43f73 173static void store_constructor PROTO((tree, rtx, int));
4969d05d
RK
174static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
175 enum machine_mode, int, int, int));
176static tree save_noncopied_parts PROTO((tree, tree));
177static tree init_noncopied_parts PROTO((tree, tree));
178static int safe_from_p PROTO((rtx, tree));
179static int fixed_type_p PROTO((tree));
01c8a7c8 180static rtx var_rtx PROTO((tree));
4969d05d
RK
181static int get_pointer_alignment PROTO((tree, unsigned));
182static tree string_constant PROTO((tree, tree *));
183static tree c_strlen PROTO((tree));
307b821c
RK
184static rtx expand_builtin PROTO((tree, rtx, rtx,
185 enum machine_mode, int));
0006469d
TW
186static int apply_args_size PROTO((void));
187static int apply_result_size PROTO((void));
188static rtx result_vector PROTO((int, rtx));
189static rtx expand_builtin_apply_args PROTO((void));
190static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
191static void expand_builtin_return PROTO((rtx));
7b8b9722 192static rtx expand_increment PROTO((tree, int, int));
4969d05d
RK
193static void preexpand_calls PROTO((tree));
194static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
2e5ec6cf 195void do_jump_by_parts_greater_rtx PROTO((enum machine_mode, int, rtx, rtx, rtx, rtx));
4969d05d
RK
196static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
197static void do_jump_by_parts_equality_rtx PROTO((rtx, rtx, rtx));
198static void do_jump_for_compare PROTO((rtx, rtx, rtx));
199static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
200static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
16545b0a 201extern tree truthvalue_conversion PROTO((tree));
bbf6f052 202
4fa52007
RK
203/* Record for each mode whether we can move a register directly to or
204 from an object of that mode in memory. If we can't, we won't try
205 to use that mode directly when accessing a field of that mode. */
206
207static char direct_load[NUM_MACHINE_MODES];
208static char direct_store[NUM_MACHINE_MODES];
209
bbf6f052
RK
210/* MOVE_RATIO is the number of move instructions that is better than
211 a block move. */
212
213#ifndef MOVE_RATIO
266007a7 214#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
bbf6f052
RK
215#define MOVE_RATIO 2
216#else
217/* A value of around 6 would minimize code size; infinity would minimize
218 execution time. */
219#define MOVE_RATIO 15
220#endif
221#endif
e87b4f3f 222
266007a7 223/* This array records the insn_code of insns to perform block moves. */
e6677db3 224enum insn_code movstr_optab[NUM_MACHINE_MODES];
266007a7 225
9de08200
RK
226/* This array records the insn_code of insns to perform block clears. */
227enum insn_code clrstr_optab[NUM_MACHINE_MODES];
228
0f41302f 229/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
e87b4f3f
RS
230
231#ifndef SLOW_UNALIGNED_ACCESS
c7a7ac46 232#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
e87b4f3f 233#endif
0006469d
TW
234
235/* Register mappings for target machines without register windows. */
236#ifndef INCOMING_REGNO
237#define INCOMING_REGNO(OUT) (OUT)
238#endif
239#ifndef OUTGOING_REGNO
240#define OUTGOING_REGNO(IN) (IN)
241#endif
bbf6f052 242\f
4fa52007 243/* This is run once per compilation to set up which modes can be used
266007a7 244 directly in memory and to initialize the block move optab. */
4fa52007
RK
245
246void
247init_expr_once ()
248{
249 rtx insn, pat;
250 enum machine_mode mode;
e2549997
RS
251 /* Try indexing by frame ptr and try by stack ptr.
252 It is known that on the Convex the stack ptr isn't a valid index.
253 With luck, one or the other is valid on any machine. */
38a448ca
RH
254 rtx mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
255 rtx mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
4fa52007
RK
256
257 start_sequence ();
38a448ca 258 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
4fa52007
RK
259 pat = PATTERN (insn);
260
261 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
262 mode = (enum machine_mode) ((int) mode + 1))
263 {
264 int regno;
265 rtx reg;
266 int num_clobbers;
267
268 direct_load[(int) mode] = direct_store[(int) mode] = 0;
269 PUT_MODE (mem, mode);
e2549997 270 PUT_MODE (mem1, mode);
4fa52007 271
e6fe56a4
RK
272 /* See if there is some register that can be used in this mode and
273 directly loaded or stored from memory. */
274
7308a047
RS
275 if (mode != VOIDmode && mode != BLKmode)
276 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
277 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
278 regno++)
279 {
280 if (! HARD_REGNO_MODE_OK (regno, mode))
281 continue;
e6fe56a4 282
38a448ca 283 reg = gen_rtx_REG (mode, regno);
e6fe56a4 284
7308a047
RS
285 SET_SRC (pat) = mem;
286 SET_DEST (pat) = reg;
287 if (recog (pat, insn, &num_clobbers) >= 0)
288 direct_load[(int) mode] = 1;
e6fe56a4 289
e2549997
RS
290 SET_SRC (pat) = mem1;
291 SET_DEST (pat) = reg;
292 if (recog (pat, insn, &num_clobbers) >= 0)
293 direct_load[(int) mode] = 1;
294
7308a047
RS
295 SET_SRC (pat) = reg;
296 SET_DEST (pat) = mem;
297 if (recog (pat, insn, &num_clobbers) >= 0)
298 direct_store[(int) mode] = 1;
e2549997
RS
299
300 SET_SRC (pat) = reg;
301 SET_DEST (pat) = mem1;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_store[(int) mode] = 1;
7308a047 304 }
4fa52007
RK
305 }
306
307 end_sequence ();
308}
309
bbf6f052
RK
310/* This is run at the start of compiling a function. */
311
312void
313init_expr ()
314{
315 init_queue ();
316
317 pending_stack_adjust = 0;
318 inhibit_defer_pop = 0;
bbf6f052 319 saveregs_value = 0;
0006469d 320 apply_args_value = 0;
e87b4f3f 321 forced_labels = 0;
bbf6f052
RK
322}
323
324/* Save all variables describing the current status into the structure *P.
325 This is used before starting a nested function. */
326
327void
328save_expr_status (p)
329 struct function *p;
330{
331 /* Instead of saving the postincrement queue, empty it. */
332 emit_queue ();
333
334 p->pending_stack_adjust = pending_stack_adjust;
335 p->inhibit_defer_pop = inhibit_defer_pop;
bbf6f052 336 p->saveregs_value = saveregs_value;
0006469d 337 p->apply_args_value = apply_args_value;
e87b4f3f 338 p->forced_labels = forced_labels;
bbf6f052
RK
339
340 pending_stack_adjust = 0;
341 inhibit_defer_pop = 0;
bbf6f052 342 saveregs_value = 0;
0006469d 343 apply_args_value = 0;
e87b4f3f 344 forced_labels = 0;
bbf6f052
RK
345}
346
347/* Restore all variables describing the current status from the structure *P.
348 This is used after a nested function. */
349
350void
351restore_expr_status (p)
352 struct function *p;
353{
354 pending_stack_adjust = p->pending_stack_adjust;
355 inhibit_defer_pop = p->inhibit_defer_pop;
bbf6f052 356 saveregs_value = p->saveregs_value;
0006469d 357 apply_args_value = p->apply_args_value;
e87b4f3f 358 forced_labels = p->forced_labels;
bbf6f052
RK
359}
360\f
361/* Manage the queue of increment instructions to be output
362 for POSTINCREMENT_EXPR expressions, etc. */
363
364static rtx pending_chain;
365
366/* Queue up to increment (or change) VAR later. BODY says how:
367 BODY should be the same thing you would pass to emit_insn
368 to increment right away. It will go to emit_insn later on.
369
370 The value is a QUEUED expression to be used in place of VAR
371 where you want to guarantee the pre-incrementation value of VAR. */
372
373static rtx
374enqueue_insn (var, body)
375 rtx var, body;
376{
38a448ca
RH
377 pending_chain = gen_rtx_QUEUED (GET_MODE (var),
378 var, NULL_RTX, NULL_RTX, body,
379 pending_chain);
bbf6f052
RK
380 return pending_chain;
381}
382
383/* Use protect_from_queue to convert a QUEUED expression
384 into something that you can put immediately into an instruction.
385 If the queued incrementation has not happened yet,
386 protect_from_queue returns the variable itself.
387 If the incrementation has happened, protect_from_queue returns a temp
388 that contains a copy of the old value of the variable.
389
390 Any time an rtx which might possibly be a QUEUED is to be put
391 into an instruction, it must be passed through protect_from_queue first.
392 QUEUED expressions are not meaningful in instructions.
393
394 Do not pass a value through protect_from_queue and then hold
395 on to it for a while before putting it in an instruction!
396 If the queue is flushed in between, incorrect code will result. */
397
398rtx
399protect_from_queue (x, modify)
400 register rtx x;
401 int modify;
402{
403 register RTX_CODE code = GET_CODE (x);
404
405#if 0 /* A QUEUED can hang around after the queue is forced out. */
406 /* Shortcut for most common case. */
407 if (pending_chain == 0)
408 return x;
409#endif
410
411 if (code != QUEUED)
412 {
e9baa644
RK
413 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
414 use of autoincrement. Make a copy of the contents of the memory
415 location rather than a copy of the address, but not if the value is
416 of mode BLKmode. Don't modify X in place since it might be
417 shared. */
bbf6f052
RK
418 if (code == MEM && GET_MODE (x) != BLKmode
419 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
420 {
421 register rtx y = XEXP (x, 0);
38a448ca 422 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
e9baa644
RK
423
424 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
425 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
426 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
427
bbf6f052
RK
428 if (QUEUED_INSN (y))
429 {
e9baa644
RK
430 register rtx temp = gen_reg_rtx (GET_MODE (new));
431 emit_insn_before (gen_move_insn (temp, new),
bbf6f052
RK
432 QUEUED_INSN (y));
433 return temp;
434 }
e9baa644 435 return new;
bbf6f052
RK
436 }
437 /* Otherwise, recursively protect the subexpressions of all
438 the kinds of rtx's that can contain a QUEUED. */
439 if (code == MEM)
3f15938e
RS
440 {
441 rtx tem = protect_from_queue (XEXP (x, 0), 0);
442 if (tem != XEXP (x, 0))
443 {
444 x = copy_rtx (x);
445 XEXP (x, 0) = tem;
446 }
447 }
bbf6f052
RK
448 else if (code == PLUS || code == MULT)
449 {
3f15938e
RS
450 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
451 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
452 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
453 {
454 x = copy_rtx (x);
455 XEXP (x, 0) = new0;
456 XEXP (x, 1) = new1;
457 }
bbf6f052
RK
458 }
459 return x;
460 }
461 /* If the increment has not happened, use the variable itself. */
462 if (QUEUED_INSN (x) == 0)
463 return QUEUED_VAR (x);
464 /* If the increment has happened and a pre-increment copy exists,
465 use that copy. */
466 if (QUEUED_COPY (x) != 0)
467 return QUEUED_COPY (x);
468 /* The increment has happened but we haven't set up a pre-increment copy.
469 Set one up now, and use it. */
470 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
471 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
472 QUEUED_INSN (x));
473 return QUEUED_COPY (x);
474}
475
476/* Return nonzero if X contains a QUEUED expression:
477 if it contains anything that will be altered by a queued increment.
478 We handle only combinations of MEM, PLUS, MINUS and MULT operators
479 since memory addresses generally contain only those. */
480
481static int
482queued_subexp_p (x)
483 rtx x;
484{
485 register enum rtx_code code = GET_CODE (x);
486 switch (code)
487 {
488 case QUEUED:
489 return 1;
490 case MEM:
491 return queued_subexp_p (XEXP (x, 0));
492 case MULT:
493 case PLUS:
494 case MINUS:
e9a25f70
JL
495 return (queued_subexp_p (XEXP (x, 0))
496 || queued_subexp_p (XEXP (x, 1)));
497 default:
498 return 0;
bbf6f052 499 }
bbf6f052
RK
500}
501
502/* Perform all the pending incrementations. */
503
504void
505emit_queue ()
506{
507 register rtx p;
508 while (p = pending_chain)
509 {
510 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
511 pending_chain = QUEUED_NEXT (p);
512 }
513}
514
515static void
516init_queue ()
517{
518 if (pending_chain)
519 abort ();
520}
521\f
522/* Copy data from FROM to TO, where the machine modes are not the same.
523 Both modes may be integer, or both may be floating.
524 UNSIGNEDP should be nonzero if FROM is an unsigned type.
525 This causes zero-extension instead of sign-extension. */
526
527void
528convert_move (to, from, unsignedp)
529 register rtx to, from;
530 int unsignedp;
531{
532 enum machine_mode to_mode = GET_MODE (to);
533 enum machine_mode from_mode = GET_MODE (from);
534 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
535 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
536 enum insn_code code;
537 rtx libcall;
538
539 /* rtx code for making an equivalent value. */
540 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
541
542 to = protect_from_queue (to, 1);
543 from = protect_from_queue (from, 0);
544
545 if (to_real != from_real)
546 abort ();
547
1499e0a8
RK
548 /* If FROM is a SUBREG that indicates that we have already done at least
549 the required extension, strip it. We don't handle such SUBREGs as
550 TO here. */
551
552 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
553 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
554 >= GET_MODE_SIZE (to_mode))
555 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
556 from = gen_lowpart (to_mode, from), from_mode = to_mode;
557
558 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
559 abort ();
560
bbf6f052
RK
561 if (to_mode == from_mode
562 || (from_mode == VOIDmode && CONSTANT_P (from)))
563 {
564 emit_move_insn (to, from);
565 return;
566 }
567
568 if (to_real)
569 {
81d79e2c
RS
570 rtx value;
571
2b01c326 572 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
b424402e 573 {
2b01c326
RK
574 /* Try converting directly if the insn is supported. */
575 if ((code = can_extend_p (to_mode, from_mode, 0))
576 != CODE_FOR_nothing)
577 {
578 emit_unop_insn (code, to, from, UNKNOWN);
579 return;
580 }
bbf6f052 581 }
2b01c326 582
b424402e
RS
583#ifdef HAVE_trunchfqf2
584 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
585 {
586 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
587 return;
588 }
589#endif
704af6a1
JL
590#ifdef HAVE_trunctqfqf2
591 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
592 {
593 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
594 return;
595 }
596#endif
b424402e
RS
597#ifdef HAVE_truncsfqf2
598 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
599 {
600 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
601 return;
602 }
603#endif
604#ifdef HAVE_truncdfqf2
605 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
606 {
607 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
608 return;
609 }
610#endif
611#ifdef HAVE_truncxfqf2
612 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
613 {
614 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
615 return;
616 }
617#endif
618#ifdef HAVE_trunctfqf2
619 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
620 {
621 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
622 return;
623 }
624#endif
03747aa3
RK
625
626#ifdef HAVE_trunctqfhf2
627 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
628 {
629 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
630 return;
631 }
632#endif
b424402e
RS
633#ifdef HAVE_truncsfhf2
634 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
635 {
636 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
637 return;
638 }
639#endif
640#ifdef HAVE_truncdfhf2
641 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
642 {
643 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
644 return;
645 }
646#endif
647#ifdef HAVE_truncxfhf2
648 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
649 {
650 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
651 return;
652 }
653#endif
654#ifdef HAVE_trunctfhf2
655 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
656 {
657 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
658 return;
659 }
660#endif
2b01c326
RK
661
662#ifdef HAVE_truncsftqf2
663 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
664 {
665 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
666 return;
667 }
668#endif
669#ifdef HAVE_truncdftqf2
670 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
671 {
672 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
673 return;
674 }
675#endif
676#ifdef HAVE_truncxftqf2
677 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
678 {
679 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
680 return;
681 }
682#endif
683#ifdef HAVE_trunctftqf2
684 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
685 {
686 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
687 return;
688 }
689#endif
690
bbf6f052
RK
691#ifdef HAVE_truncdfsf2
692 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
693 {
694 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
695 return;
696 }
697#endif
b092b471
JW
698#ifdef HAVE_truncxfsf2
699 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
700 {
701 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
702 return;
703 }
704#endif
bbf6f052
RK
705#ifdef HAVE_trunctfsf2
706 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
707 {
708 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
709 return;
710 }
711#endif
b092b471
JW
712#ifdef HAVE_truncxfdf2
713 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
714 {
715 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
716 return;
717 }
718#endif
bbf6f052
RK
719#ifdef HAVE_trunctfdf2
720 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
721 {
722 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
723 return;
724 }
725#endif
726
b092b471
JW
727 libcall = (rtx) 0;
728 switch (from_mode)
729 {
730 case SFmode:
731 switch (to_mode)
732 {
733 case DFmode:
734 libcall = extendsfdf2_libfunc;
735 break;
736
737 case XFmode:
738 libcall = extendsfxf2_libfunc;
739 break;
740
741 case TFmode:
742 libcall = extendsftf2_libfunc;
743 break;
e9a25f70
JL
744
745 default:
746 break;
b092b471
JW
747 }
748 break;
749
750 case DFmode:
751 switch (to_mode)
752 {
753 case SFmode:
754 libcall = truncdfsf2_libfunc;
755 break;
756
757 case XFmode:
758 libcall = extenddfxf2_libfunc;
759 break;
760
761 case TFmode:
762 libcall = extenddftf2_libfunc;
763 break;
e9a25f70
JL
764
765 default:
766 break;
b092b471
JW
767 }
768 break;
769
770 case XFmode:
771 switch (to_mode)
772 {
773 case SFmode:
774 libcall = truncxfsf2_libfunc;
775 break;
776
777 case DFmode:
778 libcall = truncxfdf2_libfunc;
779 break;
e9a25f70
JL
780
781 default:
782 break;
b092b471
JW
783 }
784 break;
785
786 case TFmode:
787 switch (to_mode)
788 {
789 case SFmode:
790 libcall = trunctfsf2_libfunc;
791 break;
792
793 case DFmode:
794 libcall = trunctfdf2_libfunc;
795 break;
e9a25f70
JL
796
797 default:
798 break;
b092b471
JW
799 }
800 break;
e9a25f70
JL
801
802 default:
803 break;
b092b471
JW
804 }
805
806 if (libcall == (rtx) 0)
807 /* This conversion is not implemented yet. */
bbf6f052
RK
808 abort ();
809
81d79e2c
RS
810 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
811 1, from, from_mode);
812 emit_move_insn (to, value);
bbf6f052
RK
813 return;
814 }
815
816 /* Now both modes are integers. */
817
818 /* Handle expanding beyond a word. */
819 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
820 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
821 {
822 rtx insns;
823 rtx lowpart;
824 rtx fill_value;
825 rtx lowfrom;
826 int i;
827 enum machine_mode lowpart_mode;
828 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
829
830 /* Try converting directly if the insn is supported. */
831 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
832 != CODE_FOR_nothing)
833 {
cd1b4b44
RK
834 /* If FROM is a SUBREG, put it into a register. Do this
835 so that we always generate the same set of insns for
836 better cse'ing; if an intermediate assignment occurred,
837 we won't be doing the operation directly on the SUBREG. */
838 if (optimize > 0 && GET_CODE (from) == SUBREG)
839 from = force_reg (from_mode, from);
bbf6f052
RK
840 emit_unop_insn (code, to, from, equiv_code);
841 return;
842 }
843 /* Next, try converting via full word. */
844 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
845 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
846 != CODE_FOR_nothing))
847 {
a81fee56 848 if (GET_CODE (to) == REG)
38a448ca 849 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
bbf6f052
RK
850 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
851 emit_unop_insn (code, to,
852 gen_lowpart (word_mode, to), equiv_code);
853 return;
854 }
855
856 /* No special multiword conversion insn; do it by hand. */
857 start_sequence ();
858
5c5033c3
RK
859 /* Since we will turn this into a no conflict block, we must ensure
860 that the source does not overlap the target. */
861
862 if (reg_overlap_mentioned_p (to, from))
863 from = force_reg (from_mode, from);
864
bbf6f052
RK
865 /* Get a copy of FROM widened to a word, if necessary. */
866 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
867 lowpart_mode = word_mode;
868 else
869 lowpart_mode = from_mode;
870
871 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
872
873 lowpart = gen_lowpart (lowpart_mode, to);
874 emit_move_insn (lowpart, lowfrom);
875
876 /* Compute the value to put in each remaining word. */
877 if (unsignedp)
878 fill_value = const0_rtx;
879 else
880 {
881#ifdef HAVE_slt
882 if (HAVE_slt
883 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
884 && STORE_FLAG_VALUE == -1)
885 {
906c4e36
RK
886 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
887 lowpart_mode, 0, 0);
bbf6f052
RK
888 fill_value = gen_reg_rtx (word_mode);
889 emit_insn (gen_slt (fill_value));
890 }
891 else
892#endif
893 {
894 fill_value
895 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
896 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
906c4e36 897 NULL_RTX, 0);
bbf6f052
RK
898 fill_value = convert_to_mode (word_mode, fill_value, 1);
899 }
900 }
901
902 /* Fill the remaining words. */
903 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
904 {
905 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
906 rtx subword = operand_subword (to, index, 1, to_mode);
907
908 if (subword == 0)
909 abort ();
910
911 if (fill_value != subword)
912 emit_move_insn (subword, fill_value);
913 }
914
915 insns = get_insns ();
916 end_sequence ();
917
906c4e36 918 emit_no_conflict_block (insns, to, from, NULL_RTX,
38a448ca 919 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
bbf6f052
RK
920 return;
921 }
922
d3c64ee3
RS
923 /* Truncating multi-word to a word or less. */
924 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
925 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
bbf6f052 926 {
431a6eca
JW
927 if (!((GET_CODE (from) == MEM
928 && ! MEM_VOLATILE_P (from)
929 && direct_load[(int) to_mode]
930 && ! mode_dependent_address_p (XEXP (from, 0)))
931 || GET_CODE (from) == REG
932 || GET_CODE (from) == SUBREG))
933 from = force_reg (from_mode, from);
bbf6f052
RK
934 convert_move (to, gen_lowpart (word_mode, from), 0);
935 return;
936 }
937
938 /* Handle pointer conversion */ /* SPEE 900220 */
939 if (to_mode == PSImode)
940 {
941 if (from_mode != SImode)
942 from = convert_to_mode (SImode, from, unsignedp);
943
1f584163
DE
944#ifdef HAVE_truncsipsi2
945 if (HAVE_truncsipsi2)
bbf6f052 946 {
1f584163 947 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
bbf6f052
RK
948 return;
949 }
1f584163 950#endif /* HAVE_truncsipsi2 */
bbf6f052
RK
951 abort ();
952 }
953
954 if (from_mode == PSImode)
955 {
956 if (to_mode != SImode)
957 {
958 from = convert_to_mode (SImode, from, unsignedp);
959 from_mode = SImode;
960 }
961 else
962 {
1f584163
DE
963#ifdef HAVE_extendpsisi2
964 if (HAVE_extendpsisi2)
bbf6f052 965 {
1f584163 966 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
bbf6f052
RK
967 return;
968 }
1f584163 969#endif /* HAVE_extendpsisi2 */
bbf6f052
RK
970 abort ();
971 }
972 }
973
0407367d
RK
974 if (to_mode == PDImode)
975 {
976 if (from_mode != DImode)
977 from = convert_to_mode (DImode, from, unsignedp);
978
979#ifdef HAVE_truncdipdi2
980 if (HAVE_truncdipdi2)
981 {
982 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
983 return;
984 }
985#endif /* HAVE_truncdipdi2 */
986 abort ();
987 }
988
989 if (from_mode == PDImode)
990 {
991 if (to_mode != DImode)
992 {
993 from = convert_to_mode (DImode, from, unsignedp);
994 from_mode = DImode;
995 }
996 else
997 {
998#ifdef HAVE_extendpdidi2
999 if (HAVE_extendpdidi2)
1000 {
1001 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1002 return;
1003 }
1004#endif /* HAVE_extendpdidi2 */
1005 abort ();
1006 }
1007 }
1008
bbf6f052
RK
1009 /* Now follow all the conversions between integers
1010 no more than a word long. */
1011
1012 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1013 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1014 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
d3c64ee3 1015 GET_MODE_BITSIZE (from_mode)))
bbf6f052 1016 {
d3c64ee3
RS
1017 if (!((GET_CODE (from) == MEM
1018 && ! MEM_VOLATILE_P (from)
1019 && direct_load[(int) to_mode]
1020 && ! mode_dependent_address_p (XEXP (from, 0)))
1021 || GET_CODE (from) == REG
1022 || GET_CODE (from) == SUBREG))
1023 from = force_reg (from_mode, from);
34aa3599
RK
1024 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1025 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1026 from = copy_to_reg (from);
bbf6f052
RK
1027 emit_move_insn (to, gen_lowpart (to_mode, from));
1028 return;
1029 }
1030
d3c64ee3 1031 /* Handle extension. */
bbf6f052
RK
1032 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1033 {
1034 /* Convert directly if that works. */
1035 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1036 != CODE_FOR_nothing)
1037 {
1038 emit_unop_insn (code, to, from, equiv_code);
1039 return;
1040 }
1041 else
1042 {
1043 enum machine_mode intermediate;
1044
1045 /* Search for a mode to convert via. */
1046 for (intermediate = from_mode; intermediate != VOIDmode;
1047 intermediate = GET_MODE_WIDER_MODE (intermediate))
930b4e39
RK
1048 if (((can_extend_p (to_mode, intermediate, unsignedp)
1049 != CODE_FOR_nothing)
1050 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1051 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
bbf6f052
RK
1052 && (can_extend_p (intermediate, from_mode, unsignedp)
1053 != CODE_FOR_nothing))
1054 {
1055 convert_move (to, convert_to_mode (intermediate, from,
1056 unsignedp), unsignedp);
1057 return;
1058 }
1059
1060 /* No suitable intermediate mode. */
1061 abort ();
1062 }
1063 }
1064
1065 /* Support special truncate insns for certain modes. */
1066
1067 if (from_mode == DImode && to_mode == SImode)
1068 {
1069#ifdef HAVE_truncdisi2
1070 if (HAVE_truncdisi2)
1071 {
1072 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1073 return;
1074 }
1075#endif
1076 convert_move (to, force_reg (from_mode, from), unsignedp);
1077 return;
1078 }
1079
1080 if (from_mode == DImode && to_mode == HImode)
1081 {
1082#ifdef HAVE_truncdihi2
1083 if (HAVE_truncdihi2)
1084 {
1085 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1086 return;
1087 }
1088#endif
1089 convert_move (to, force_reg (from_mode, from), unsignedp);
1090 return;
1091 }
1092
1093 if (from_mode == DImode && to_mode == QImode)
1094 {
1095#ifdef HAVE_truncdiqi2
1096 if (HAVE_truncdiqi2)
1097 {
1098 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1099 return;
1100 }
1101#endif
1102 convert_move (to, force_reg (from_mode, from), unsignedp);
1103 return;
1104 }
1105
1106 if (from_mode == SImode && to_mode == HImode)
1107 {
1108#ifdef HAVE_truncsihi2
1109 if (HAVE_truncsihi2)
1110 {
1111 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1112 return;
1113 }
1114#endif
1115 convert_move (to, force_reg (from_mode, from), unsignedp);
1116 return;
1117 }
1118
1119 if (from_mode == SImode && to_mode == QImode)
1120 {
1121#ifdef HAVE_truncsiqi2
1122 if (HAVE_truncsiqi2)
1123 {
1124 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1125 return;
1126 }
1127#endif
1128 convert_move (to, force_reg (from_mode, from), unsignedp);
1129 return;
1130 }
1131
1132 if (from_mode == HImode && to_mode == QImode)
1133 {
1134#ifdef HAVE_trunchiqi2
1135 if (HAVE_trunchiqi2)
1136 {
1137 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1138 return;
1139 }
1140#endif
1141 convert_move (to, force_reg (from_mode, from), unsignedp);
1142 return;
1143 }
1144
b9bcad65
RK
1145 if (from_mode == TImode && to_mode == DImode)
1146 {
1147#ifdef HAVE_trunctidi2
1148 if (HAVE_trunctidi2)
1149 {
1150 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1151 return;
1152 }
1153#endif
1154 convert_move (to, force_reg (from_mode, from), unsignedp);
1155 return;
1156 }
1157
1158 if (from_mode == TImode && to_mode == SImode)
1159 {
1160#ifdef HAVE_trunctisi2
1161 if (HAVE_trunctisi2)
1162 {
1163 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1164 return;
1165 }
1166#endif
1167 convert_move (to, force_reg (from_mode, from), unsignedp);
1168 return;
1169 }
1170
1171 if (from_mode == TImode && to_mode == HImode)
1172 {
1173#ifdef HAVE_trunctihi2
1174 if (HAVE_trunctihi2)
1175 {
1176 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1177 return;
1178 }
1179#endif
1180 convert_move (to, force_reg (from_mode, from), unsignedp);
1181 return;
1182 }
1183
1184 if (from_mode == TImode && to_mode == QImode)
1185 {
1186#ifdef HAVE_trunctiqi2
1187 if (HAVE_trunctiqi2)
1188 {
1189 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1190 return;
1191 }
1192#endif
1193 convert_move (to, force_reg (from_mode, from), unsignedp);
1194 return;
1195 }
1196
bbf6f052
RK
1197 /* Handle truncation of volatile memrefs, and so on;
1198 the things that couldn't be truncated directly,
1199 and for which there was no special instruction. */
1200 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1201 {
1202 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1203 emit_move_insn (to, temp);
1204 return;
1205 }
1206
1207 /* Mode combination is not recognized. */
1208 abort ();
1209}
1210
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  /* Convenience wrapper: VOIDmode tells convert_modes to deduce the
     old mode from X itself.  */
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
1229
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  /* When X carries its own mode, that overrides the caller's OLDMODE.  */
  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.   This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  /* Fall back: copy into a fresh pseudo with a full conversion.  */
  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
1332\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  /* Record whether either address is already auto-incrementing or
     auto-decrementing.  */
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  /* A decrementing destination address means we copy from the end
     of the block backward.  */
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  /* If unaligned access is cheap, or the block is in fact fully aligned,
     we may move in pieces as large as MOVE_MAX.  */
  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      /* Find the widest integer mode strictly narrower than MAX_SIZE.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
1448
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  /* If unaligned access is cheap, or the block is fully aligned, count
     moves in pieces as large as MOVE_MAX.  */
  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* Mirror move_by_pieces: count moves in the widest usable integer
     mode first, then successively narrower modes for the remainder.  */
  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      /* Find the widest integer mode strictly narrower than MAX_SIZE.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
1488
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      /* When copying backward, step the offset down before each move.  */
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      /* Explicitly pre-decrement the address registers when the setup in
	 move_by_pieces asked for it (explicit_inc_* == -1).  */
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      /* Explicitly post-increment when asked (explicit_inc_* == 1).  */
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
1541\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  /* A small constant-size copy is cheapest as a sequence of scalar
     moves.  */
  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= GET_MODE_MASK (mode)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      /* Each operand must satisfy its predicate, if any.  */
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

      /* No movstr pattern applied; emit a library call instead.  */
#ifdef TARGET_MEM_FUNCTIONS
      retval
	= emit_library_call_value (memcpy_libfunc, NULL_RTX, 0,
				   ptr_mode, 3, XEXP (x, 0), Pmode,
				   XEXP (y, 0), Pmode,
				   convert_to_mode (TYPE_MODE (sizetype), size,
						    TREE_UNSIGNED (sizetype)),
				   TYPE_MODE (sizetype));
#else
      /* bcopy takes (src, dst) in the opposite order from memcpy and
	 returns nothing.  */
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, XEXP (y, 0), Pmode,
			 XEXP (x, 0), Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
1654\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
  rtx pat, last;

  if (nregs == 0)
    return;

  /* Constants the target cannot accept directly must go through memory.  */
  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  /* Otherwise move the value one word at a time.  */
  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
1695
/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
  rtx pat, last;
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      /* Shift the register left so the SIZE significant bytes land at the
	 most-significant end of the word before storing it.  */
      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  /* Otherwise store the registers one word at a time.  */
  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
1768
fffa9c1d
JW
/* Emit code to move a block Y to a block X, where X is non-consecutive
   registers represented by a PARALLEL.  */

void
emit_group_load (x, y)
     rtx x, y;
{
  rtx target_reg, source;
  int i;

  if (GET_CODE (x) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (x, 0, 0), 0))
    i = 0;
  else
    i = 1;

  for (; i < XVECLEN (x, 0); i++)
    {
      rtx element = XVECEXP (x, 0, i);

      /* Each PARALLEL entry pairs a destination register (operand 0)
	 with a byte offset into Y (operand 1).  */
      target_reg = XEXP (element, 0);

      if (GET_CODE (y) == MEM)
	source = change_address (y, GET_MODE (target_reg),
				 plus_constant (XEXP (y, 0),
						INTVAL (XEXP (element, 1))));
      else if (XEXP (element, 1) == const0_rtx)
	{
	  if (GET_MODE (target_reg) == GET_MODE (y))
	    source = y;
	  /* Allow for the target_reg to be smaller than the input register
	     to allow for AIX with 4 DF arguments after a single SI arg.  The
	     last DF argument will only load 1 word into the integer registers,
	     but load a DF value into the float registers.  */
	  else if ((GET_MODE_SIZE (GET_MODE (target_reg))
		    <= GET_MODE_SIZE (GET_MODE (y)))
		   && GET_MODE (target_reg) == word_mode)
	    /* This might be a const_double, so we can't just use SUBREG.  */
	    source = operand_subword (y, 0, 0, VOIDmode);
	  else if (GET_MODE_SIZE (GET_MODE (target_reg))
		   == GET_MODE_SIZE (GET_MODE (y)))
	    source = gen_lowpart (GET_MODE (target_reg), y);
	  else
	    abort ();
	}
      else
	abort ();

      emit_move_insn (target_reg, source);
    }
}
1824
1825/* Emit code to move a block Y to a block X, where Y is non-consecutive
1826 registers represented by a PARALLEL. */
1827
1828void
1829emit_group_store (x, y)
1830 rtx x, y;
1831{
1832 rtx source_reg, target;
1833 int i;
1834
1835 if (GET_CODE (y) != PARALLEL)
1836 abort ();
1837
1838 /* Check for a NULL entry, used to indicate that the parameter goes
1839 both on the stack and in registers. */
1840 if (XEXP (XVECEXP (y, 0, 0), 0))
1841 i = 0;
1842 else
1843 i = 1;
1844
1845 for (; i < XVECLEN (y, 0); i++)
1846 {
1847 rtx element = XVECEXP (y, 0, i);
1848
1849 source_reg = XEXP (element, 0);
1850
1851 if (GET_CODE (x) == MEM)
1852 target = change_address (x, GET_MODE (source_reg),
1853 plus_constant (XEXP (x, 0),
1854 INTVAL (XEXP (element, 1))));
1855 else if (XEXP (element, 1) == const0_rtx)
71bc0330
JW
1856 {
1857 target = x;
1858 if (GET_MODE (target) != GET_MODE (source_reg))
1859 target = gen_lowpart (GET_MODE (source_reg), target);
1860 }
fffa9c1d
JW
1861 else
1862 abort ();
1863
1864 emit_move_insn (target, source_reg);
1865 }
1866}
1867
94b25f81
RK
1868/* Add a USE expression for REG to the (possibly empty) list pointed
1869 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
1870
1871void
b3f8cf4a
RK
1872use_reg (call_fusage, reg)
1873 rtx *call_fusage, reg;
1874{
0304dfbb
DE
1875 if (GET_CODE (reg) != REG
1876 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
b3f8cf4a
RK
1877 abort();
1878
1879 *call_fusage
38a448ca
RH
1880 = gen_rtx_EXPR_LIST (VOIDmode,
1881 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
1882}
1883
94b25f81
RK
1884/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
1885 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
1886
1887void
0304dfbb
DE
1888use_regs (call_fusage, regno, nregs)
1889 rtx *call_fusage;
bbf6f052
RK
1890 int regno;
1891 int nregs;
1892{
0304dfbb 1893 int i;
bbf6f052 1894
0304dfbb
DE
1895 if (regno + nregs > FIRST_PSEUDO_REGISTER)
1896 abort ();
1897
1898 for (i = 0; i < nregs; i++)
38a448ca 1899 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
bbf6f052 1900}
fffa9c1d
JW
1901
1902/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
1903 PARALLEL REGS. This is for calls that pass values in multiple
1904 non-contiguous locations. The Irix 6 ABI has examples of this. */
1905
1906void
1907use_group_regs (call_fusage, regs)
1908 rtx *call_fusage;
1909 rtx regs;
1910{
1911 int i;
1912
6bd35f86
DE
1913 for (i = 0; i < XVECLEN (regs, 0); i++)
1914 {
1915 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 1916
6bd35f86
DE
1917 /* A NULL entry means the parameter goes both on the stack and in
1918 registers. This can also be a MEM for targets that pass values
1919 partially on the stack and partially in registers. */
e9a25f70 1920 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
1921 use_reg (call_fusage, reg);
1922 }
fffa9c1d 1923}
bbf6f052 1924\f
9de08200
RK
1925/* Generate several move instructions to clear LEN bytes of block TO.
1926 (A MEM rtx with BLKmode). The caller must pass TO through
1927 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
1928 we can assume. */
1929
1930static void
1931clear_by_pieces (to, len, align)
1932 rtx to;
1933 int len, align;
1934{
1935 struct clear_by_pieces data;
1936 rtx to_addr = XEXP (to, 0);
1937 int max_size = MOVE_MAX + 1;
1938
1939 data.offset = 0;
1940 data.to_addr = to_addr;
1941 data.to = to;
1942 data.autinc_to
1943 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1944 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1945
1946 data.explicit_inc_to = 0;
1947 data.reverse
1948 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1949 if (data.reverse) data.offset = len;
1950 data.len = len;
1951
1952 data.to_struct = MEM_IN_STRUCT_P (to);
1953
1954 /* If copying requires more than two move insns,
1955 copy addresses to registers (to make displacements shorter)
1956 and use post-increment if available. */
1957 if (!data.autinc_to
1958 && move_by_pieces_ninsns (len, align) > 2)
1959 {
1960#ifdef HAVE_PRE_DECREMENT
1961 if (data.reverse && ! data.autinc_to)
1962 {
1963 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1964 data.autinc_to = 1;
1965 data.explicit_inc_to = -1;
1966 }
1967#endif
1968#ifdef HAVE_POST_INCREMENT
1969 if (! data.reverse && ! data.autinc_to)
1970 {
1971 data.to_addr = copy_addr_to_reg (to_addr);
1972 data.autinc_to = 1;
1973 data.explicit_inc_to = 1;
1974 }
1975#endif
1976 if (!data.autinc_to && CONSTANT_P (to_addr))
1977 data.to_addr = copy_addr_to_reg (to_addr);
1978 }
1979
1980 if (! SLOW_UNALIGNED_ACCESS
1981 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
1982 align = MOVE_MAX;
1983
1984 /* First move what we can in the largest integer mode, then go to
1985 successively smaller modes. */
1986
1987 while (max_size > 1)
1988 {
1989 enum machine_mode mode = VOIDmode, tmode;
1990 enum insn_code icode;
1991
1992 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1993 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1994 if (GET_MODE_SIZE (tmode) < max_size)
1995 mode = tmode;
1996
1997 if (mode == VOIDmode)
1998 break;
1999
2000 icode = mov_optab->handlers[(int) mode].insn_code;
2001 if (icode != CODE_FOR_nothing
2002 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2003 GET_MODE_SIZE (mode)))
2004 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2005
2006 max_size = GET_MODE_SIZE (mode);
2007 }
2008
2009 /* The code above should have handled everything. */
2010 if (data.len != 0)
2011 abort ();
2012}
2013
/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1;

  while (data->len >= size)
    {
      /* When clearing backward, step the offset down before each store.  */
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

#ifdef HAVE_PRE_DECREMENT
      /* Explicit pre-decrement when clear_by_pieces asked for it.  */
      if (data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
#endif

      /* Store zero into this piece.  */
      emit_insn ((*genfun) (to1, const0_rtx));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
2054\f
bbf6f052 2055/* Write zeros through the storage of OBJECT.
9de08200 2056 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
e9a25f70 2057 the maximum alignment we can assume it has, measured in bytes.
bbf6f052 2058
e9a25f70
JL
2059 If we call a function that returns the length of the block, return it. */
2060
2061rtx
9de08200 2062clear_storage (object, size, align)
bbf6f052 2063 rtx object;
4c08eef0 2064 rtx size;
9de08200 2065 int align;
bbf6f052 2066{
e9a25f70
JL
2067 rtx retval = 0;
2068
bbf6f052
RK
2069 if (GET_MODE (object) == BLKmode)
2070 {
9de08200
RK
2071 object = protect_from_queue (object, 1);
2072 size = protect_from_queue (size, 0);
2073
2074 if (GET_CODE (size) == CONST_INT
2075 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2076 clear_by_pieces (object, INTVAL (size), align);
2077
2078 else
2079 {
2080 /* Try the most limited insn first, because there's no point
2081 including more than one in the machine description unless
2082 the more limited one has some advantage. */
2083
2084 rtx opalign = GEN_INT (align);
2085 enum machine_mode mode;
2086
2087 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2088 mode = GET_MODE_WIDER_MODE (mode))
2089 {
2090 enum insn_code code = clrstr_optab[(int) mode];
2091
2092 if (code != CODE_FOR_nothing
2093 /* We don't need MODE to be narrower than
2094 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2095 the mode mask, as it is returned by the macro, it will
2096 definitely be less than the actual mode mask. */
2097 && ((GET_CODE (size) == CONST_INT
2098 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2099 <= GET_MODE_MASK (mode)))
2100 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2101 && (insn_operand_predicate[(int) code][0] == 0
2102 || (*insn_operand_predicate[(int) code][0]) (object,
2103 BLKmode))
2104 && (insn_operand_predicate[(int) code][2] == 0
2105 || (*insn_operand_predicate[(int) code][2]) (opalign,
2106 VOIDmode)))
2107 {
2108 rtx op1;
2109 rtx last = get_last_insn ();
2110 rtx pat;
2111
2112 op1 = convert_to_mode (mode, size, 1);
2113 if (insn_operand_predicate[(int) code][1] != 0
2114 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2115 mode))
2116 op1 = copy_to_mode_reg (mode, op1);
2117
2118 pat = GEN_FCN ((int) code) (object, op1, opalign);
2119 if (pat)
2120 {
2121 emit_insn (pat);
e9a25f70 2122 return 0;
9de08200
RK
2123 }
2124 else
2125 delete_insns_since (last);
2126 }
2127 }
2128
2129
bbf6f052 2130#ifdef TARGET_MEM_FUNCTIONS
e9a25f70
JL
2131 retval
2132 = emit_library_call_value (memset_libfunc, NULL_RTX, 0,
2133 ptr_mode, 3,
2134 XEXP (object, 0), Pmode,
2135 const0_rtx,
2136 TYPE_MODE (integer_type_node),
2137 convert_to_mode
2138 (TYPE_MODE (sizetype), size,
2139 TREE_UNSIGNED (sizetype)),
2140 TYPE_MODE (sizetype));
bbf6f052 2141#else
9de08200
RK
2142 emit_library_call (bzero_libfunc, 0,
2143 VOIDmode, 2,
2144 XEXP (object, 0), Pmode,
e9a25f70
JL
2145 convert_to_mode
2146 (TYPE_MODE (integer_type_node), size,
2147 TREE_UNSIGNED (integer_type_node)),
9de08200 2148 TYPE_MODE (integer_type_node));
bbf6f052 2149#endif
9de08200 2150 }
bbf6f052
RK
2151 }
2152 else
66ed0683 2153 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
e9a25f70
JL
2154
2155 return retval;
bbf6f052
RK
2156}
2157
2158/* Generate code to copy Y into X.
2159 Both Y and X must have the same mode, except that
2160 Y can be a constant with VOIDmode.
2161 This mode cannot be BLKmode; use emit_block_move for that.
2162
2163 Return the last instruction emitted. */
2164
2165rtx
2166emit_move_insn (x, y)
2167 rtx x, y;
2168{
2169 enum machine_mode mode = GET_MODE (x);
bbf6f052
RK
2170
2171 x = protect_from_queue (x, 1);
2172 y = protect_from_queue (y, 0);
2173
2174 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2175 abort ();
2176
2177 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2178 y = force_const_mem (mode, y);
2179
2180 /* If X or Y are memory references, verify that their addresses are valid
2181 for the machine. */
2182 if (GET_CODE (x) == MEM
2183 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2184 && ! push_operand (x, GET_MODE (x)))
2185 || (flag_force_addr
2186 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2187 x = change_address (x, VOIDmode, XEXP (x, 0));
2188
2189 if (GET_CODE (y) == MEM
2190 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2191 || (flag_force_addr
2192 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2193 y = change_address (y, VOIDmode, XEXP (y, 0));
2194
2195 if (mode == BLKmode)
2196 abort ();
2197
261c4230
RS
2198 return emit_move_insn_1 (x, y);
2199}
2200
2201/* Low level part of emit_move_insn.
2202 Called just like emit_move_insn, but assumes X and Y
2203 are basically valid. */
2204
2205rtx
2206emit_move_insn_1 (x, y)
2207 rtx x, y;
2208{
2209 enum machine_mode mode = GET_MODE (x);
2210 enum machine_mode submode;
2211 enum mode_class class = GET_MODE_CLASS (mode);
2212 int i;
2213
bbf6f052
RK
2214 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2215 return
2216 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2217
89742723 2218 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2219 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
d0c76654
RK
2220 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2221 * BITS_PER_UNIT),
2222 (class == MODE_COMPLEX_INT
2223 ? MODE_INT : MODE_FLOAT),
2224 0))
7308a047
RS
2225 && (mov_optab->handlers[(int) submode].insn_code
2226 != CODE_FOR_nothing))
2227 {
2228 /* Don't split destination if it is a stack push. */
2229 int stack = push_operand (x, GET_MODE (x));
6551fa4d 2230 rtx insns;
7308a047 2231
7308a047
RS
2232 /* If this is a stack, push the highpart first, so it
2233 will be in the argument order.
2234
2235 In that case, change_address is used only to convert
2236 the mode, not to change the address. */
c937357e
RS
2237 if (stack)
2238 {
e33c0d66
RS
2239 /* Note that the real part always precedes the imag part in memory
2240 regardless of machine's endianness. */
c937357e
RS
2241#ifdef STACK_GROWS_DOWNWARD
2242 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2243 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2244 gen_imagpart (submode, y)));
c937357e 2245 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2246 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2247 gen_realpart (submode, y)));
c937357e
RS
2248#else
2249 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2250 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2251 gen_realpart (submode, y)));
c937357e 2252 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2253 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2254 gen_imagpart (submode, y)));
c937357e
RS
2255#endif
2256 }
2257 else
2258 {
2259 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 2260 (gen_realpart (submode, x), gen_realpart (submode, y)));
c937357e 2261 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 2262 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
c937357e 2263 }
7308a047 2264
7a1ab50a 2265 return get_last_insn ();
7308a047
RS
2266 }
2267
bbf6f052
RK
2268 /* This will handle any multi-word mode that lacks a move_insn pattern.
2269 However, you will get better code if you define such patterns,
2270 even if they must turn into multiple assembler instructions. */
a4320483 2271 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
bbf6f052
RK
2272 {
2273 rtx last_insn = 0;
6551fa4d
JW
2274 rtx insns;
2275
a98c9f1a
RK
2276#ifdef PUSH_ROUNDING
2277
2278 /* If X is a push on the stack, do the push now and replace
2279 X with a reference to the stack pointer. */
2280 if (push_operand (x, GET_MODE (x)))
2281 {
2282 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2283 x = change_address (x, VOIDmode, stack_pointer_rtx);
2284 }
2285#endif
2286
15a7a8ec 2287 /* Show the output dies here. */
43e046cb 2288 if (x != y)
38a448ca 2289 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
15a7a8ec 2290
bbf6f052
RK
2291 for (i = 0;
2292 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2293 i++)
2294 {
2295 rtx xpart = operand_subword (x, i, 1, mode);
2296 rtx ypart = operand_subword (y, i, 1, mode);
2297
2298 /* If we can't get a part of Y, put Y into memory if it is a
2299 constant. Otherwise, force it into a register. If we still
2300 can't get a part of Y, abort. */
2301 if (ypart == 0 && CONSTANT_P (y))
2302 {
2303 y = force_const_mem (mode, y);
2304 ypart = operand_subword (y, i, 1, mode);
2305 }
2306 else if (ypart == 0)
2307 ypart = operand_subword_force (y, i, mode);
2308
2309 if (xpart == 0 || ypart == 0)
2310 abort ();
2311
2312 last_insn = emit_move_insn (xpart, ypart);
2313 }
6551fa4d 2314
bbf6f052
RK
2315 return last_insn;
2316 }
2317 else
2318 abort ();
2319}
2320\f
2321/* Pushing data onto the stack. */
2322
2323/* Push a block of length SIZE (perhaps variable)
2324 and return an rtx to address the beginning of the block.
2325 Note that it is not possible for the value returned to be a QUEUED.
2326 The value may be virtual_outgoing_args_rtx.
2327
2328 EXTRA is the number of bytes of padding to push in addition to SIZE.
2329 BELOW nonzero means this padding comes at low addresses;
2330 otherwise, the padding comes at high addresses. */
2331
2332rtx
2333push_block (size, extra, below)
2334 rtx size;
2335 int extra, below;
2336{
2337 register rtx temp;
88f63c77
RK
2338
2339 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
2340 if (CONSTANT_P (size))
2341 anti_adjust_stack (plus_constant (size, extra));
2342 else if (GET_CODE (size) == REG && extra == 0)
2343 anti_adjust_stack (size);
2344 else
2345 {
2346 rtx temp = copy_to_mode_reg (Pmode, size);
2347 if (extra != 0)
906c4e36 2348 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
2349 temp, 0, OPTAB_LIB_WIDEN);
2350 anti_adjust_stack (temp);
2351 }
2352
2353#ifdef STACK_GROWS_DOWNWARD
2354 temp = virtual_outgoing_args_rtx;
2355 if (extra != 0 && below)
2356 temp = plus_constant (temp, extra);
2357#else
2358 if (GET_CODE (size) == CONST_INT)
2359 temp = plus_constant (virtual_outgoing_args_rtx,
2360 - INTVAL (size) - (below ? 0 : extra));
2361 else if (extra != 0 && !below)
38a448ca 2362 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
bbf6f052
RK
2363 negate_rtx (Pmode, plus_constant (size, extra)));
2364 else
38a448ca 2365 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
bbf6f052
RK
2366 negate_rtx (Pmode, size));
2367#endif
2368
2369 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2370}
2371
87e38d84 2372rtx
bbf6f052
RK
2373gen_push_operand ()
2374{
38a448ca 2375 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
bbf6f052
RK
2376}
2377
921b3427
RK
2378/* Return an rtx for the address of the beginning of a as-if-it-was-pushed
2379 block of SIZE bytes. */
2380
2381static rtx
2382get_push_address (size)
2383 int size;
2384{
2385 register rtx temp;
2386
2387 if (STACK_PUSH_CODE == POST_DEC)
38a448ca 2388 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427 2389 else if (STACK_PUSH_CODE == POST_INC)
38a448ca 2390 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427
RK
2391 else
2392 temp = stack_pointer_rtx;
2393
c85f7c16 2394 return copy_to_reg (temp);
921b3427
RK
2395}
2396
bbf6f052
RK
2397/* Generate code to push X onto the stack, assuming it has mode MODE and
2398 type TYPE.
2399 MODE is redundant except when X is a CONST_INT (since they don't
2400 carry mode info).
2401 SIZE is an rtx for the size of data to be copied (in bytes),
2402 needed only if X is BLKmode.
2403
2404 ALIGN (in bytes) is maximum alignment we can assume.
2405
cd048831
RK
2406 If PARTIAL and REG are both nonzero, then copy that many of the first
2407 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
2408 The amount of space pushed is decreased by PARTIAL words,
2409 rounded *down* to a multiple of PARM_BOUNDARY.
2410 REG must be a hard register in this case.
cd048831
RK
2411 If REG is zero but PARTIAL is not, take any all others actions for an
2412 argument partially in registers, but do not actually load any
2413 registers.
bbf6f052
RK
2414
2415 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 2416 This is ignored if an argument block has already been allocated.
bbf6f052
RK
2417
2418 On a machine that lacks real push insns, ARGS_ADDR is the address of
2419 the bottom of the argument block for this call. We use indexing off there
2420 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2421 argument block has not been preallocated.
2422
2423 ARGS_SO_FAR is the size of args previously pushed for this call. */
2424
void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = used;
#endif

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
	      < MOVE_RATIO)
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS)
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);

	  /* Instrument the bytes just pushed for the memory-usage
	     checker (-fcheck-memory-usage); guarded so the
	     instrumentation itself is not re-instrumented.  */
	  if (flag_check_memory_usage && ! in_check_memory_usage)
	    {
	      rtx temp;

	      in_check_memory_usage = 1;
	      temp = get_push_address (INTVAL(size) - used);
	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
				   temp, ptr_mode,
				   XEXP (xinner, 0), ptr_mode,
				   GEN_INT (INTVAL(size) - used),
				   TYPE_MODE (sizetype));
	      else
		emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
				   temp, ptr_mode,
				   GEN_INT (INTVAL(size) - used),
				   TYPE_MODE (sizetype),
				   GEN_INT (MEMORY_USE_RW),
				   TYPE_MODE (integer_type_node));
	      in_check_memory_usage = 0;
	    }
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));
	  /* Memory-usage checker instrumentation, as above.  */
	  if (flag_check_memory_usage && ! in_check_memory_usage)
	    {
	      rtx target;

	      in_check_memory_usage = 1;
	      target = copy_to_reg (temp);
	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
				   target, ptr_mode,
				   XEXP (xinner, 0), ptr_mode,
				   size, TYPE_MODE (sizetype));
	      else
		emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
				   target, ptr_mode,
				   size, TYPE_MODE (sizetype),
				   GEN_INT (MEMORY_USE_RW),
				   TYPE_MODE (integer_type_node));
	      in_check_memory_usage = 0;
	    }

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
		  < MOVE_RATIO))
	    {
	      move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
			      INTVAL (size), align);
	      goto ret;
	    }
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */
#ifdef HAVE_movstrqi
	  if (HAVE_movstrqi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (QImode) - 1))))
	    {
	      rtx pat = gen_movstrqi (gen_rtx_MEM (BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrhi
	  if (HAVE_movstrhi
	      && GET_CODE (size) == CONST_INT
	      && ((unsigned) INTVAL (size)
		  < (1 << (GET_MODE_BITSIZE (HImode) - 1))))
	    {
	      rtx pat = gen_movstrhi (gen_rtx_MEM (BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrsi
	  if (HAVE_movstrsi)
	    {
	      rtx pat = gen_movstrsi (gen_rtx_MEM (BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif
#ifdef HAVE_movstrdi
	  if (HAVE_movstrdi)
	    {
	      rtx pat = gen_movstrdi (gen_rtx_MEM (BLKmode, temp),
				      xinner, size, GEN_INT (align));
	      if (pat != 0)
		{
		  emit_insn (pat);
		  goto ret;
		}
	    }
#endif

#ifndef ACCUMULATE_OUTGOING_ARGS
	  /* If the source is referenced relative to the stack pointer,
	     copy it to another register to stabilize it.  We do not need
	     to do this if we know that we won't be changing sp.  */

	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	    temp = copy_to_reg (temp);
#endif

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */
	  NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, 0,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bcopy_libfunc, 0,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	  OK_DEFER_POP;
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      /* NOTE(review): this divides PARM_BOUNDARY by BITS_PER_WORD, while
	 the BLKmode branch above divides by BITS_PER_UNIT — looks
	 inconsistent; confirm which is intended.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
#ifndef REG_PARM_STACK_SPACE
      skip = 0;
#else
      skip = not_stack;
#endif

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)));
    }
  else
    {
      /* Simple scalar push, entirely on the stack.  */
      rtx addr;
      rtx target = NULL_RTX;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
	addr = gen_push_operand ();
      else
#endif
	{
	  if (GET_CODE (args_so_far) == CONST_INT)
	    addr
	      = memory_address (mode,
				plus_constant (args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  target = addr;
	}

      emit_move_insn (gen_rtx_MEM (mode, addr), x);

      /* Memory-usage checker instrumentation, as in the BLKmode case.  */
      if (flag_check_memory_usage && ! in_check_memory_usage)
	{
	  in_check_memory_usage = 1;
	  if (target == 0)
	    target = get_push_address (GET_MODE_SIZE (mode));

	  if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
	    emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			       target, ptr_mode,
			       XEXP (x, 0), ptr_mode,
			       GEN_INT (GET_MODE_SIZE (mode)),
			       TYPE_MODE (sizetype));
	  else
	    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
			       target, ptr_mode,
			       GEN_INT (GET_MODE_SIZE (mode)),
			       TYPE_MODE (sizetype),
			       GEN_INT (MEMORY_USE_RW),
			       TYPE_MODE (integer_type_node));
	  in_check_memory_usage = 0;
	}
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x);
      else
	move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
2826\f
bbf6f052
RK
2827/* Expand an assignment that stores the value of FROM into TO.
2828 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
2829 (This may contain a QUEUED rtx;
2830 if the value is constant, this rtx is a constant.)
2831 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
2832
2833 SUGGEST_REG is no longer actually used.
2834 It used to mean, copy the value through a register
2835 and return that register, if that is possible.
709f5be1 2836 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
2837
2838rtx
2839expand_assignment (to, from, want_value, suggest_reg)
2840 tree to, from;
2841 int want_value;
2842 int suggest_reg;
2843{
2844 register rtx to_rtx = 0;
2845 rtx result;
2846
2847 /* Don't crash if the lhs of the assignment was erroneous. */
2848
2849 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
2850 {
2851 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
2852 return want_value ? result : NULL_RTX;
2853 }
bbf6f052
RK
2854
2855 /* Assignment of a structure component needs special treatment
2856 if the structure component's rtx is not simply a MEM.
6be58303
JW
2857 Assignment of an array element at a constant index, and assignment of
2858 an array element in an unaligned packed structure field, has the same
2859 problem. */
bbf6f052 2860
08293add
RK
2861 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
2862 || TREE_CODE (to) == ARRAY_REF)
bbf6f052
RK
2863 {
2864 enum machine_mode mode1;
2865 int bitsize;
2866 int bitpos;
7bb0943f 2867 tree offset;
bbf6f052
RK
2868 int unsignedp;
2869 int volatilep = 0;
0088fcb1 2870 tree tem;
d78d243c 2871 int alignment;
0088fcb1
RK
2872
2873 push_temp_slots ();
839c4796
RK
2874 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
2875 &unsignedp, &volatilep, &alignment);
bbf6f052
RK
2876
2877 /* If we are going to use store_bit_field and extract_bit_field,
2878 make sure to_rtx will be safe for multiple use. */
2879
2880 if (mode1 == VOIDmode && want_value)
2881 tem = stabilize_reference (tem);
2882
921b3427 2883 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
7bb0943f
RS
2884 if (offset != 0)
2885 {
906c4e36 2886 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
2887
2888 if (GET_CODE (to_rtx) != MEM)
2889 abort ();
2890 to_rtx = change_address (to_rtx, VOIDmode,
38a448ca
RH
2891 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
2892 force_reg (ptr_mode, offset_rtx)));
7bb0943f 2893 }
bbf6f052
RK
2894 if (volatilep)
2895 {
2896 if (GET_CODE (to_rtx) == MEM)
01188446
JW
2897 {
2898 /* When the offset is zero, to_rtx is the address of the
2899 structure we are storing into, and hence may be shared.
2900 We must make a new MEM before setting the volatile bit. */
2901 if (offset == 0)
effbcc6a
RK
2902 to_rtx = copy_rtx (to_rtx);
2903
01188446
JW
2904 MEM_VOLATILE_P (to_rtx) = 1;
2905 }
bbf6f052
RK
2906#if 0 /* This was turned off because, when a field is volatile
2907 in an object which is not volatile, the object may be in a register,
2908 and then we would abort over here. */
2909 else
2910 abort ();
2911#endif
2912 }
2913
956d6950
JL
2914 if (TREE_CODE (to) == COMPONENT_REF
2915 && TREE_READONLY (TREE_OPERAND (to, 1)))
2916 {
2917 if (offset = 0)
2918 to_rtx = copy_rtx (to_rtx);
2919
2920 RTX_UNCHANGING_P (to_rtx) = 1;
2921 }
2922
921b3427
RK
2923 /* Check the access. */
2924 if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
2925 {
2926 rtx to_addr;
2927 int size;
2928 int best_mode_size;
2929 enum machine_mode best_mode;
2930
2931 best_mode = get_best_mode (bitsize, bitpos,
2932 TYPE_ALIGN (TREE_TYPE (tem)),
2933 mode1, volatilep);
2934 if (best_mode == VOIDmode)
2935 best_mode = QImode;
2936
2937 best_mode_size = GET_MODE_BITSIZE (best_mode);
2938 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
2939 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
2940 size *= GET_MODE_SIZE (best_mode);
2941
2942 /* Check the access right of the pointer. */
e9a25f70
JL
2943 if (size)
2944 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
2945 to_addr, ptr_mode,
2946 GEN_INT (size), TYPE_MODE (sizetype),
956d6950
JL
2947 GEN_INT (MEMORY_USE_WO),
2948 TYPE_MODE (integer_type_node));
921b3427
RK
2949 }
2950
bbf6f052
RK
2951 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
2952 (want_value
2953 /* Spurious cast makes HPUX compiler happy. */
2954 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
2955 : VOIDmode),
2956 unsignedp,
2957 /* Required alignment of containing datum. */
d78d243c 2958 alignment,
bbf6f052
RK
2959 int_size_in_bytes (TREE_TYPE (tem)));
2960 preserve_temp_slots (result);
2961 free_temp_slots ();
0088fcb1 2962 pop_temp_slots ();
bbf6f052 2963
709f5be1
RS
2964 /* If the value is meaningful, convert RESULT to the proper mode.
2965 Otherwise, return nothing. */
5ffe63ed
RS
2966 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
2967 TYPE_MODE (TREE_TYPE (from)),
2968 result,
2969 TREE_UNSIGNED (TREE_TYPE (to)))
709f5be1 2970 : NULL_RTX);
bbf6f052
RK
2971 }
2972
cd1db108
RS
2973 /* If the rhs is a function call and its value is not an aggregate,
2974 call the function before we start to compute the lhs.
2975 This is needed for correct code for cases such as
2976 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
2977 requires loading up part of an address in a separate insn.
2978
2979 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
2980 a promoted variable where the zero- or sign- extension needs to be done.
2981 Handling this in the normal way is safe because no computation is done
2982 before the call. */
2983 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 2984 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1ad87b63 2985 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 2986 {
0088fcb1
RK
2987 rtx value;
2988
2989 push_temp_slots ();
2990 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 2991 if (to_rtx == 0)
921b3427 2992 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
aaf87c45 2993
fffa9c1d
JW
2994 /* Handle calls that return values in multiple non-contiguous locations.
2995 The Irix 6 ABI has examples of this. */
2996 if (GET_CODE (to_rtx) == PARALLEL)
2997 emit_group_load (to_rtx, value);
2998 else if (GET_MODE (to_rtx) == BLKmode)
db3ec607 2999 emit_block_move (to_rtx, value, expr_size (from),
ff9b5bd8 3000 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
aaf87c45
JL
3001 else
3002 emit_move_insn (to_rtx, value);
cd1db108
RS
3003 preserve_temp_slots (to_rtx);
3004 free_temp_slots ();
0088fcb1 3005 pop_temp_slots ();
709f5be1 3006 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3007 }
3008
bbf6f052
RK
3009 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3010 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3011
3012 if (to_rtx == 0)
921b3427 3013 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
bbf6f052 3014
86d38d25
RS
3015 /* Don't move directly into a return register. */
3016 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3017 {
0088fcb1
RK
3018 rtx temp;
3019
3020 push_temp_slots ();
3021 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
86d38d25
RS
3022 emit_move_insn (to_rtx, temp);
3023 preserve_temp_slots (to_rtx);
3024 free_temp_slots ();
0088fcb1 3025 pop_temp_slots ();
709f5be1 3026 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3027 }
3028
bbf6f052
RK
3029 /* In case we are returning the contents of an object which overlaps
3030 the place the value is being stored, use a safe function when copying
3031 a value through a pointer into a structure value return block. */
3032 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3033 && current_function_returns_struct
3034 && !current_function_returns_pcc_struct)
3035 {
0088fcb1
RK
3036 rtx from_rtx, size;
3037
3038 push_temp_slots ();
33a20d10 3039 size = expr_size (from);
921b3427
RK
3040 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3041 EXPAND_MEMORY_USE_DONT);
3042
3043 /* Copy the rights of the bitmap. */
3044 if (flag_check_memory_usage)
3045 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3046 XEXP (to_rtx, 0), ptr_mode,
3047 XEXP (from_rtx, 0), ptr_mode,
3048 convert_to_mode (TYPE_MODE (sizetype),
3049 size, TREE_UNSIGNED (sizetype)),
3050 TYPE_MODE (sizetype));
bbf6f052
RK
3051
3052#ifdef TARGET_MEM_FUNCTIONS
d562e42e 3053 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
3054 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3055 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
3056 convert_to_mode (TYPE_MODE (sizetype),
3057 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3058 TYPE_MODE (sizetype));
bbf6f052 3059#else
d562e42e 3060 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
3061 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3062 XEXP (to_rtx, 0), Pmode,
3b6f75e2
JW
3063 convert_to_mode (TYPE_MODE (integer_type_node),
3064 size, TREE_UNSIGNED (integer_type_node)),
3065 TYPE_MODE (integer_type_node));
bbf6f052
RK
3066#endif
3067
3068 preserve_temp_slots (to_rtx);
3069 free_temp_slots ();
0088fcb1 3070 pop_temp_slots ();
709f5be1 3071 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3072 }
3073
3074 /* Compute FROM and store the value in the rtx we got. */
3075
0088fcb1 3076 push_temp_slots ();
bbf6f052
RK
3077 result = store_expr (from, to_rtx, want_value);
3078 preserve_temp_slots (result);
3079 free_temp_slots ();
0088fcb1 3080 pop_temp_slots ();
709f5be1 3081 return want_value ? result : NULL_RTX;
bbf6f052
RK
3082}
3083
3084/* Generate code for computing expression EXP,
3085 and storing the value into TARGET.
bbf6f052
RK
3086 TARGET may contain a QUEUED rtx.
3087
709f5be1
RS
3088 If WANT_VALUE is nonzero, return a copy of the value
3089 not in TARGET, so that we can be sure to use the proper
3090 value in a containing expression even if TARGET has something
3091 else stored in it. If possible, we copy the value through a pseudo
3092 and return that pseudo. Or, if the value is constant, we try to
3093 return the constant. In some cases, we return a pseudo
3094 copied *from* TARGET.
3095
3096 If the mode is BLKmode then we may return TARGET itself.
3097 It turns out that in BLKmode it doesn't cause a problem.
3098 because C has no operators that could combine two different
3099 assignments into the same BLKmode object with different values
3100 with no sequence point. Will other languages need this to
3101 be more thorough?
3102
3103 If WANT_VALUE is 0, we return NULL, to make sure
3104 to catch quickly any cases where the caller uses the value
3105 and fails to set WANT_VALUE. */
bbf6f052
RK
3106
3107rtx
709f5be1 3108store_expr (exp, target, want_value)
bbf6f052
RK
3109 register tree exp;
3110 register rtx target;
709f5be1 3111 int want_value;
bbf6f052
RK
3112{
3113 register rtx temp;
3114 int dont_return_target = 0;
3115
3116 if (TREE_CODE (exp) == COMPOUND_EXPR)
3117 {
3118 /* Perform first part of compound expression, then assign from second
3119 part. */
3120 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3121 emit_queue ();
709f5be1 3122 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
3123 }
3124 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3125 {
3126 /* For conditional expression, get safe form of the target. Then
3127 test the condition, doing the appropriate assignment on either
3128 side. This avoids the creation of unnecessary temporaries.
3129 For non-BLKmode, it is more efficient not to do this. */
3130
3131 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3132
3133 emit_queue ();
3134 target = protect_from_queue (target, 1);
3135
dabf8373 3136 do_pending_stack_adjust ();
bbf6f052
RK
3137 NO_DEFER_POP;
3138 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 3139 start_cleanup_deferral ();
709f5be1 3140 store_expr (TREE_OPERAND (exp, 1), target, 0);
956d6950 3141 end_cleanup_deferral ();
bbf6f052
RK
3142 emit_queue ();
3143 emit_jump_insn (gen_jump (lab2));
3144 emit_barrier ();
3145 emit_label (lab1);
956d6950 3146 start_cleanup_deferral ();
709f5be1 3147 store_expr (TREE_OPERAND (exp, 2), target, 0);
956d6950 3148 end_cleanup_deferral ();
bbf6f052
RK
3149 emit_queue ();
3150 emit_label (lab2);
3151 OK_DEFER_POP;
a3a58acc 3152
709f5be1 3153 return want_value ? target : NULL_RTX;
bbf6f052 3154 }
709f5be1 3155 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
bbf6f052
RK
3156 && GET_MODE (target) != BLKmode)
3157 /* If target is in memory and caller wants value in a register instead,
3158 arrange that. Pass TARGET as target for expand_expr so that,
709f5be1 3159 if EXP is another assignment, WANT_VALUE will be nonzero for it.
c2e6aff6
RS
3160 We know expand_expr will not use the target in that case.
3161 Don't do this if TARGET is volatile because we are supposed
3162 to write it and then read it. */
bbf6f052 3163 {
906c4e36 3164 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
bbf6f052
RK
3165 GET_MODE (target), 0);
3166 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3167 temp = copy_to_reg (temp);
3168 dont_return_target = 1;
3169 }
3170 else if (queued_subexp_p (target))
709f5be1
RS
3171 /* If target contains a postincrement, let's not risk
3172 using it as the place to generate the rhs. */
bbf6f052
RK
3173 {
3174 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3175 {
3176 /* Expand EXP into a new pseudo. */
3177 temp = gen_reg_rtx (GET_MODE (target));
3178 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3179 }
3180 else
906c4e36 3181 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
3182
3183 /* If target is volatile, ANSI requires accessing the value
3184 *from* the target, if it is accessed. So make that happen.
3185 In no case return the target itself. */
3186 if (! MEM_VOLATILE_P (target) && want_value)
3187 dont_return_target = 1;
bbf6f052 3188 }
1499e0a8
RK
3189 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3190 /* If this is an scalar in a register that is stored in a wider mode
3191 than the declared mode, compute the result into its declared mode
3192 and then convert to the wider mode. Our value is the computed
3193 expression. */
3194 {
5a32d038 3195 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
3196 which will often result in some optimizations. Do the conversion
3197 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
3198 the extend. But don't do this if the type of EXP is a subtype
3199 of something else since then the conversion might involve
3200 more than just converting modes. */
3201 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3202 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
3203 {
3204 if (TREE_UNSIGNED (TREE_TYPE (exp))
3205 != SUBREG_PROMOTED_UNSIGNED_P (target))
3206 exp
3207 = convert
3208 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3209 TREE_TYPE (exp)),
3210 exp);
3211
3212 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3213 SUBREG_PROMOTED_UNSIGNED_P (target)),
3214 exp);
3215 }
5a32d038 3216
1499e0a8 3217 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
b258707c 3218
766f36c7 3219 /* If TEMP is a volatile MEM and we want a result value, make
f29369b9
RK
3220 the access now so it gets done only once. Likewise if
3221 it contains TARGET. */
3222 if (GET_CODE (temp) == MEM && want_value
3223 && (MEM_VOLATILE_P (temp)
3224 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
766f36c7
RK
3225 temp = copy_to_reg (temp);
3226
b258707c
RS
3227 /* If TEMP is a VOIDmode constant, use convert_modes to make
3228 sure that we properly convert it. */
3229 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3230 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3231 TYPE_MODE (TREE_TYPE (exp)), temp,
3232 SUBREG_PROMOTED_UNSIGNED_P (target));
3233
1499e0a8
RK
3234 convert_move (SUBREG_REG (target), temp,
3235 SUBREG_PROMOTED_UNSIGNED_P (target));
709f5be1 3236 return want_value ? temp : NULL_RTX;
1499e0a8 3237 }
bbf6f052
RK
3238 else
3239 {
3240 temp = expand_expr (exp, target, GET_MODE (target), 0);
766f36c7 3241 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
3242 If TARGET is a volatile mem ref, either return TARGET
3243 or return a reg copied *from* TARGET; ANSI requires this.
3244
3245 Otherwise, if TEMP is not TARGET, return TEMP
3246 if it is constant (for efficiency),
3247 or if we really want the correct value. */
bbf6f052
RK
3248 if (!(target && GET_CODE (target) == REG
3249 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 3250 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 3251 && ! rtx_equal_p (temp, target)
709f5be1 3252 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
3253 dont_return_target = 1;
3254 }
3255
b258707c
RS
3256 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3257 the same as that of TARGET, adjust the constant. This is needed, for
3258 example, in case it is a CONST_DOUBLE and we want only a word-sized
3259 value. */
3260 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 3261 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
3262 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3263 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3264 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3265
921b3427
RK
3266 if (flag_check_memory_usage
3267 && GET_CODE (target) == MEM
3268 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3269 {
3270 if (GET_CODE (temp) == MEM)
3271 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3272 XEXP (target, 0), ptr_mode,
3273 XEXP (temp, 0), ptr_mode,
3274 expr_size (exp), TYPE_MODE (sizetype));
3275 else
3276 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3277 XEXP (target, 0), ptr_mode,
3278 expr_size (exp), TYPE_MODE (sizetype),
956d6950
JL
3279 GEN_INT (MEMORY_USE_WO),
3280 TYPE_MODE (integer_type_node));
921b3427
RK
3281 }
3282
bbf6f052
RK
3283 /* If value was not generated in the target, store it there.
3284 Convert the value to TARGET's type first if nec. */
3285
effbcc6a 3286 if (! rtx_equal_p (temp, target) && TREE_CODE (exp) != ERROR_MARK)
bbf6f052
RK
3287 {
3288 target = protect_from_queue (target, 1);
3289 if (GET_MODE (temp) != GET_MODE (target)
3290 && GET_MODE (temp) != VOIDmode)
3291 {
3292 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3293 if (dont_return_target)
3294 {
3295 /* In this case, we will return TEMP,
3296 so make sure it has the proper mode.
3297 But don't forget to store the value into TARGET. */
3298 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3299 emit_move_insn (target, temp);
3300 }
3301 else
3302 convert_move (target, temp, unsignedp);
3303 }
3304
3305 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3306 {
3307 /* Handle copying a string constant into an array.
3308 The string constant may be shorter than the array.
3309 So copy just the string's actual length, and clear the rest. */
3310 rtx size;
22619c3f 3311 rtx addr;
bbf6f052 3312
e87b4f3f
RS
3313 /* Get the size of the data type of the string,
3314 which is actually the size of the target. */
3315 size = expr_size (exp);
3316 if (GET_CODE (size) == CONST_INT
3317 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3318 emit_block_move (target, temp, size,
3319 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3320 else
bbf6f052 3321 {
e87b4f3f
RS
3322 /* Compute the size of the data to copy from the string. */
3323 tree copy_size
c03b7665 3324 = size_binop (MIN_EXPR,
b50d17a1 3325 make_tree (sizetype, size),
c03b7665
RK
3326 convert (sizetype,
3327 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
906c4e36
RK
3328 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3329 VOIDmode, 0);
e87b4f3f
RS
3330 rtx label = 0;
3331
3332 /* Copy that much. */
3333 emit_block_move (target, temp, copy_size_rtx,
3334 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3335
88f63c77
RK
3336 /* Figure out how much is left in TARGET that we have to clear.
3337 Do all calculations in ptr_mode. */
3338
3339 addr = XEXP (target, 0);
3340 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3341
e87b4f3f
RS
3342 if (GET_CODE (copy_size_rtx) == CONST_INT)
3343 {
88f63c77 3344 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
22619c3f 3345 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
e87b4f3f
RS
3346 }
3347 else
3348 {
88f63c77
RK
3349 addr = force_reg (ptr_mode, addr);
3350 addr = expand_binop (ptr_mode, add_optab, addr,
906c4e36
RK
3351 copy_size_rtx, NULL_RTX, 0,
3352 OPTAB_LIB_WIDEN);
e87b4f3f 3353
88f63c77 3354 size = expand_binop (ptr_mode, sub_optab, size,
906c4e36
RK
3355 copy_size_rtx, NULL_RTX, 0,
3356 OPTAB_LIB_WIDEN);
e87b4f3f 3357
906c4e36 3358 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
e87b4f3f
RS
3359 GET_MODE (size), 0, 0);
3360 label = gen_label_rtx ();
3361 emit_jump_insn (gen_blt (label));
3362 }
3363
3364 if (size != const0_rtx)
3365 {
921b3427
RK
3366 /* Be sure we can write on ADDR. */
3367 if (flag_check_memory_usage)
3368 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3369 addr, ptr_mode,
3370 size, TYPE_MODE (sizetype),
956d6950
JL
3371 GEN_INT (MEMORY_USE_WO),
3372 TYPE_MODE (integer_type_node));
bbf6f052 3373#ifdef TARGET_MEM_FUNCTIONS
3b6f75e2 3374 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
86242483 3375 addr, ptr_mode,
3b6f75e2
JW
3376 const0_rtx, TYPE_MODE (integer_type_node),
3377 convert_to_mode (TYPE_MODE (sizetype),
3378 size,
3379 TREE_UNSIGNED (sizetype)),
3380 TYPE_MODE (sizetype));
bbf6f052 3381#else
d562e42e 3382 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
86242483 3383 addr, ptr_mode,
3b6f75e2
JW
3384 convert_to_mode (TYPE_MODE (integer_type_node),
3385 size,
3386 TREE_UNSIGNED (integer_type_node)),
3387 TYPE_MODE (integer_type_node));
bbf6f052 3388#endif
e87b4f3f 3389 }
22619c3f 3390
e87b4f3f
RS
3391 if (label)
3392 emit_label (label);
bbf6f052
RK
3393 }
3394 }
fffa9c1d
JW
3395 /* Handle calls that return values in multiple non-contiguous locations.
3396 The Irix 6 ABI has examples of this. */
3397 else if (GET_CODE (target) == PARALLEL)
3398 emit_group_load (target, temp);
bbf6f052
RK
3399 else if (GET_MODE (temp) == BLKmode)
3400 emit_block_move (target, temp, expr_size (exp),
3401 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3402 else
3403 emit_move_insn (target, temp);
3404 }
709f5be1 3405
766f36c7
RK
3406 /* If we don't want a value, return NULL_RTX. */
3407 if (! want_value)
3408 return NULL_RTX;
3409
3410 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3411 ??? The latter test doesn't seem to make sense. */
3412 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 3413 return temp;
766f36c7
RK
3414
3415 /* Return TARGET itself if it is a hard register. */
3416 else if (want_value && GET_MODE (target) != BLKmode
3417 && ! (GET_CODE (target) == REG
3418 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 3419 return copy_to_reg (target);
766f36c7
RK
3420
3421 else
709f5be1 3422 return target;
bbf6f052
RK
3423}
3424\f
9de08200
RK
3425/* Return 1 if EXP just contains zeros. */
3426
3427static int
3428is_zeros_p (exp)
3429 tree exp;
3430{
3431 tree elt;
3432
3433 switch (TREE_CODE (exp))
3434 {
3435 case CONVERT_EXPR:
3436 case NOP_EXPR:
3437 case NON_LVALUE_EXPR:
3438 return is_zeros_p (TREE_OPERAND (exp, 0));
3439
3440 case INTEGER_CST:
3441 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3442
3443 case COMPLEX_CST:
3444 return
3445 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3446
3447 case REAL_CST:
41c9120b 3448 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200
RK
3449
3450 case CONSTRUCTOR:
e1a43f73
PB
3451 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3452 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
3453 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3454 if (! is_zeros_p (TREE_VALUE (elt)))
3455 return 0;
3456
3457 return 1;
e9a25f70
JL
3458
3459 default:
3460 return 0;
9de08200 3461 }
9de08200
RK
3462}
3463
3464/* Return 1 if EXP contains mostly (3/4) zeros. */
3465
3466static int
3467mostly_zeros_p (exp)
3468 tree exp;
3469{
9de08200
RK
3470 if (TREE_CODE (exp) == CONSTRUCTOR)
3471 {
e1a43f73
PB
3472 int elts = 0, zeros = 0;
3473 tree elt = CONSTRUCTOR_ELTS (exp);
3474 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3475 {
3476 /* If there are no ranges of true bits, it is all zero. */
3477 return elt == NULL_TREE;
3478 }
3479 for (; elt; elt = TREE_CHAIN (elt))
3480 {
3481 /* We do not handle the case where the index is a RANGE_EXPR,
3482 so the statistic will be somewhat inaccurate.
3483 We do make a more accurate count in store_constructor itself,
3484 so since this function is only used for nested array elements,
0f41302f 3485 this should be close enough. */
e1a43f73
PB
3486 if (mostly_zeros_p (TREE_VALUE (elt)))
3487 zeros++;
3488 elts++;
3489 }
9de08200
RK
3490
3491 return 4 * zeros >= 3 * elts;
3492 }
3493
3494 return is_zeros_p (exp);
3495}
3496\f
e1a43f73
PB
3497/* Helper function for store_constructor.
3498 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3499 TYPE is the type of the CONSTRUCTOR, not the element type.
23ccec44
JW
3500 CLEARED is as for store_constructor.
3501
3502 This provides a recursive shortcut back to store_constructor when it isn't
3503 necessary to go through store_field. This is so that we can pass through
3504 the cleared field to let store_constructor know that we may not have to
3505 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
3506
3507static void
3508store_constructor_field (target, bitsize, bitpos,
3509 mode, exp, type, cleared)
3510 rtx target;
3511 int bitsize, bitpos;
3512 enum machine_mode mode;
3513 tree exp, type;
3514 int cleared;
3515{
3516 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44
JW
3517 && bitpos % BITS_PER_UNIT == 0
3518 /* If we have a non-zero bitpos for a register target, then we just
3519 let store_field do the bitfield handling. This is unlikely to
3520 generate unnecessary clear instructions anyways. */
3521 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 3522 {
126e5b0d
JW
3523 if (bitpos != 0)
3524 target = change_address (target, VOIDmode,
3525 plus_constant (XEXP (target, 0),
3526 bitpos / BITS_PER_UNIT));
3527 store_constructor (exp, target, cleared);
e1a43f73
PB
3528 }
3529 else
3530 store_field (target, bitsize, bitpos, mode, exp,
3531 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3532 int_size_in_bytes (type));
3533}
3534
bbf6f052 3535/* Store the value of constructor EXP into the rtx TARGET.
e1a43f73 3536 TARGET is either a REG or a MEM.
0f41302f 3537 CLEARED is true if TARGET is known to have been zero'd. */
bbf6f052
RK
3538
3539static void
e1a43f73 3540store_constructor (exp, target, cleared)
bbf6f052
RK
3541 tree exp;
3542 rtx target;
e1a43f73 3543 int cleared;
bbf6f052 3544{
4af3895e
JVA
3545 tree type = TREE_TYPE (exp);
3546
bbf6f052
RK
3547 /* We know our target cannot conflict, since safe_from_p has been called. */
3548#if 0
3549 /* Don't try copying piece by piece into a hard register
3550 since that is vulnerable to being clobbered by EXP.
3551 Instead, construct in a pseudo register and then copy it all. */
3552 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3553 {
3554 rtx temp = gen_reg_rtx (GET_MODE (target));
e1a43f73 3555 store_constructor (exp, temp, 0);
bbf6f052
RK
3556 emit_move_insn (target, temp);
3557 return;
3558 }
3559#endif
3560
e44842fe
RK
3561 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3562 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
3563 {
3564 register tree elt;
3565
4af3895e 3566 /* Inform later passes that the whole union value is dead. */
e44842fe
RK
3567 if (TREE_CODE (type) == UNION_TYPE
3568 || TREE_CODE (type) == QUAL_UNION_TYPE)
38a448ca 3569 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4af3895e
JVA
3570
3571 /* If we are building a static constructor into a register,
3572 set the initial value as zero so we can fold the value into
67225c15
RK
3573 a constant. But if more than one register is involved,
3574 this probably loses. */
3575 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3576 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200
RK
3577 {
3578 if (! cleared)
e9a25f70 3579 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4af3895e 3580
9de08200
RK
3581 cleared = 1;
3582 }
3583
3584 /* If the constructor has fewer fields than the structure
3585 or if we are initializing the structure to mostly zeros,
bbf6f052 3586 clear the whole structure first. */
9de08200
RK
3587 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3588 != list_length (TYPE_FIELDS (type)))
3589 || mostly_zeros_p (exp))
3590 {
3591 if (! cleared)
3592 clear_storage (target, expr_size (exp),
3593 TYPE_ALIGN (type) / BITS_PER_UNIT);
3594
3595 cleared = 1;
3596 }
bbf6f052
RK
3597 else
3598 /* Inform later passes that the old value is dead. */
38a448ca 3599 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
3600
3601 /* Store each element of the constructor into
3602 the corresponding field of TARGET. */
3603
3604 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3605 {
3606 register tree field = TREE_PURPOSE (elt);
3607 register enum machine_mode mode;
3608 int bitsize;
b50d17a1 3609 int bitpos = 0;
bbf6f052 3610 int unsignedp;
b50d17a1
RK
3611 tree pos, constant = 0, offset = 0;
3612 rtx to_rtx = target;
bbf6f052 3613
f32fd778
RS
3614 /* Just ignore missing fields.
3615 We cleared the whole structure, above,
3616 if any fields are missing. */
3617 if (field == 0)
3618 continue;
3619
e1a43f73
PB
3620 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3621 continue;
9de08200 3622
bbf6f052
RK
3623 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3624 unsignedp = TREE_UNSIGNED (field);
3625 mode = DECL_MODE (field);
3626 if (DECL_BIT_FIELD (field))
3627 mode = VOIDmode;
3628
b50d17a1
RK
3629 pos = DECL_FIELD_BITPOS (field);
3630 if (TREE_CODE (pos) == INTEGER_CST)
3631 constant = pos;
3632 else if (TREE_CODE (pos) == PLUS_EXPR
3633 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3634 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3635 else
3636 offset = pos;
3637
3638 if (constant)
cd11b87e 3639 bitpos = TREE_INT_CST_LOW (constant);
b50d17a1
RK
3640
3641 if (offset)
3642 {
3643 rtx offset_rtx;
3644
3645 if (contains_placeholder_p (offset))
3646 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 3647 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 3648
b50d17a1
RK
3649 offset = size_binop (FLOOR_DIV_EXPR, offset,
3650 size_int (BITS_PER_UNIT));
bbf6f052 3651
b50d17a1
RK
3652 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3653 if (GET_CODE (to_rtx) != MEM)
3654 abort ();
3655
3656 to_rtx
3657 = change_address (to_rtx, VOIDmode,
38a448ca 3658 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
88f63c77 3659 force_reg (ptr_mode, offset_rtx)));
b50d17a1 3660 }
cf04eb80
RK
3661 if (TREE_READONLY (field))
3662 {
9151b3bf 3663 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
3664 to_rtx = copy_rtx (to_rtx);
3665
cf04eb80
RK
3666 RTX_UNCHANGING_P (to_rtx) = 1;
3667 }
3668
e1a43f73
PB
3669 store_constructor_field (to_rtx, bitsize, bitpos,
3670 mode, TREE_VALUE (elt), type, cleared);
bbf6f052
RK
3671 }
3672 }
4af3895e 3673 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
3674 {
3675 register tree elt;
3676 register int i;
e1a43f73 3677 int need_to_clear;
4af3895e 3678 tree domain = TYPE_DOMAIN (type);
906c4e36
RK
3679 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3680 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 3681 tree elttype = TREE_TYPE (type);
bbf6f052 3682
e1a43f73
PB
3683 /* If the constructor has fewer elements than the array,
3684 clear the whole array first. Similarly if this this is
3685 static constructor of a non-BLKmode object. */
3686 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3687 need_to_clear = 1;
3688 else
3689 {
3690 HOST_WIDE_INT count = 0, zero_count = 0;
3691 need_to_clear = 0;
3692 /* This loop is a more accurate version of the loop in
3693 mostly_zeros_p (it handles RANGE_EXPR in an index).
3694 It is also needed to check for missing elements. */
3695 for (elt = CONSTRUCTOR_ELTS (exp);
3696 elt != NULL_TREE;
df0faff1 3697 elt = TREE_CHAIN (elt))
e1a43f73
PB
3698 {
3699 tree index = TREE_PURPOSE (elt);
3700 HOST_WIDE_INT this_node_count;
3701 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3702 {
3703 tree lo_index = TREE_OPERAND (index, 0);
3704 tree hi_index = TREE_OPERAND (index, 1);
3705 if (TREE_CODE (lo_index) != INTEGER_CST
3706 || TREE_CODE (hi_index) != INTEGER_CST)
3707 {
3708 need_to_clear = 1;
3709 break;
3710 }
3711 this_node_count = TREE_INT_CST_LOW (hi_index)
3712 - TREE_INT_CST_LOW (lo_index) + 1;
3713 }
3714 else
3715 this_node_count = 1;
3716 count += this_node_count;
3717 if (mostly_zeros_p (TREE_VALUE (elt)))
3718 zero_count += this_node_count;
3719 }
8e958f70 3720 /* Clear the entire array first if there are any missing elements,
0f41302f 3721 or if the incidence of zero elements is >= 75%. */
8e958f70
PB
3722 if (count < maxelt - minelt + 1
3723 || 4 * zero_count >= 3 * count)
e1a43f73
PB
3724 need_to_clear = 1;
3725 }
3726 if (need_to_clear)
9de08200
RK
3727 {
3728 if (! cleared)
3729 clear_storage (target, expr_size (exp),
3730 TYPE_ALIGN (type) / BITS_PER_UNIT);
9de08200
RK
3731 cleared = 1;
3732 }
bbf6f052
RK
3733 else
3734 /* Inform later passes that the old value is dead. */
38a448ca 3735 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
3736
3737 /* Store each element of the constructor into
3738 the corresponding element of TARGET, determined
3739 by counting the elements. */
3740 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
3741 elt;
3742 elt = TREE_CHAIN (elt), i++)
3743 {
3744 register enum machine_mode mode;
3745 int bitsize;
3746 int bitpos;
3747 int unsignedp;
e1a43f73 3748 tree value = TREE_VALUE (elt);
03dc44a6
RS
3749 tree index = TREE_PURPOSE (elt);
3750 rtx xtarget = target;
bbf6f052 3751
e1a43f73
PB
3752 if (cleared && is_zeros_p (value))
3753 continue;
9de08200 3754
bbf6f052
RK
3755 mode = TYPE_MODE (elttype);
3756 bitsize = GET_MODE_BITSIZE (mode);
3757 unsignedp = TREE_UNSIGNED (elttype);
3758
e1a43f73
PB
3759 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3760 {
3761 tree lo_index = TREE_OPERAND (index, 0);
3762 tree hi_index = TREE_OPERAND (index, 1);
3763 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
3764 struct nesting *loop;
05c0b405
PB
3765 HOST_WIDE_INT lo, hi, count;
3766 tree position;
e1a43f73 3767
0f41302f 3768 /* If the range is constant and "small", unroll the loop. */
e1a43f73 3769 if (TREE_CODE (lo_index) == INTEGER_CST
05c0b405
PB
3770 && TREE_CODE (hi_index) == INTEGER_CST
3771 && (lo = TREE_INT_CST_LOW (lo_index),
3772 hi = TREE_INT_CST_LOW (hi_index),
3773 count = hi - lo + 1,
3774 (GET_CODE (target) != MEM
3775 || count <= 2
3776 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
3777 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
3778 <= 40 * 8))))
e1a43f73 3779 {
05c0b405
PB
3780 lo -= minelt; hi -= minelt;
3781 for (; lo <= hi; lo++)
e1a43f73 3782 {
05c0b405
PB
3783 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
3784 store_constructor_field (target, bitsize, bitpos,
3785 mode, value, type, cleared);
e1a43f73
PB
3786 }
3787 }
3788 else
3789 {
3790 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
3791 loop_top = gen_label_rtx ();
3792 loop_end = gen_label_rtx ();
3793
3794 unsignedp = TREE_UNSIGNED (domain);
3795
3796 index = build_decl (VAR_DECL, NULL_TREE, domain);
3797
3798 DECL_RTL (index) = index_r
3799 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
3800 &unsignedp, 0));
3801
3802 if (TREE_CODE (value) == SAVE_EXPR
3803 && SAVE_EXPR_RTL (value) == 0)
3804 {
0f41302f
MS
3805 /* Make sure value gets expanded once before the
3806 loop. */
e1a43f73
PB
3807 expand_expr (value, const0_rtx, VOIDmode, 0);
3808 emit_queue ();
3809 }
3810 store_expr (lo_index, index_r, 0);
3811 loop = expand_start_loop (0);
3812
0f41302f 3813 /* Assign value to element index. */
e1a43f73
PB
3814 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3815 size_int (BITS_PER_UNIT));
3816 position = size_binop (MULT_EXPR,
3817 size_binop (MINUS_EXPR, index,
3818 TYPE_MIN_VALUE (domain)),
3819 position);
3820 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 3821 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
e1a43f73
PB
3822 xtarget = change_address (target, mode, addr);
3823 if (TREE_CODE (value) == CONSTRUCTOR)
05c0b405 3824 store_constructor (value, xtarget, cleared);
e1a43f73
PB
3825 else
3826 store_expr (value, xtarget, 0);
3827
3828 expand_exit_loop_if_false (loop,
3829 build (LT_EXPR, integer_type_node,
3830 index, hi_index));
3831
3832 expand_increment (build (PREINCREMENT_EXPR,
3833 TREE_TYPE (index),
7b8b9722 3834 index, integer_one_node), 0, 0);
e1a43f73
PB
3835 expand_end_loop ();
3836 emit_label (loop_end);
3837
3838 /* Needed by stupid register allocation. to extend the
3839 lifetime of pseudo-regs used by target past the end
3840 of the loop. */
38a448ca 3841 emit_insn (gen_rtx_USE (GET_MODE (target), target));
e1a43f73
PB
3842 }
3843 }
3844 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
5b6c44ff 3845 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
03dc44a6 3846 {
e1a43f73 3847 rtx pos_rtx, addr;
03dc44a6
RS
3848 tree position;
3849
5b6c44ff
RK
3850 if (index == 0)
3851 index = size_int (i);
3852
e1a43f73
PB
3853 if (minelt)
3854 index = size_binop (MINUS_EXPR, index,
3855 TYPE_MIN_VALUE (domain));
5b6c44ff
RK
3856 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
3857 size_int (BITS_PER_UNIT));
3858 position = size_binop (MULT_EXPR, index, position);
03dc44a6 3859 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 3860 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
03dc44a6 3861 xtarget = change_address (target, mode, addr);
e1a43f73 3862 store_expr (value, xtarget, 0);
03dc44a6
RS
3863 }
3864 else
3865 {
3866 if (index != 0)
7c314719 3867 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
03dc44a6
RS
3868 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
3869 else
3870 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
e1a43f73
PB
3871 store_constructor_field (target, bitsize, bitpos,
3872 mode, value, type, cleared);
03dc44a6 3873 }
bbf6f052
RK
3874 }
3875 }
071a6595
PB
3876 /* set constructor assignments */
3877 else if (TREE_CODE (type) == SET_TYPE)
3878 {
e1a43f73 3879 tree elt = CONSTRUCTOR_ELTS (exp);
071a6595
PB
3880 rtx xtarget = XEXP (target, 0);
3881 int set_word_size = TYPE_ALIGN (type);
e1a43f73 3882 int nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
3883 tree domain = TYPE_DOMAIN (type);
3884 tree domain_min, domain_max, bitlength;
3885
9faa82d8 3886 /* The default implementation strategy is to extract the constant
071a6595
PB
3887 parts of the constructor, use that to initialize the target,
3888 and then "or" in whatever non-constant ranges we need in addition.
3889
3890 If a large set is all zero or all ones, it is
3891 probably better to set it using memset (if available) or bzero.
3892 Also, if a large set has just a single range, it may also be
3893 better to first clear all the first clear the set (using
0f41302f 3894 bzero/memset), and set the bits we want. */
071a6595 3895
0f41302f 3896 /* Check for all zeros. */
e1a43f73 3897 if (elt == NULL_TREE)
071a6595 3898 {
e1a43f73
PB
3899 if (!cleared)
3900 clear_storage (target, expr_size (exp),
3901 TYPE_ALIGN (type) / BITS_PER_UNIT);
071a6595
PB
3902 return;
3903 }
3904
071a6595
PB
3905 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
3906 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
3907 bitlength = size_binop (PLUS_EXPR,
3908 size_binop (MINUS_EXPR, domain_max, domain_min),
3909 size_one_node);
3910
e1a43f73
PB
3911 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
3912 abort ();
3913 nbits = TREE_INT_CST_LOW (bitlength);
3914
3915 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
3916 are "complicated" (more than one range), initialize (the
3917 constant parts) by copying from a constant. */
3918 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
3919 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 3920 {
b4ee5a72
PB
3921 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
3922 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 3923 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72
PB
3924 HOST_WIDE_INT word = 0;
3925 int bit_pos = 0;
3926 int ibit = 0;
0f41302f 3927 int offset = 0; /* In bytes from beginning of set. */
e1a43f73 3928 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 3929 for (;;)
071a6595 3930 {
b4ee5a72
PB
3931 if (bit_buffer[ibit])
3932 {
b09f3348 3933 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
3934 word |= (1 << (set_word_size - 1 - bit_pos));
3935 else
3936 word |= 1 << bit_pos;
3937 }
3938 bit_pos++; ibit++;
3939 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 3940 {
e1a43f73
PB
3941 if (word != 0 || ! cleared)
3942 {
3943 rtx datum = GEN_INT (word);
3944 rtx to_rtx;
0f41302f
MS
3945 /* The assumption here is that it is safe to use
3946 XEXP if the set is multi-word, but not if
3947 it's single-word. */
e1a43f73
PB
3948 if (GET_CODE (target) == MEM)
3949 {
3950 to_rtx = plus_constant (XEXP (target, 0), offset);
3951 to_rtx = change_address (target, mode, to_rtx);
3952 }
3953 else if (offset == 0)
3954 to_rtx = target;
3955 else
3956 abort ();
3957 emit_move_insn (to_rtx, datum);
3958 }
b4ee5a72
PB
3959 if (ibit == nbits)
3960 break;
3961 word = 0;
3962 bit_pos = 0;
3963 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
3964 }
3965 }
071a6595 3966 }
e1a43f73
PB
3967 else if (!cleared)
3968 {
0f41302f 3969 /* Don't bother clearing storage if the set is all ones. */
e1a43f73
PB
3970 if (TREE_CHAIN (elt) != NULL_TREE
3971 || (TREE_PURPOSE (elt) == NULL_TREE
3972 ? nbits != 1
3973 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
3974 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
3975 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
3976 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
3977 != nbits))))
3978 clear_storage (target, expr_size (exp),
3979 TYPE_ALIGN (type) / BITS_PER_UNIT);
3980 }
3981
3982 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595
PB
3983 {
3984 /* start of range of element or NULL */
3985 tree startbit = TREE_PURPOSE (elt);
3986 /* end of range of element, or element value */
3987 tree endbit = TREE_VALUE (elt);
3988 HOST_WIDE_INT startb, endb;
3989 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
3990
3991 bitlength_rtx = expand_expr (bitlength,
3992 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
3993
3994 /* handle non-range tuple element like [ expr ] */
3995 if (startbit == NULL_TREE)
3996 {
3997 startbit = save_expr (endbit);
3998 endbit = startbit;
3999 }
4000 startbit = convert (sizetype, startbit);
4001 endbit = convert (sizetype, endbit);
4002 if (! integer_zerop (domain_min))
4003 {
4004 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4005 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4006 }
4007 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4008 EXPAND_CONST_ADDRESS);
4009 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4010 EXPAND_CONST_ADDRESS);
4011
4012 if (REG_P (target))
4013 {
4014 targetx = assign_stack_temp (GET_MODE (target),
4015 GET_MODE_SIZE (GET_MODE (target)),
4016 0);
4017 emit_move_insn (targetx, target);
4018 }
4019 else if (GET_CODE (target) == MEM)
4020 targetx = target;
4021 else
4022 abort ();
4023
4024#ifdef TARGET_MEM_FUNCTIONS
4025 /* Optimization: If startbit and endbit are
9faa82d8 4026 constants divisible by BITS_PER_UNIT,
0f41302f 4027 call memset instead. */
071a6595
PB
4028 if (TREE_CODE (startbit) == INTEGER_CST
4029 && TREE_CODE (endbit) == INTEGER_CST
4030 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 4031 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 4032 {
071a6595
PB
4033 emit_library_call (memset_libfunc, 0,
4034 VOIDmode, 3,
e1a43f73
PB
4035 plus_constant (XEXP (targetx, 0),
4036 startb / BITS_PER_UNIT),
071a6595 4037 Pmode,
3b6f75e2 4038 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 4039 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 4040 TYPE_MODE (sizetype));
071a6595
PB
4041 }
4042 else
4043#endif
4044 {
38a448ca 4045 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
071a6595
PB
4046 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4047 bitlength_rtx, TYPE_MODE (sizetype),
4048 startbit_rtx, TYPE_MODE (sizetype),
4049 endbit_rtx, TYPE_MODE (sizetype));
4050 }
4051 if (REG_P (target))
4052 emit_move_insn (target, targetx);
4053 }
4054 }
bbf6f052
RK
4055
4056 else
4057 abort ();
4058}
4059
4060/* Store the value of EXP (an expression tree)
4061 into a subfield of TARGET which has mode MODE and occupies
4062 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4063 If MODE is VOIDmode, it means that we are storing into a bit-field.
4064
4065 If VALUE_MODE is VOIDmode, return nothing in particular.
4066 UNSIGNEDP is not used in this case.
4067
4068 Otherwise, return an rtx for the value stored. This rtx
4069 has mode VALUE_MODE if that is convenient to do.
4070 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4071
4072 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4073 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
4074
static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
	     unsignedp, align, total_size)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
{
  /* Mask of the low BITSIZE bits; stays 0 when BITSIZE fills (or exceeds)
     a HOST_WIDE_INT, in which case no masking is needed.  */
  HOST_WIDE_INT width_mask = 0;

  /* Storing an ERROR_MARK is a no-op: an error was already reported.  */
  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  if (bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      /* OBJECT is a stack slot in TARGET's own mode; BLK_OBJECT is the
	 same slot viewed in BLKmode.  */
      rtx object = assign_stack_temp (GET_MODE (target),
				      GET_MODE_SIZE (GET_MODE (target)), 0);
      rtx blk_object = copy_rtx (object);

      MEM_IN_STRUCT_P (object) = 1;
      MEM_IN_STRUCT_P (blk_object) = 1;
      PUT_MODE (blk_object, BLKmode);

      /* If the field is narrower than the whole register, preserve the
	 bits of TARGET outside the field by copying TARGET in first.  */
      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
		   align, total_size);

      /* Even though we aren't returning target, we need to
	 give it the updated value.  */
      emit_move_insn (target, object);

      return blk_object;
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode])
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (SLOW_UNALIGNED_ACCESS
	  && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
      || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     temp, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
	      || bitpos % BITS_PER_UNIT != 0)
	    abort ();

	  target = change_address (target, VOIDmode,
				   plus_constant (XEXP (target, 0),
						  bitpos / BITS_PER_UNIT));

	  /* Round the bit count up to a whole number of bytes.  */
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   1);

	  return value_mode == VOIDmode ? const0_rtx : target;
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.  */
	  /* If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      /* For an unsigned value, masking off the high bits of TEMP
		 gives the stored value directly.  */
	      if (unsignedp)
		return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;
	      /* Signed case: shift left then arithmetic-shift right to
		 sign-extend the low BITSIZE bits.  */
	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }
	  /* Otherwise (volatile MEM, or field too wide to mask), re-read
	     the value from the bitfield just stored.  */
	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, 0, align,
				    total_size);
	}
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	addr = copy_to_reg (addr);

      /* Now build a reference to just the desired component.  */

      to_rtx = copy_rtx (change_address (target, mode,
					 plus_constant (addr,
							(bitpos
							 / BITS_PER_UNIT))));
      MEM_IN_STRUCT_P (to_rtx) = 1;

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
4240\f
4241/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4242 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 4243 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
4244
4245 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4246 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
4247 If the position of the field is variable, we store a tree
4248 giving the variable offset (in units) in *POFFSET.
4249 This offset is in addition to the bit position.
4250 If the position is not variable, we store 0 in *POFFSET.
839c4796
RK
4251 We set *PALIGNMENT to the alignment in bytes of the address that will be
4252 computed. This is the alignment of the thing we return if *POFFSET
4253 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
bbf6f052
RK
4254
4255 If any of the extraction expressions is volatile,
4256 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4257
4258 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4259 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
4260 is redundant.
4261
4262 If the field describes a variable-sized object, *PMODE is set to
4263 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
839c4796 4264 this case, but the address of the object can be found. */
bbf6f052
RK
4265
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep, palignment)
     tree exp;
     int *pbitsize;
     int *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
     int *palignment;
{
  tree orig_exp = exp;
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  /* Accumulated variable byte offset; converted to 0 at the end if it
     stayed at the constant zero.  */
  tree offset = integer_zero_node;
  /* Running upper bound (in bits) on the alignment of the address.  */
  int alignment = BIGGEST_ALIGNMENT;

  /* First determine the size and signedness of the reference itself.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));
      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *pbitsize = GET_MODE_BITSIZE (mode);
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
    }

  if (size_tree)
    {
      /* A variable-sized object: signal with BLKmode and size -1.  */
      if (TREE_CODE (size_tree) != INTEGER_CST)
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */

  *pbitpos = 0;

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
	{
	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
		      : TREE_OPERAND (exp, 2));
	  tree constant = integer_zero_node, var = pos;

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (pos == 0)
	    break;

	  /* Assume here that the offset is a multiple of a unit.
	     If not, there should be an explicitly added constant.  */
	  if (TREE_CODE (pos) == PLUS_EXPR
	      && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
	    constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
	  else if (TREE_CODE (pos) == INTEGER_CST)
	    constant = pos, var = integer_zero_node;

	  /* Constant part goes into the bit position; variable part
	     (converted from bits to bytes) into OFFSET.  */
	  *pbitpos += TREE_INT_CST_LOW (constant);
	  offset = size_binop (PLUS_EXPR, offset,
			       size_binop (EXACT_DIV_EXPR, var,
					   size_int (BITS_PER_UNIT)));
	}

      else if (TREE_CODE (exp) == ARRAY_REF)
	{
	  /* This code is based on the code in case ARRAY_REF in expand_expr
	     below.  We assume here that the size of an array element is
	     always an integral multiple of BITS_PER_UNIT.  */

	  tree index = TREE_OPERAND (exp, 1);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  tree low_bound
	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	  tree index_type = TREE_TYPE (index);

	  /* Widen or narrow the index to the precision of sizetype
	     before doing arithmetic on it.  */
	  if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
	    {
	      index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
			       index);
	      index_type = TREE_TYPE (index);
	    }

	  /* Bias the index down by a non-zero lower bound.  */
	  if (! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, index_type, index, low_bound));

	  /* INDEX becomes the offset in bits: index * element size.  */
	  index = fold (build (MULT_EXPR, index_type, index,
			       convert (index_type,
					TYPE_SIZE (TREE_TYPE (exp)))));

	  if (TREE_CODE (index) == INTEGER_CST
	      && TREE_INT_CST_HIGH (index) == 0)
	    *pbitpos += TREE_INT_CST_LOW (index);
	  else
	    {
	      offset = size_binop (PLUS_EXPR, offset,
				   size_binop (FLOOR_DIV_EXPR, index,
					       size_int (BITS_PER_UNIT)));

	      /* Wrap in WITH_RECORD_EXPR so PLACEHOLDER_EXPRs inside the
		 offset can be resolved against this reference later.  */
	      if (contains_placeholder_p (offset))
		offset = build (WITH_RECORD_EXPR, sizetype, offset, exp);
	    }
	}
      /* Look through no-op conversions (same machine mode), but stop at
	 conversions to/from unions and at everything else.  */
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
			   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
			       != UNION_TYPE))
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      /* If the offset is non-constant already, then we can't assume any
	 alignment more than the alignment here.  */
      if (! integer_zerop (offset))
	alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

      exp = TREE_OPERAND (exp, 0);
    }

  /* Fold in the alignment of the ultimate containing object.  */
  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
    alignment = MIN (alignment, DECL_ALIGN (exp));
  else if (TREE_TYPE (exp) != 0)
    alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

  /* Callers expect a null pointer, not a zero constant, when there is no
     variable offset.  */
  if (integer_zerop (offset))
    offset = 0;

  if (offset != 0 && contains_placeholder_p (offset))
    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);

  *pmode = mode;
  *poffset = offset;
  *palignment = alignment / BITS_PER_UNIT;
  return exp;
}
921b3427
RK
4422
4423/* Subroutine of expand_exp: compute memory_usage from modifier. */
4424static enum memory_use_mode
4425get_memory_usage_from_modifier (modifier)
4426 enum expand_modifier modifier;
4427{
4428 switch (modifier)
4429 {
4430 case EXPAND_NORMAL:
4431 return MEMORY_USE_RO;
4432 break;
4433 case EXPAND_MEMORY_USE_WO:
4434 return MEMORY_USE_WO;
4435 break;
4436 case EXPAND_MEMORY_USE_RW:
4437 return MEMORY_USE_RW;
4438 break;
4439 case EXPAND_INITIALIZER:
4440 case EXPAND_MEMORY_USE_DONT:
4441 case EXPAND_SUM:
4442 case EXPAND_CONST_ADDRESS:
4443 return MEMORY_USE_DONT;
4444 case EXPAND_MEMORY_USE_BAD:
4445 default:
4446 abort ();
4447 }
4448}
bbf6f052
RK
4449\f
4450/* Given an rtx VALUE that may contain additions and multiplications,
4451 return an equivalent value that just refers to a register or memory.
4452 This is done by generating instructions to perform the arithmetic
c45a13a6
RK
4453 and returning a pseudo-register containing the value.
4454
4455 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
4456
rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);

  /* Only PLUS, MINUS and MULT are rewritten; anything else is returned
     unchanged at the bottom.  */
  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      /* Don't reuse SUBTARGET for operand 0 if operand 1 might live in it.  */
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
			  force_operand (op2, NULL_RTX),
			  target, 0);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      /* Canonicalize (minus x const) into (plus x -const).  */
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
	{
	  binoptab = add_optab;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_binop (GET_MODE (value), binoptab,
				   XEXP (XEXP (value, 0), 0), op2,
				   subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_binop (GET_MODE (value), binoptab, temp,
			       force_operand (XEXP (XEXP (value, 0), 1), 0),
			       target, 0, OPTAB_LIB_WIDEN);
	}

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
			   force_operand (op2, NULL_RTX),
			   target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
	 because the only operations we are expanding here are signed ones.  */
    }
  return value;
}
4526\f
4527/* Subroutine of expand_expr:
4528 save the non-copied parts (LIST) of an expr (LHS), and return a list
4529 which can restore these values to their previous values,
4530 should something modify their storage. */
4531
static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      /* A nested TREE_LIST names a sub-group of parts; recurse and splice
	 its result onto ours.  */
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	/* Reference to the part within LHS whose current value we save.  */
	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
	rtx target = assign_temp (part_type, 0, 1, 1);
	/* Make sure the temporary's address is directly addressable in
	   this mode before storing through it.  */
	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
	/* Pair the COMPONENT_REF (purpose) with an RTL_EXPR holding the
	   temporary (value); the caller restores from this pairing.  */
	parts = tree_cons (to_be_saved,
			   build (RTL_EXPR, part_type, NULL_TREE,
				  (tree) target),
			   parts);
	/* Emit the save of the current value into the temporary now.  */
	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}
4559
4560/* Subroutine of expand_expr:
4561 record the non-copied parts (LIST) of an expr (LHS), and return a list
4562 which specifies the initial values of these parts. */
4563
4564static tree
4565init_noncopied_parts (lhs, list)
4566 tree lhs;
4567 tree list;
4568{
4569 tree tail;
4570 tree parts = 0;
4571
4572 for (tail = list; tail; tail = TREE_CHAIN (tail))
4573 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4574 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4575 else
4576 {
4577 tree part = TREE_VALUE (tail);
4578 tree part_type = TREE_TYPE (part);
906c4e36 4579 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
4580 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4581 }
4582 return parts;
4583}
4584
4585/* Subroutine of expand_expr: return nonzero iff there is no way that
4586 EXP can reference X, which is being modified. */
4587
static int
safe_from_p (x, exp)
     rtx x;
     tree exp;
{
  /* Set to any rtx found attached to EXP; checked against X at the end.  */
  rtx exp_rtl = 0;
  int i, nops;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode.  */
      || (TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
	      != INTEGER_CST)
	  && GET_MODE (x) == BLKmode))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
	  || (GET_CODE (XEXP (x, 0)) == PLUS
	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  /* Dispatch on the tree-code class of EXP.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      /* A declaration: conflict is decided by its rtl, checked below.  */
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      /* A constant can never reference X.  */
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	return ((TREE_VALUE (exp) == 0
		 || safe_from_p (x, TREE_VALUE (exp)))
		&& (TREE_CHAIN (exp) == 0
		    || safe_from_p (x, TREE_CHAIN (exp))));
      else
	/* Other 'x' nodes: be conservative.  */
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0));

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0))
	      && safe_from_p (x, TREE_OPERAND (exp, 1)));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  return (staticp (TREE_OPERAND (exp, 0))
		  || safe_from_p (x, TREE_OPERAND (exp, 0)));

	case INDIRECT_REF:
	  /* An indirection might read any memory, so any MEM conflicts.  */
	  if (GET_CODE (x) == MEM)
	    return 0;
	  break;

	case CALL_EXPR:
	  exp_rtl = CALL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    {
	      /* Assume that the call will clobber all hard registers and
		 all of memory.  */
	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
		  || GET_CODE (x) == MEM)
		return 0;
	    }

	  break;

	case RTL_EXPR:
	  /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;

	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0));

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  break;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1));

	case METHOD_CALL_EXPR:
	  /* This takes a rtx argument, but shouldn't appear here.  */
	  abort ();

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i)))
	  return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
4752
4753/* Subroutine of expand_expr: return nonzero iff EXP is an
4754 expression whose type is statically determinable. */
4755
4756static int
4757fixed_type_p (exp)
4758 tree exp;
4759{
4760 if (TREE_CODE (exp) == PARM_DECL
4761 || TREE_CODE (exp) == VAR_DECL
4762 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
4763 || TREE_CODE (exp) == COMPONENT_REF
4764 || TREE_CODE (exp) == ARRAY_REF)
4765 return 1;
4766 return 0;
4767}
01c8a7c8
RK
4768
4769/* Subroutine of expand_expr: return rtx if EXP is a
4770 variable or parameter; else return 0. */
4771
4772static rtx
4773var_rtx (exp)
4774 tree exp;
4775{
4776 STRIP_NOPS (exp);
4777 switch (TREE_CODE (exp))
4778 {
4779 case PARM_DECL:
4780 case VAR_DECL:
4781 return DECL_RTL (exp);
4782 default:
4783 return 0;
4784 }
4785}
bbf6f052
RK
4786\f
4787/* expand_expr: generate code for computing expression EXP.
4788 An rtx for the computed value is returned. The value is never null.
4789 In the case of a void EXP, const0_rtx is returned.
4790
4791 The value may be stored in TARGET if TARGET is nonzero.
4792 TARGET is just a suggestion; callers must assume that
4793 the rtx returned may not be the same as TARGET.
4794
4795 If TARGET is CONST0_RTX, it means that the value will be ignored.
4796
4797 If TMODE is not VOIDmode, it suggests generating the
4798 result in mode TMODE. But this is done only when convenient.
4799 Otherwise, TMODE is ignored and the value generated in its natural mode.
4800 TMODE is just a suggestion; callers must assume that
4801 the rtx returned may not have mode TMODE.
4802
d6a5ac33
RK
4803 Note that TARGET may have neither TMODE nor MODE. In that case, it
4804 probably will not be used.
bbf6f052
RK
4805
4806 If MODIFIER is EXPAND_SUM then when EXP is an addition
4807 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
4808 or a nest of (PLUS ...) and (MINUS ...) where the terms are
4809 products as above, or REG or MEM, or constant.
4810 Ordinarily in such cases we would output mul or add instructions
4811 and then return a pseudo reg containing the sum.
4812
4813 EXPAND_INITIALIZER is much like EXPAND_SUM except that
4814 it also marks a label as absolutely required (it can't be dead).
26fcb35a 4815 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
4816 This is used for outputting expressions used in initializers.
4817
4818 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
4819 with a constant address even if that address is not normally legitimate.
4820 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
bbf6f052
RK
4821
4822rtx
4823expand_expr (exp, target, tmode, modifier)
4824 register tree exp;
4825 rtx target;
4826 enum machine_mode tmode;
4827 enum expand_modifier modifier;
4828{
b50d17a1
RK
4829 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
4830 This is static so it will be accessible to our recursive callees. */
4831 static tree placeholder_list = 0;
bbf6f052
RK
4832 register rtx op0, op1, temp;
4833 tree type = TREE_TYPE (exp);
4834 int unsignedp = TREE_UNSIGNED (type);
4835 register enum machine_mode mode = TYPE_MODE (type);
4836 register enum tree_code code = TREE_CODE (exp);
4837 optab this_optab;
4838 /* Use subtarget as the target for operand 0 of a binary operation. */
4839 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4840 rtx original_target = target;
dd27116b
RK
4841 int ignore = (target == const0_rtx
4842 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4d87de75
RS
4843 || code == CONVERT_EXPR || code == REFERENCE_EXPR
4844 || code == COND_EXPR)
dd27116b 4845 && TREE_CODE (type) == VOID_TYPE));
bbf6f052 4846 tree context;
921b3427
RK
4847 /* Used by check-memory-usage to make modifier read only. */
4848 enum expand_modifier ro_modifier;
bbf6f052 4849
921b3427
RK
4850 /* Make a read-only version of the modifier. */
4851 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
4852 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
4853 ro_modifier = modifier;
4854 else
4855 ro_modifier = EXPAND_NORMAL;
ca695ac9 4856
bbf6f052
RK
4857 /* Don't use hard regs as subtargets, because the combiner
4858 can only handle pseudo regs. */
4859 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
4860 subtarget = 0;
4861 /* Avoid subtargets inside loops,
4862 since they hide some invariant expressions. */
4863 if (preserve_subexpressions_p ())
4864 subtarget = 0;
4865
dd27116b
RK
4866 /* If we are going to ignore this result, we need only do something
4867 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
4868 is, short-circuit the most common cases here. Note that we must
4869 not call expand_expr with anything but const0_rtx in case this
4870 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 4871
dd27116b
RK
4872 if (ignore)
4873 {
4874 if (! TREE_SIDE_EFFECTS (exp))
4875 return const0_rtx;
4876
4877 /* Ensure we reference a volatile object even if value is ignored. */
4878 if (TREE_THIS_VOLATILE (exp)
4879 && TREE_CODE (exp) != FUNCTION_DECL
4880 && mode != VOIDmode && mode != BLKmode)
4881 {
921b3427 4882 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
dd27116b
RK
4883 if (GET_CODE (temp) == MEM)
4884 temp = copy_to_reg (temp);
4885 return const0_rtx;
4886 }
4887
4888 if (TREE_CODE_CLASS (code) == '1')
4889 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 4890 VOIDmode, ro_modifier);
dd27116b
RK
4891 else if (TREE_CODE_CLASS (code) == '2'
4892 || TREE_CODE_CLASS (code) == '<')
4893 {
921b3427
RK
4894 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
4895 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
dd27116b
RK
4896 return const0_rtx;
4897 }
4898 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
4899 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
4900 /* If the second operand has no side effects, just evaluate
0f41302f 4901 the first. */
dd27116b 4902 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 4903 VOIDmode, ro_modifier);
dd27116b 4904
90764a87 4905 target = 0;
dd27116b 4906 }
bbf6f052 4907
e44842fe
RK
4908 /* If will do cse, generate all results into pseudo registers
4909 since 1) that allows cse to find more things
4910 and 2) otherwise cse could produce an insn the machine
4911 cannot support. */
4912
bbf6f052
RK
4913 if (! cse_not_expected && mode != BLKmode && target
4914 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
4915 target = subtarget;
4916
bbf6f052
RK
4917 switch (code)
4918 {
4919 case LABEL_DECL:
b552441b
RS
4920 {
4921 tree function = decl_function_context (exp);
4922 /* Handle using a label in a containing function. */
d0977240
RK
4923 if (function != current_function_decl
4924 && function != inline_function_decl && function != 0)
b552441b
RS
4925 {
4926 struct function *p = find_function_data (function);
4927 /* Allocate in the memory associated with the function
4928 that the label is in. */
4929 push_obstacks (p->function_obstack,
4930 p->function_maybepermanent_obstack);
4931
38a448ca
RH
4932 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
4933 label_rtx (exp),
4934 p->forced_labels);
b552441b
RS
4935 pop_obstacks ();
4936 }
4937 else if (modifier == EXPAND_INITIALIZER)
38a448ca
RH
4938 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
4939 label_rtx (exp), forced_labels);
4940 temp = gen_rtx_MEM (FUNCTION_MODE,
4941 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
d0977240
RK
4942 if (function != current_function_decl
4943 && function != inline_function_decl && function != 0)
26fcb35a
RS
4944 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
4945 return temp;
b552441b 4946 }
bbf6f052
RK
4947
4948 case PARM_DECL:
4949 if (DECL_RTL (exp) == 0)
4950 {
4951 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 4952 return CONST0_RTX (mode);
bbf6f052
RK
4953 }
4954
0f41302f 4955 /* ... fall through ... */
d6a5ac33 4956
bbf6f052 4957 case VAR_DECL:
2dca20cd
RS
4958 /* If a static var's type was incomplete when the decl was written,
4959 but the type is complete now, lay out the decl now. */
4960 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
4961 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
4962 {
4963 push_obstacks_nochange ();
4964 end_temporary_allocation ();
4965 layout_decl (exp, 0);
4966 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
4967 pop_obstacks ();
4968 }
d6a5ac33 4969
921b3427
RK
4970 /* Only check automatic variables. Currently, function arguments are
4971 not checked (this can be done at compile-time with prototypes).
4972 Aggregates are not checked. */
4973 if (flag_check_memory_usage && code == VAR_DECL
4974 && GET_CODE (DECL_RTL (exp)) == MEM
4975 && DECL_CONTEXT (exp) != NULL_TREE
4976 && ! TREE_STATIC (exp)
4977 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4978 {
4979 enum memory_use_mode memory_usage;
4980 memory_usage = get_memory_usage_from_modifier (modifier);
4981
4982 if (memory_usage != MEMORY_USE_DONT)
4983 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
4984 XEXP (DECL_RTL (exp), 0), ptr_mode,
4985 GEN_INT (int_size_in_bytes (type)),
4986 TYPE_MODE (sizetype),
956d6950
JL
4987 GEN_INT (memory_usage),
4988 TYPE_MODE (integer_type_node));
921b3427
RK
4989 }
4990
0f41302f 4991 /* ... fall through ... */
d6a5ac33 4992
2dca20cd 4993 case FUNCTION_DECL:
bbf6f052
RK
4994 case RESULT_DECL:
4995 if (DECL_RTL (exp) == 0)
4996 abort ();
d6a5ac33 4997
e44842fe
RK
4998 /* Ensure variable marked as used even if it doesn't go through
4999 a parser. If it hasn't be used yet, write out an external
5000 definition. */
5001 if (! TREE_USED (exp))
5002 {
5003 assemble_external (exp);
5004 TREE_USED (exp) = 1;
5005 }
5006
dc6d66b3
RK
5007 /* Show we haven't gotten RTL for this yet. */
5008 temp = 0;
5009
bbf6f052
RK
5010 /* Handle variables inherited from containing functions. */
5011 context = decl_function_context (exp);
5012
5013 /* We treat inline_function_decl as an alias for the current function
5014 because that is the inline function whose vars, types, etc.
5015 are being merged into the current function.
5016 See expand_inline_function. */
d6a5ac33 5017
bbf6f052
RK
5018 if (context != 0 && context != current_function_decl
5019 && context != inline_function_decl
5020 /* If var is static, we don't need a static chain to access it. */
5021 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5022 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5023 {
5024 rtx addr;
5025
5026 /* Mark as non-local and addressable. */
81feeecb 5027 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
5028 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5029 abort ();
bbf6f052
RK
5030 mark_addressable (exp);
5031 if (GET_CODE (DECL_RTL (exp)) != MEM)
5032 abort ();
5033 addr = XEXP (DECL_RTL (exp), 0);
5034 if (GET_CODE (addr) == MEM)
38a448ca
RH
5035 addr = gen_rtx_MEM (Pmode,
5036 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
5037 else
5038 addr = fix_lexical_addr (addr, exp);
dc6d66b3 5039 temp = change_address (DECL_RTL (exp), mode, addr);
bbf6f052 5040 }
4af3895e 5041
bbf6f052
RK
5042 /* This is the case of an array whose size is to be determined
5043 from its initializer, while the initializer is still being parsed.
5044 See expand_decl. */
d6a5ac33 5045
dc6d66b3
RK
5046 else if (GET_CODE (DECL_RTL (exp)) == MEM
5047 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5048 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
bbf6f052 5049 XEXP (DECL_RTL (exp), 0));
d6a5ac33
RK
5050
5051 /* If DECL_RTL is memory, we are in the normal case and either
5052 the address is not valid or it is not a register and -fforce-addr
5053 is specified, get the address into a register. */
5054
dc6d66b3
RK
5055 else if (GET_CODE (DECL_RTL (exp)) == MEM
5056 && modifier != EXPAND_CONST_ADDRESS
5057 && modifier != EXPAND_SUM
5058 && modifier != EXPAND_INITIALIZER
5059 && (! memory_address_p (DECL_MODE (exp),
5060 XEXP (DECL_RTL (exp), 0))
5061 || (flag_force_addr
5062 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5063 temp = change_address (DECL_RTL (exp), VOIDmode,
d6a5ac33 5064 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 5065
dc6d66b3
RK
5066 /* If we got something, return it. But first, set the alignment
5067 the address is a register. */
5068 if (temp != 0)
5069 {
5070 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5071 mark_reg_pointer (XEXP (temp, 0),
5072 DECL_ALIGN (exp) / BITS_PER_UNIT);
5073
5074 return temp;
5075 }
5076
1499e0a8
RK
5077 /* If the mode of DECL_RTL does not match that of the decl, it
5078 must be a promoted value. We return a SUBREG of the wanted mode,
5079 but mark it so that we know that it was already extended. */
5080
5081 if (GET_CODE (DECL_RTL (exp)) == REG
5082 && GET_MODE (DECL_RTL (exp)) != mode)
5083 {
1499e0a8
RK
5084 /* Get the signedness used for this variable. Ensure we get the
5085 same mode we got when the variable was declared. */
78911e8b
RK
5086 if (GET_MODE (DECL_RTL (exp))
5087 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
1499e0a8
RK
5088 abort ();
5089
38a448ca 5090 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
1499e0a8
RK
5091 SUBREG_PROMOTED_VAR_P (temp) = 1;
5092 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5093 return temp;
5094 }
5095
bbf6f052
RK
5096 return DECL_RTL (exp);
5097
5098 case INTEGER_CST:
5099 return immed_double_const (TREE_INT_CST_LOW (exp),
5100 TREE_INT_CST_HIGH (exp),
5101 mode);
5102
5103 case CONST_DECL:
921b3427
RK
5104 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5105 EXPAND_MEMORY_USE_BAD);
bbf6f052
RK
5106
5107 case REAL_CST:
5108 /* If optimized, generate immediate CONST_DOUBLE
5109 which will be turned into memory by reload if necessary.
5110
5111 We used to force a register so that loop.c could see it. But
5112 this does not allow gen_* patterns to perform optimizations with
5113 the constants. It also produces two insns in cases like "x = 1.0;".
5114 On most machines, floating-point constants are not permitted in
5115 many insns, so we'd end up copying it to a register in any case.
5116
5117 Now, we do the copying in expand_binop, if appropriate. */
5118 return immed_real_const (exp);
5119
5120 case COMPLEX_CST:
5121 case STRING_CST:
5122 if (! TREE_CST_RTL (exp))
5123 output_constant_def (exp);
5124
5125 /* TREE_CST_RTL probably contains a constant address.
5126 On RISC machines where a constant address isn't valid,
5127 make some insns to get that address into a register. */
5128 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5129 && modifier != EXPAND_CONST_ADDRESS
5130 && modifier != EXPAND_INITIALIZER
5131 && modifier != EXPAND_SUM
d6a5ac33
RK
5132 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5133 || (flag_force_addr
5134 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
bbf6f052
RK
5135 return change_address (TREE_CST_RTL (exp), VOIDmode,
5136 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5137 return TREE_CST_RTL (exp);
5138
5139 case SAVE_EXPR:
5140 context = decl_function_context (exp);
d6a5ac33 5141
d0977240
RK
5142 /* If this SAVE_EXPR was at global context, assume we are an
5143 initialization function and move it into our context. */
5144 if (context == 0)
5145 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5146
bbf6f052
RK
5147 /* We treat inline_function_decl as an alias for the current function
5148 because that is the inline function whose vars, types, etc.
5149 are being merged into the current function.
5150 See expand_inline_function. */
5151 if (context == current_function_decl || context == inline_function_decl)
5152 context = 0;
5153
5154 /* If this is non-local, handle it. */
5155 if (context)
5156 {
d0977240
RK
5157 /* The following call just exists to abort if the context is
5158 not of a containing function. */
5159 find_function_data (context);
5160
bbf6f052
RK
5161 temp = SAVE_EXPR_RTL (exp);
5162 if (temp && GET_CODE (temp) == REG)
5163 {
5164 put_var_into_stack (exp);
5165 temp = SAVE_EXPR_RTL (exp);
5166 }
5167 if (temp == 0 || GET_CODE (temp) != MEM)
5168 abort ();
5169 return change_address (temp, mode,
5170 fix_lexical_addr (XEXP (temp, 0), exp));
5171 }
5172 if (SAVE_EXPR_RTL (exp) == 0)
5173 {
06089a8b
RK
5174 if (mode == VOIDmode)
5175 temp = const0_rtx;
5176 else
5177 temp = assign_temp (type, 0, 0, 0);
1499e0a8 5178
bbf6f052 5179 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 5180 if (!optimize && GET_CODE (temp) == REG)
38a448ca
RH
5181 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5182 save_expr_regs);
ff78f773
RK
5183
5184 /* If the mode of TEMP does not match that of the expression, it
5185 must be a promoted value. We pass store_expr a SUBREG of the
5186 wanted mode but mark it so that we know that it was already
5187 extended. Note that `unsignedp' was modified above in
5188 this case. */
5189
5190 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5191 {
38a448ca 5192 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
ff78f773
RK
5193 SUBREG_PROMOTED_VAR_P (temp) = 1;
5194 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5195 }
5196
4c7a0be9 5197 if (temp == const0_rtx)
921b3427
RK
5198 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5199 EXPAND_MEMORY_USE_BAD);
4c7a0be9
JW
5200 else
5201 store_expr (TREE_OPERAND (exp, 0), temp, 0);
bbf6f052 5202 }
1499e0a8
RK
5203
5204 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5205 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 5206 but mark it so that we know that it was already extended. */
1499e0a8
RK
5207
5208 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5209 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5210 {
e70d22c8
RK
5211 /* Compute the signedness and make the proper SUBREG. */
5212 promote_mode (type, mode, &unsignedp, 0);
38a448ca 5213 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
1499e0a8
RK
5214 SUBREG_PROMOTED_VAR_P (temp) = 1;
5215 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5216 return temp;
5217 }
5218
bbf6f052
RK
5219 return SAVE_EXPR_RTL (exp);
5220
679163cf
MS
5221 case UNSAVE_EXPR:
5222 {
5223 rtx temp;
5224 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5225 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5226 return temp;
5227 }
5228
b50d17a1 5229 case PLACEHOLDER_EXPR:
e9a25f70
JL
5230 {
5231 tree placeholder_expr;
5232
5233 /* If there is an object on the head of the placeholder list,
6d8ccdbb 5234 see if some object in its references is of type TYPE. For
e9a25f70
JL
5235 further information, see tree.def. */
5236 for (placeholder_expr = placeholder_list;
5237 placeholder_expr != 0;
5238 placeholder_expr = TREE_CHAIN (placeholder_expr))
5239 {
5240 tree need_type = TYPE_MAIN_VARIANT (type);
5241 tree object = 0;
5242 tree old_list = placeholder_list;
5243 tree elt;
5244
5245 /* See if the object is the type that we want. */
5246 if ((TYPE_MAIN_VARIANT (TREE_TYPE
5247 (TREE_PURPOSE (placeholder_expr)))
5248 == need_type))
5249 object = TREE_PURPOSE (placeholder_expr);
5250
6d8ccdbb 5251 /* Find the outermost reference that is of the type we want. */
e9a25f70 5252 for (elt = TREE_PURPOSE (placeholder_expr);
6d8ccdbb 5253 elt != 0 && object == 0
e9a25f70
JL
5254 && (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5255 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5256 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5257 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e');
5258 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
5259 || TREE_CODE (elt) == COND_EXPR)
5260 ? TREE_OPERAND (elt, 1) : TREE_OPERAND (elt, 0)))
5261 if (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5262 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (elt, 0)))
5263 == need_type))
6d8ccdbb 5264 object = TREE_OPERAND (elt, 0);
dc500fa1 5265
e9a25f70 5266 if (object != 0)
2cde2255 5267 {
e9a25f70
JL
5268 /* Expand this object skipping the list entries before
5269 it was found in case it is also a PLACEHOLDER_EXPR.
5270 In that case, we want to translate it using subsequent
5271 entries. */
5272 placeholder_list = TREE_CHAIN (placeholder_expr);
5273 temp = expand_expr (object, original_target, tmode,
5274 ro_modifier);
5275 placeholder_list = old_list;
5276 return temp;
2cde2255 5277 }
e9a25f70
JL
5278 }
5279 }
b50d17a1
RK
5280
5281 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5282 abort ();
5283
5284 case WITH_RECORD_EXPR:
5285 /* Put the object on the placeholder list, expand our first operand,
5286 and pop the list. */
5287 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5288 placeholder_list);
5289 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
921b3427 5290 tmode, ro_modifier);
b50d17a1
RK
5291 placeholder_list = TREE_CHAIN (placeholder_list);
5292 return target;
5293
bbf6f052 5294 case EXIT_EXPR:
e44842fe
RK
5295 expand_exit_loop_if_false (NULL_PTR,
5296 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
5297 return const0_rtx;
5298
5299 case LOOP_EXPR:
0088fcb1 5300 push_temp_slots ();
bbf6f052
RK
5301 expand_start_loop (1);
5302 expand_expr_stmt (TREE_OPERAND (exp, 0));
5303 expand_end_loop ();
0088fcb1 5304 pop_temp_slots ();
bbf6f052
RK
5305
5306 return const0_rtx;
5307
5308 case BIND_EXPR:
5309 {
5310 tree vars = TREE_OPERAND (exp, 0);
5311 int vars_need_expansion = 0;
5312
5313 /* Need to open a binding contour here because
e976b8b2 5314 if there are any cleanups they must be contained here. */
bbf6f052
RK
5315 expand_start_bindings (0);
5316
2df53c0b
RS
5317 /* Mark the corresponding BLOCK for output in its proper place. */
5318 if (TREE_OPERAND (exp, 2) != 0
5319 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5320 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
5321
5322 /* If VARS have not yet been expanded, expand them now. */
5323 while (vars)
5324 {
5325 if (DECL_RTL (vars) == 0)
5326 {
5327 vars_need_expansion = 1;
5328 expand_decl (vars);
5329 }
5330 expand_decl_init (vars);
5331 vars = TREE_CHAIN (vars);
5332 }
5333
921b3427 5334 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
bbf6f052
RK
5335
5336 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5337
5338 return temp;
5339 }
5340
5341 case RTL_EXPR:
83b853c9
JM
5342 if (RTL_EXPR_SEQUENCE (exp))
5343 {
5344 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5345 abort ();
5346 emit_insns (RTL_EXPR_SEQUENCE (exp));
5347 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5348 }
99310285 5349 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
ca814259 5350 free_temps_for_rtl_expr (exp);
bbf6f052
RK
5351 return RTL_EXPR_RTL (exp);
5352
5353 case CONSTRUCTOR:
dd27116b
RK
5354 /* If we don't need the result, just ensure we evaluate any
5355 subexpressions. */
5356 if (ignore)
5357 {
5358 tree elt;
5359 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
921b3427
RK
5360 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
5361 EXPAND_MEMORY_USE_BAD);
dd27116b
RK
5362 return const0_rtx;
5363 }
3207b172 5364
4af3895e
JVA
5365 /* All elts simple constants => refer to a constant in memory. But
5366 if this is a non-BLKmode mode, let it store a field at a time
5367 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 5368 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
5369 store directly into the target unless the type is large enough
5370 that memcpy will be used. If we are making an initializer and
3207b172 5371 all operands are constant, put it in memory as well. */
dd27116b 5372 else if ((TREE_STATIC (exp)
3207b172
RK
5373 && ((mode == BLKmode
5374 && ! (target != 0 && safe_from_p (target, exp)))
d720b9d1
RK
5375 || TREE_ADDRESSABLE (exp)
5376 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5377 && (move_by_pieces_ninsns
67225c15
RK
5378 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5379 TYPE_ALIGN (type) / BITS_PER_UNIT)
9de08200
RK
5380 > MOVE_RATIO)
5381 && ! mostly_zeros_p (exp))))
dd27116b 5382 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052
RK
5383 {
5384 rtx constructor = output_constant_def (exp);
b552441b
RS
5385 if (modifier != EXPAND_CONST_ADDRESS
5386 && modifier != EXPAND_INITIALIZER
5387 && modifier != EXPAND_SUM
d6a5ac33
RK
5388 && (! memory_address_p (GET_MODE (constructor),
5389 XEXP (constructor, 0))
5390 || (flag_force_addr
5391 && GET_CODE (XEXP (constructor, 0)) != REG)))
bbf6f052
RK
5392 constructor = change_address (constructor, VOIDmode,
5393 XEXP (constructor, 0));
5394 return constructor;
5395 }
5396
bbf6f052
RK
5397 else
5398 {
e9ac02a6
JW
5399 /* Handle calls that pass values in multiple non-contiguous
5400 locations. The Irix 6 ABI has examples of this. */
5401 if (target == 0 || ! safe_from_p (target, exp)
5402 || GET_CODE (target) == PARALLEL)
06089a8b
RK
5403 {
5404 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5405 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5406 else
5407 target = assign_temp (type, 0, 1, 1);
5408 }
07604beb
RK
5409
5410 if (TREE_READONLY (exp))
5411 {
9151b3bf 5412 if (GET_CODE (target) == MEM)
effbcc6a
RK
5413 target = copy_rtx (target);
5414
07604beb
RK
5415 RTX_UNCHANGING_P (target) = 1;
5416 }
5417
e1a43f73 5418 store_constructor (exp, target, 0);
bbf6f052
RK
5419 return target;
5420 }
5421
5422 case INDIRECT_REF:
5423 {
5424 tree exp1 = TREE_OPERAND (exp, 0);
5425 tree exp2;
7581a30f
JW
5426 tree index;
5427 tree string = string_constant (exp1, &index);
5428 int i;
5429
5430 if (string
5431 && TREE_CODE (string) == STRING_CST
5432 && TREE_CODE (index) == INTEGER_CST
5433 && !TREE_INT_CST_HIGH (index)
5434 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
5435 && GET_MODE_CLASS (mode) == MODE_INT
5436 && GET_MODE_SIZE (mode) == 1)
5437 return GEN_INT (TREE_STRING_POINTER (string)[i]);
bbf6f052 5438
405f0da6
JW
5439 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5440 op0 = memory_address (mode, op0);
8c8a8e34 5441
921b3427
RK
5442 if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5443 {
5444 enum memory_use_mode memory_usage;
5445 memory_usage = get_memory_usage_from_modifier (modifier);
5446
5447 if (memory_usage != MEMORY_USE_DONT)
c85f7c16
JL
5448 {
5449 in_check_memory_usage = 1;
5450 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5451 op0, ptr_mode,
5452 GEN_INT (int_size_in_bytes (type)),
5453 TYPE_MODE (sizetype),
5454 GEN_INT (memory_usage),
5455 TYPE_MODE (integer_type_node));
5456 in_check_memory_usage = 0;
5457 }
921b3427
RK
5458 }
5459
38a448ca 5460 temp = gen_rtx_MEM (mode, op0);
8c8a8e34
JW
5461 /* If address was computed by addition,
5462 mark this as an element of an aggregate. */
5463 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
5464 || (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR
5465 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == PLUS_EXPR)
05e3bdb9 5466 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
8c8a8e34
JW
5467 || (TREE_CODE (exp1) == ADDR_EXPR
5468 && (exp2 = TREE_OPERAND (exp1, 0))
05e3bdb9 5469 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
8c8a8e34 5470 MEM_IN_STRUCT_P (temp) = 1;
2c4c436a 5471 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
1125706f
RK
5472
5473 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5474 here, because, in C and C++, the fact that a location is accessed
5475 through a pointer to const does not mean that the value there can
5476 never change. Languages where it can never change should
5477 also set TREE_STATIC. */
5cb7a25a 5478 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
8c8a8e34
JW
5479 return temp;
5480 }
bbf6f052
RK
5481
5482 case ARRAY_REF:
742920c7
RK
5483 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
5484 abort ();
bbf6f052 5485
bbf6f052 5486 {
742920c7
RK
5487 tree array = TREE_OPERAND (exp, 0);
5488 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
5489 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5490 tree index = TREE_OPERAND (exp, 1);
5491 tree index_type = TREE_TYPE (index);
08293add 5492 HOST_WIDE_INT i;
b50d17a1 5493
d4c89139
PB
5494 /* Optimize the special-case of a zero lower bound.
5495
5496 We convert the low_bound to sizetype to avoid some problems
5497 with constant folding. (E.g. suppose the lower bound is 1,
5498 and its mode is QI. Without the conversion, (ARRAY
5499 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5500 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5501
5502 But sizetype isn't quite right either (especially if
5503 the lowbound is negative). FIXME */
5504
742920c7 5505 if (! integer_zerop (low_bound))
d4c89139
PB
5506 index = fold (build (MINUS_EXPR, index_type, index,
5507 convert (sizetype, low_bound)));
742920c7 5508
742920c7 5509 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
5510 This is not done in fold so it won't happen inside &.
5511 Don't fold if this is for wide characters since it's too
5512 difficult to do correctly and this is a very rare case. */
742920c7
RK
5513
5514 if (TREE_CODE (array) == STRING_CST
5515 && TREE_CODE (index) == INTEGER_CST
5516 && !TREE_INT_CST_HIGH (index)
307b821c 5517 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
ad2e7dd0
RK
5518 && GET_MODE_CLASS (mode) == MODE_INT
5519 && GET_MODE_SIZE (mode) == 1)
307b821c 5520 return GEN_INT (TREE_STRING_POINTER (array)[i]);
bbf6f052 5521
742920c7
RK
5522 /* If this is a constant index into a constant array,
5523 just get the value from the array. Handle both the cases when
5524 we have an explicit constructor and when our operand is a variable
5525 that was declared const. */
4af3895e 5526
742920c7
RK
5527 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
5528 {
5529 if (TREE_CODE (index) == INTEGER_CST
5530 && TREE_INT_CST_HIGH (index) == 0)
5531 {
5532 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
5533
5534 i = TREE_INT_CST_LOW (index);
5535 while (elem && i--)
5536 elem = TREE_CHAIN (elem);
5537 if (elem)
5538 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 5539 tmode, ro_modifier);
742920c7
RK
5540 }
5541 }
4af3895e 5542
742920c7
RK
5543 else if (optimize >= 1
5544 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
5545 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
5546 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
5547 {
08293add 5548 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
5549 {
5550 tree init = DECL_INITIAL (array);
5551
5552 i = TREE_INT_CST_LOW (index);
5553 if (TREE_CODE (init) == CONSTRUCTOR)
5554 {
5555 tree elem = CONSTRUCTOR_ELTS (init);
5556
03dc44a6
RS
5557 while (elem
5558 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
742920c7
RK
5559 elem = TREE_CHAIN (elem);
5560 if (elem)
5561 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 5562 tmode, ro_modifier);
742920c7
RK
5563 }
5564 else if (TREE_CODE (init) == STRING_CST
08293add
RK
5565 && TREE_INT_CST_HIGH (index) == 0
5566 && (TREE_INT_CST_LOW (index)
5567 < TREE_STRING_LENGTH (init)))
5568 return (GEN_INT
5569 (TREE_STRING_POINTER
5570 (init)[TREE_INT_CST_LOW (index)]));
742920c7
RK
5571 }
5572 }
5573 }
8c8a8e34 5574
08293add 5575 /* ... fall through ... */
bbf6f052
RK
5576
5577 case COMPONENT_REF:
5578 case BIT_FIELD_REF:
4af3895e 5579 /* If the operand is a CONSTRUCTOR, we can just extract the
7a0b7b9a
RK
5580 appropriate field if it is present. Don't do this if we have
5581 already written the data since we want to refer to that copy
5582 and varasm.c assumes that's what we'll do. */
4af3895e 5583 if (code != ARRAY_REF
7a0b7b9a
RK
5584 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
5585 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4af3895e
JVA
5586 {
5587 tree elt;
5588
5589 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
5590 elt = TREE_CHAIN (elt))
86b5812c
RK
5591 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
5592 /* We can normally use the value of the field in the
5593 CONSTRUCTOR. However, if this is a bitfield in
5594 an integral mode that we can fit in a HOST_WIDE_INT,
5595 we must mask only the number of bits in the bitfield,
5596 since this is done implicitly by the constructor. If
5597 the bitfield does not meet either of those conditions,
5598 we can't do this optimization. */
5599 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
5600 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
5601 == MODE_INT)
5602 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
5603 <= HOST_BITS_PER_WIDE_INT))))
5604 {
5605 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
5606 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
5607 {
5608 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
5609 enum machine_mode imode
5610 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
5611
5612 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
5613 {
5614 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
5615 op0 = expand_and (op0, op1, target);
5616 }
5617 else
5618 {
5619 tree count
5620 = build_int_2 (imode - bitsize, 0);
5621
5622 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
5623 target, 0);
5624 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
5625 target, 0);
5626 }
5627 }
5628
5629 return op0;
5630 }
4af3895e
JVA
5631 }
5632
bbf6f052
RK
5633 {
5634 enum machine_mode mode1;
5635 int bitsize;
5636 int bitpos;
7bb0943f 5637 tree offset;
bbf6f052 5638 int volatilep = 0;
034f9101 5639 int alignment;
839c4796
RK
5640 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
5641 &mode1, &unsignedp, &volatilep,
5642 &alignment);
bbf6f052 5643
e7f3c83f
RK
5644 /* If we got back the original object, something is wrong. Perhaps
5645 we are evaluating an expression too early. In any event, don't
5646 infinitely recurse. */
5647 if (tem == exp)
5648 abort ();
5649
3d27140a 5650 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
5651 computation, since it will need a temporary and TARGET is known
5652 to have to do. This occurs in unchecked conversion in Ada. */
5653
5654 op0 = expand_expr (tem,
5655 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
5656 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
5657 != INTEGER_CST)
5658 ? target : NULL_RTX),
4ed67205
RK
5659 VOIDmode,
5660 modifier == EXPAND_INITIALIZER ? modifier : 0);
bbf6f052 5661
8c8a8e34 5662 /* If this is a constant, put it into a register if it is a
8008b228 5663 legitimate constant and memory if it isn't. */
8c8a8e34
JW
5664 if (CONSTANT_P (op0))
5665 {
5666 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
f2878c6b 5667 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
8c8a8e34
JW
5668 op0 = force_reg (mode, op0);
5669 else
5670 op0 = validize_mem (force_const_mem (mode, op0));
5671 }
5672
7bb0943f
RS
5673 if (offset != 0)
5674 {
906c4e36 5675 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
5676
5677 if (GET_CODE (op0) != MEM)
5678 abort ();
2d48c13d
JL
5679
5680 if (GET_MODE (offset_rtx) != ptr_mode)
5681#ifdef POINTERS_EXTEND_UNSIGNED
5682 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 1);
5683#else
5684 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
5685#endif
5686
7bb0943f 5687 op0 = change_address (op0, VOIDmode,
38a448ca
RH
5688 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
5689 force_reg (ptr_mode, offset_rtx)));
7bb0943f
RS
5690 }
5691
bbf6f052
RK
5692 /* Don't forget about volatility even if this is a bitfield. */
5693 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
5694 {
5695 op0 = copy_rtx (op0);
5696 MEM_VOLATILE_P (op0) = 1;
5697 }
5698
921b3427
RK
5699 /* Check the access. */
5700 if (flag_check_memory_usage && GET_CODE (op0) == MEM)
5701 {
5702 enum memory_use_mode memory_usage;
5703 memory_usage = get_memory_usage_from_modifier (modifier);
5704
5705 if (memory_usage != MEMORY_USE_DONT)
5706 {
5707 rtx to;
5708 int size;
5709
5710 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
5711 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
5712
5713 /* Check the access right of the pointer. */
e9a25f70
JL
5714 if (size > BITS_PER_UNIT)
5715 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5716 to, ptr_mode,
5717 GEN_INT (size / BITS_PER_UNIT),
5718 TYPE_MODE (sizetype),
956d6950
JL
5719 GEN_INT (memory_usage),
5720 TYPE_MODE (integer_type_node));
921b3427
RK
5721 }
5722 }
5723
ccc98036
RS
5724 /* In cases where an aligned union has an unaligned object
5725 as a field, we might be extracting a BLKmode value from
5726 an integer-mode (e.g., SImode) object. Handle this case
5727 by doing the extract into an object as wide as the field
5728 (which we know to be the width of a basic mode), then
f2420d0b
JW
5729 storing into memory, and changing the mode to BLKmode.
5730 If we ultimately want the address (EXPAND_CONST_ADDRESS or
5731 EXPAND_INITIALIZER), then we must not copy to a temporary. */
bbf6f052 5732 if (mode1 == VOIDmode
ccc98036 5733 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
f9409c3a 5734 || (modifier != EXPAND_CONST_ADDRESS
f9409c3a 5735 && modifier != EXPAND_INITIALIZER
c2722ef6
RK
5736 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
5737 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5738 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
f9409c3a
JW
5739 /* If the field isn't aligned enough to fetch as a memref,
5740 fetch it as a bit field. */
5741 || (SLOW_UNALIGNED_ACCESS
5742 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
5743 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
bbf6f052 5744 {
bbf6f052
RK
5745 enum machine_mode ext_mode = mode;
5746
5747 if (ext_mode == BLKmode)
5748 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
5749
5750 if (ext_mode == BLKmode)
a281e72d
RK
5751 {
5752 /* In this case, BITPOS must start at a byte boundary and
5753 TARGET, if specified, must be a MEM. */
5754 if (GET_CODE (op0) != MEM
5755 || (target != 0 && GET_CODE (target) != MEM)
5756 || bitpos % BITS_PER_UNIT != 0)
5757 abort ();
5758
5759 op0 = change_address (op0, VOIDmode,
5760 plus_constant (XEXP (op0, 0),
5761 bitpos / BITS_PER_UNIT));
5762 if (target == 0)
5763 target = assign_temp (type, 0, 1, 1);
5764
5765 emit_block_move (target, op0,
5766 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5767 / BITS_PER_UNIT),
5768 1);
5769
5770 return target;
5771 }
bbf6f052 5772
dc6d66b3
RK
5773 op0 = validize_mem (op0);
5774
5775 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
5776 mark_reg_pointer (XEXP (op0, 0), alignment);
5777
5778 op0 = extract_bit_field (op0, bitsize, bitpos,
bbf6f052 5779 unsignedp, target, ext_mode, ext_mode,
034f9101 5780 alignment,
bbf6f052 5781 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
5782
5783 /* If the result is a record type and BITSIZE is narrower than
5784 the mode of OP0, an integral mode, and this is a big endian
5785 machine, we must put the field into the high-order bits. */
5786 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
5787 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
5788 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
5789 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
5790 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
5791 - bitsize),
5792 op0, 1);
5793
bbf6f052
RK
5794 if (mode == BLKmode)
5795 {
5796 rtx new = assign_stack_temp (ext_mode,
5797 bitsize / BITS_PER_UNIT, 0);
5798
5799 emit_move_insn (new, op0);
5800 op0 = copy_rtx (new);
5801 PUT_MODE (op0, BLKmode);
092dded9 5802 MEM_IN_STRUCT_P (op0) = 1;
bbf6f052
RK
5803 }
5804
5805 return op0;
5806 }
5807
05019f83
RK
5808 /* If the result is BLKmode, use that to access the object
5809 now as well. */
5810 if (mode == BLKmode)
5811 mode1 = BLKmode;
5812
bbf6f052
RK
5813 /* Get a reference to just this component. */
5814 if (modifier == EXPAND_CONST_ADDRESS
5815 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
38a448ca
RH
5816 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
5817 (bitpos / BITS_PER_UNIT)));
bbf6f052
RK
5818 else
5819 op0 = change_address (op0, mode1,
5820 plus_constant (XEXP (op0, 0),
5821 (bitpos / BITS_PER_UNIT)));
dc6d66b3
RK
5822 if (GET_CODE (XEXP (op0, 0)) == REG)
5823 mark_reg_pointer (XEXP (op0, 0), alignment);
5824
bbf6f052
RK
5825 MEM_IN_STRUCT_P (op0) = 1;
5826 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 5827 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 5828 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 5829 || modifier == EXPAND_INITIALIZER)
bbf6f052 5830 return op0;
0d15e60c 5831 else if (target == 0)
bbf6f052 5832 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 5833
bbf6f052
RK
5834 convert_move (target, op0, unsignedp);
5835 return target;
5836 }
5837
bbf6f052
RK
5838 /* Intended for a reference to a buffer of a file-object in Pascal.
5839 But it's not certain that a special tree code will really be
5840 necessary for these. INDIRECT_REF might work for them. */
5841 case BUFFER_REF:
5842 abort ();
5843
7308a047 5844 case IN_EXPR:
7308a047 5845 {
d6a5ac33
RK
5846 /* Pascal set IN expression.
5847
5848 Algorithm:
5849 rlo = set_low - (set_low%bits_per_word);
5850 the_word = set [ (index - rlo)/bits_per_word ];
5851 bit_index = index % bits_per_word;
5852 bitmask = 1 << bit_index;
5853 return !!(the_word & bitmask); */
5854
7308a047
RS
5855 tree set = TREE_OPERAND (exp, 0);
5856 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 5857 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 5858 tree set_type = TREE_TYPE (set);
7308a047
RS
5859 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
5860 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
5861 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
5862 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
5863 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
5864 rtx setval = expand_expr (set, 0, VOIDmode, 0);
5865 rtx setaddr = XEXP (setval, 0);
5866 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
5867 rtx rlow;
5868 rtx diff, quo, rem, addr, bit, result;
7308a047 5869
d6a5ac33
RK
5870 preexpand_calls (exp);
5871
5872 /* If domain is empty, answer is no. Likewise if index is constant
5873 and out of bounds. */
5874 if ((TREE_CODE (set_high_bound) == INTEGER_CST
5875 && TREE_CODE (set_low_bound) == INTEGER_CST
5876 && tree_int_cst_lt (set_high_bound, set_low_bound)
5877 || (TREE_CODE (index) == INTEGER_CST
5878 && TREE_CODE (set_low_bound) == INTEGER_CST
5879 && tree_int_cst_lt (index, set_low_bound))
5880 || (TREE_CODE (set_high_bound) == INTEGER_CST
5881 && TREE_CODE (index) == INTEGER_CST
5882 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
5883 return const0_rtx;
5884
d6a5ac33
RK
5885 if (target == 0)
5886 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
5887
5888 /* If we get here, we have to generate the code for both cases
5889 (in range and out of range). */
5890
5891 op0 = gen_label_rtx ();
5892 op1 = gen_label_rtx ();
5893
5894 if (! (GET_CODE (index_val) == CONST_INT
5895 && GET_CODE (lo_r) == CONST_INT))
5896 {
17938e57 5897 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
d6a5ac33 5898 GET_MODE (index_val), iunsignedp, 0);
7308a047
RS
5899 emit_jump_insn (gen_blt (op1));
5900 }
5901
5902 if (! (GET_CODE (index_val) == CONST_INT
5903 && GET_CODE (hi_r) == CONST_INT))
5904 {
17938e57 5905 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
d6a5ac33 5906 GET_MODE (index_val), iunsignedp, 0);
7308a047
RS
5907 emit_jump_insn (gen_bgt (op1));
5908 }
5909
5910 /* Calculate the element number of bit zero in the first word
5911 of the set. */
5912 if (GET_CODE (lo_r) == CONST_INT)
17938e57
RK
5913 rlow = GEN_INT (INTVAL (lo_r)
5914 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 5915 else
17938e57
RK
5916 rlow = expand_binop (index_mode, and_optab, lo_r,
5917 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 5918 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 5919
d6a5ac33
RK
5920 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
5921 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
5922
5923 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 5924 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 5925 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
5926 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
5927
7308a047 5928 addr = memory_address (byte_mode,
d6a5ac33
RK
5929 expand_binop (index_mode, add_optab, diff,
5930 setaddr, NULL_RTX, iunsignedp,
17938e57 5931 OPTAB_LIB_WIDEN));
d6a5ac33 5932
7308a047
RS
5933 /* Extract the bit we want to examine */
5934 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 5935 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
5936 make_tree (TREE_TYPE (index), rem),
5937 NULL_RTX, 1);
5938 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
5939 GET_MODE (target) == byte_mode ? target : 0,
7308a047 5940 1, OPTAB_LIB_WIDEN);
17938e57
RK
5941
5942 if (result != target)
5943 convert_move (target, result, 1);
7308a047
RS
5944
5945 /* Output the code to handle the out-of-range case. */
5946 emit_jump (op0);
5947 emit_label (op1);
5948 emit_move_insn (target, const0_rtx);
5949 emit_label (op0);
5950 return target;
5951 }
5952
bbf6f052
RK
5953 case WITH_CLEANUP_EXPR:
5954 if (RTL_EXPR_RTL (exp) == 0)
5955 {
5956 RTL_EXPR_RTL (exp)
921b3427 5957 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
e976b8b2
MS
5958 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
5959
bbf6f052
RK
5960 /* That's it for this cleanup. */
5961 TREE_OPERAND (exp, 2) = 0;
5962 }
5963 return RTL_EXPR_RTL (exp);
5964
5dab5552
MS
5965 case CLEANUP_POINT_EXPR:
5966 {
d93d4205 5967 extern int temp_slot_level;
e976b8b2
MS
5968 /* Start a new binding layer that will keep track of all cleanup
5969 actions to be performed. */
5970 expand_start_bindings (0);
5971
d93d4205 5972 target_temp_slot_level = temp_slot_level;
e976b8b2 5973
921b3427 5974 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
f283f66b
JM
5975 /* If we're going to use this value, load it up now. */
5976 if (! ignore)
5977 op0 = force_not_mem (op0);
d93d4205 5978 preserve_temp_slots (op0);
e976b8b2 5979 expand_end_bindings (NULL_TREE, 0, 0);
5dab5552
MS
5980 }
5981 return op0;
5982
bbf6f052
RK
5983 case CALL_EXPR:
5984 /* Check for a built-in function. */
5985 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
5986 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
5987 == FUNCTION_DECL)
bbf6f052
RK
5988 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
5989 return expand_builtin (exp, target, subtarget, tmode, ignore);
d6a5ac33 5990
bbf6f052
RK
5991 /* If this call was expanded already by preexpand_calls,
5992 just return the result we got. */
5993 if (CALL_EXPR_RTL (exp) != 0)
5994 return CALL_EXPR_RTL (exp);
d6a5ac33 5995
8129842c 5996 return expand_call (exp, target, ignore);
bbf6f052
RK
5997
5998 case NON_LVALUE_EXPR:
5999 case NOP_EXPR:
6000 case CONVERT_EXPR:
6001 case REFERENCE_EXPR:
bbf6f052
RK
6002 if (TREE_CODE (type) == UNION_TYPE)
6003 {
6004 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6005 if (target == 0)
06089a8b
RK
6006 {
6007 if (mode != BLKmode)
6008 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6009 else
6010 target = assign_temp (type, 0, 1, 1);
6011 }
d6a5ac33 6012
bbf6f052
RK
6013 if (GET_CODE (target) == MEM)
6014 /* Store data into beginning of memory target. */
6015 store_expr (TREE_OPERAND (exp, 0),
1499e0a8
RK
6016 change_address (target, TYPE_MODE (valtype), 0), 0);
6017
bbf6f052
RK
6018 else if (GET_CODE (target) == REG)
6019 /* Store this field into a union of the proper type. */
6020 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6021 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6022 VOIDmode, 0, 1,
6023 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
6024 else
6025 abort ();
6026
6027 /* Return the entire union. */
6028 return target;
6029 }
d6a5ac33 6030
7f62854a
RK
6031 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6032 {
6033 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
921b3427 6034 ro_modifier);
7f62854a
RK
6035
6036 /* If the signedness of the conversion differs and OP0 is
6037 a promoted SUBREG, clear that indication since we now
6038 have to do the proper extension. */
6039 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6040 && GET_CODE (op0) == SUBREG)
6041 SUBREG_PROMOTED_VAR_P (op0) = 0;
6042
6043 return op0;
6044 }
6045
1499e0a8 6046 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
6047 if (GET_MODE (op0) == mode)
6048 return op0;
12342f90 6049
d6a5ac33
RK
6050 /* If OP0 is a constant, just convert it into the proper mode. */
6051 if (CONSTANT_P (op0))
6052 return
6053 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6054 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
12342f90 6055
26fcb35a 6056 if (modifier == EXPAND_INITIALIZER)
38a448ca 6057 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 6058
bbf6f052 6059 if (target == 0)
d6a5ac33
RK
6060 return
6061 convert_to_mode (mode, op0,
6062 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 6063 else
d6a5ac33
RK
6064 convert_move (target, op0,
6065 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
6066 return target;
6067
6068 case PLUS_EXPR:
0f41302f
MS
6069 /* We come here from MINUS_EXPR when the second operand is a
6070 constant. */
bbf6f052
RK
6071 plus_expr:
6072 this_optab = add_optab;
6073
6074 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6075 something else, make sure we add the register to the constant and
6076 then to the other thing. This case can occur during strength
6077 reduction and doing it this way will produce better code if the
6078 frame pointer or argument pointer is eliminated.
6079
6080 fold-const.c will ensure that the constant is always in the inner
6081 PLUS_EXPR, so the only case we need to do anything about is if
6082 sp, ap, or fp is our second argument, in which case we must swap
6083 the innermost first argument and our second argument. */
6084
6085 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6086 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6087 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6088 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6089 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6090 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6091 {
6092 tree t = TREE_OPERAND (exp, 1);
6093
6094 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6095 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6096 }
6097
88f63c77 6098 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
6099 something, we might be forming a constant. So try to use
6100 plus_constant. If it produces a sum and we can't accept it,
6101 use force_operand. This allows P = &ARR[const] to generate
6102 efficient code on machines where a SYMBOL_REF is not a valid
6103 address.
6104
6105 If this is an EXPAND_SUM call, always return the sum. */
c980ac49 6106 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
88f63c77 6107 || mode == ptr_mode)
bbf6f052 6108 {
c980ac49
RS
6109 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6110 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6111 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6112 {
6113 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6114 EXPAND_SUM);
6115 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6116 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6117 op1 = force_operand (op1, target);
6118 return op1;
6119 }
bbf6f052 6120
c980ac49
RS
6121 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6122 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6123 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6124 {
6125 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6126 EXPAND_SUM);
6127 if (! CONSTANT_P (op0))
6128 {
6129 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6130 VOIDmode, modifier);
709f5be1
RS
6131 /* Don't go to both_summands if modifier
6132 says it's not right to return a PLUS. */
6133 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6134 goto binop2;
c980ac49
RS
6135 goto both_summands;
6136 }
6137 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6138 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6139 op0 = force_operand (op0, target);
6140 return op0;
6141 }
bbf6f052
RK
6142 }
6143
6144 /* No sense saving up arithmetic to be done
6145 if it's all in the wrong mode to form part of an address.
6146 And force_operand won't know whether to sign-extend or
6147 zero-extend. */
6148 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 6149 || mode != ptr_mode)
c980ac49 6150 goto binop;
bbf6f052
RK
6151
6152 preexpand_calls (exp);
6153 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6154 subtarget = 0;
6155
921b3427
RK
6156 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6157 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
bbf6f052 6158
c980ac49 6159 both_summands:
bbf6f052
RK
6160 /* Make sure any term that's a sum with a constant comes last. */
6161 if (GET_CODE (op0) == PLUS
6162 && CONSTANT_P (XEXP (op0, 1)))
6163 {
6164 temp = op0;
6165 op0 = op1;
6166 op1 = temp;
6167 }
6168 /* If adding to a sum including a constant,
6169 associate it to put the constant outside. */
6170 if (GET_CODE (op1) == PLUS
6171 && CONSTANT_P (XEXP (op1, 1)))
6172 {
6173 rtx constant_term = const0_rtx;
6174
6175 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6176 if (temp != 0)
6177 op0 = temp;
6f90e075
JW
6178 /* Ensure that MULT comes first if there is one. */
6179 else if (GET_CODE (op0) == MULT)
38a448ca 6180 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
bbf6f052 6181 else
38a448ca 6182 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
bbf6f052
RK
6183
6184 /* Let's also eliminate constants from op0 if possible. */
6185 op0 = eliminate_constant_term (op0, &constant_term);
6186
6187 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6188 their sum should be a constant. Form it into OP1, since the
6189 result we want will then be OP0 + OP1. */
6190
6191 temp = simplify_binary_operation (PLUS, mode, constant_term,
6192 XEXP (op1, 1));
6193 if (temp != 0)
6194 op1 = temp;
6195 else
38a448ca 6196 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
bbf6f052
RK
6197 }
6198
6199 /* Put a constant term last and put a multiplication first. */
6200 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6201 temp = op1, op1 = op0, op0 = temp;
6202
6203 temp = simplify_binary_operation (PLUS, mode, op0, op1);
38a448ca 6204 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
bbf6f052
RK
6205
6206 case MINUS_EXPR:
ea87523e
RK
6207 /* For initializers, we are allowed to return a MINUS of two
6208 symbolic constants. Here we handle all cases when both operands
6209 are constant. */
bbf6f052
RK
6210 /* Handle difference of two symbolic constants,
6211 for the sake of an initializer. */
6212 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6213 && really_constant_p (TREE_OPERAND (exp, 0))
6214 && really_constant_p (TREE_OPERAND (exp, 1)))
6215 {
906c4e36 6216 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
921b3427 6217 VOIDmode, ro_modifier);
906c4e36 6218 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
921b3427 6219 VOIDmode, ro_modifier);
ea87523e 6220
ea87523e
RK
6221 /* If the last operand is a CONST_INT, use plus_constant of
6222 the negated constant. Else make the MINUS. */
6223 if (GET_CODE (op1) == CONST_INT)
6224 return plus_constant (op0, - INTVAL (op1));
6225 else
38a448ca 6226 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052
RK
6227 }
6228 /* Convert A - const to A + (-const). */
6229 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6230 {
ae431183
RK
6231 tree negated = fold (build1 (NEGATE_EXPR, type,
6232 TREE_OPERAND (exp, 1)));
6233
6234 /* Deal with the case where we can't negate the constant
6235 in TYPE. */
6236 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6237 {
6238 tree newtype = signed_type (type);
6239 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6240 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6241 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6242
6243 if (! TREE_OVERFLOW (newneg))
6244 return expand_expr (convert (type,
6245 build (PLUS_EXPR, newtype,
6246 newop0, newneg)),
921b3427 6247 target, tmode, ro_modifier);
ae431183
RK
6248 }
6249 else
6250 {
6251 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6252 goto plus_expr;
6253 }
bbf6f052
RK
6254 }
6255 this_optab = sub_optab;
6256 goto binop;
6257
6258 case MULT_EXPR:
6259 preexpand_calls (exp);
6260 /* If first operand is constant, swap them.
6261 Thus the following special case checks need only
6262 check the second operand. */
6263 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6264 {
6265 register tree t1 = TREE_OPERAND (exp, 0);
6266 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6267 TREE_OPERAND (exp, 1) = t1;
6268 }
6269
6270 /* Attempt to return something suitable for generating an
6271 indexed address, for machines that support that. */
6272
88f63c77 6273 if (modifier == EXPAND_SUM && mode == ptr_mode
bbf6f052 6274 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 6275 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 6276 {
921b3427
RK
6277 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6278 EXPAND_SUM);
bbf6f052
RK
6279
6280 /* Apply distributive law if OP0 is x+c. */
6281 if (GET_CODE (op0) == PLUS
6282 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
38a448ca
RH
6283 return gen_rtx_PLUS (mode,
6284 gen_rtx_MULT (mode, XEXP (op0, 0),
6285 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
906c4e36
RK
6286 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6287 * INTVAL (XEXP (op0, 1))));
bbf6f052
RK
6288
6289 if (GET_CODE (op0) != REG)
906c4e36 6290 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
6291 if (GET_CODE (op0) != REG)
6292 op0 = copy_to_mode_reg (mode, op0);
6293
38a448ca
RH
6294 return gen_rtx_MULT (mode, op0,
6295 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
6296 }
6297
6298 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6299 subtarget = 0;
6300
6301 /* Check for multiplying things that have been extended
6302 from a narrower type. If this machine supports multiplying
6303 in that narrower type with a result in the desired type,
6304 do it that way, and avoid the explicit type-conversion. */
6305 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6306 && TREE_CODE (type) == INTEGER_TYPE
6307 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6308 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6309 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6310 && int_fits_type_p (TREE_OPERAND (exp, 1),
6311 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6312 /* Don't use a widening multiply if a shift will do. */
6313 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 6314 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
6315 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6316 ||
6317 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6318 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6319 ==
6320 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6321 /* If both operands are extended, they must either both
6322 be zero-extended or both be sign-extended. */
6323 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6324 ==
6325 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6326 {
6327 enum machine_mode innermode
6328 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
6329 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6330 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
6331 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6332 ? umul_widen_optab : smul_widen_optab);
b10af0c8 6333 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 6334 {
b10af0c8
TG
6335 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6336 {
6337 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6338 NULL_RTX, VOIDmode, 0);
6339 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6340 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6341 VOIDmode, 0);
6342 else
6343 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6344 NULL_RTX, VOIDmode, 0);
6345 goto binop2;
6346 }
6347 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6348 && innermode == word_mode)
6349 {
6350 rtx htem;
6351 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6352 NULL_RTX, VOIDmode, 0);
6353 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6354 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6355 VOIDmode, 0);
6356 else
6357 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6358 NULL_RTX, VOIDmode, 0);
6359 temp = expand_binop (mode, other_optab, op0, op1, target,
6360 unsignedp, OPTAB_LIB_WIDEN);
6361 htem = expand_mult_highpart_adjust (innermode,
6362 gen_highpart (innermode, temp),
6363 op0, op1,
6364 gen_highpart (innermode, temp),
6365 unsignedp);
6366 emit_move_insn (gen_highpart (innermode, temp), htem);
6367 return temp;
6368 }
bbf6f052
RK
6369 }
6370 }
6371 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6372 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6373 return expand_mult (mode, op0, op1, target, unsignedp);
6374
6375 case TRUNC_DIV_EXPR:
6376 case FLOOR_DIV_EXPR:
6377 case CEIL_DIV_EXPR:
6378 case ROUND_DIV_EXPR:
6379 case EXACT_DIV_EXPR:
6380 preexpand_calls (exp);
6381 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6382 subtarget = 0;
6383 /* Possible optimization: compute the dividend with EXPAND_SUM
6384 then if the divisor is constant can optimize the case
6385 where some terms of the dividend have coeffs divisible by it. */
6386 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6387 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6388 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6389
6390 case RDIV_EXPR:
6391 this_optab = flodiv_optab;
6392 goto binop;
6393
6394 case TRUNC_MOD_EXPR:
6395 case FLOOR_MOD_EXPR:
6396 case CEIL_MOD_EXPR:
6397 case ROUND_MOD_EXPR:
6398 preexpand_calls (exp);
6399 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6400 subtarget = 0;
6401 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6402 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6403 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6404
6405 case FIX_ROUND_EXPR:
6406 case FIX_FLOOR_EXPR:
6407 case FIX_CEIL_EXPR:
6408 abort (); /* Not used for C. */
6409
6410 case FIX_TRUNC_EXPR:
906c4e36 6411 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6412 if (target == 0)
6413 target = gen_reg_rtx (mode);
6414 expand_fix (target, op0, unsignedp);
6415 return target;
6416
6417 case FLOAT_EXPR:
906c4e36 6418 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6419 if (target == 0)
6420 target = gen_reg_rtx (mode);
6421 /* expand_float can't figure out what to do if FROM has VOIDmode.
6422 So give it the correct mode. With -O, cse will optimize this. */
6423 if (GET_MODE (op0) == VOIDmode)
6424 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6425 op0);
6426 expand_float (target, op0,
6427 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6428 return target;
6429
6430 case NEGATE_EXPR:
5b22bee8 6431 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
bbf6f052
RK
6432 temp = expand_unop (mode, neg_optab, op0, target, 0);
6433 if (temp == 0)
6434 abort ();
6435 return temp;
6436
6437 case ABS_EXPR:
6438 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6439
2d7050fd 6440 /* Handle complex values specially. */
d6a5ac33
RK
6441 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6442 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6443 return expand_complex_abs (mode, op0, target, unsignedp);
2d7050fd 6444
bbf6f052
RK
6445 /* Unsigned abs is simply the operand. Testing here means we don't
6446 risk generating incorrect code below. */
6447 if (TREE_UNSIGNED (type))
6448 return op0;
6449
2e5ec6cf
RK
6450 return expand_abs (mode, op0, target, unsignedp,
6451 safe_from_p (target, TREE_OPERAND (exp, 0)));
bbf6f052
RK
6452
6453 case MAX_EXPR:
6454 case MIN_EXPR:
6455 target = original_target;
6456 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1))
fc155707 6457 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 6458 || GET_MODE (target) != mode
bbf6f052
RK
6459 || (GET_CODE (target) == REG
6460 && REGNO (target) < FIRST_PSEUDO_REGISTER))
6461 target = gen_reg_rtx (mode);
906c4e36 6462 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6463 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6464
6465 /* First try to do it with a special MIN or MAX instruction.
6466 If that does not win, use a conditional jump to select the proper
6467 value. */
6468 this_optab = (TREE_UNSIGNED (type)
6469 ? (code == MIN_EXPR ? umin_optab : umax_optab)
6470 : (code == MIN_EXPR ? smin_optab : smax_optab));
6471
6472 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
6473 OPTAB_WIDEN);
6474 if (temp != 0)
6475 return temp;
6476
fa2981d8
JW
6477 /* At this point, a MEM target is no longer useful; we will get better
6478 code without it. */
6479
6480 if (GET_CODE (target) == MEM)
6481 target = gen_reg_rtx (mode);
6482
ee456b1c
RK
6483 if (target != op0)
6484 emit_move_insn (target, op0);
d6a5ac33 6485
bbf6f052 6486 op0 = gen_label_rtx ();
d6a5ac33 6487
f81497d9
RS
6488 /* If this mode is an integer too wide to compare properly,
6489 compare word by word. Rely on cse to optimize constant cases. */
d6a5ac33 6490 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
bbf6f052 6491 {
f81497d9 6492 if (code == MAX_EXPR)
d6a5ac33
RK
6493 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6494 target, op1, NULL_RTX, op0);
bbf6f052 6495 else
d6a5ac33
RK
6496 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
6497 op1, target, NULL_RTX, op0);
ee456b1c 6498 emit_move_insn (target, op1);
bbf6f052 6499 }
f81497d9
RS
6500 else
6501 {
6502 if (code == MAX_EXPR)
6503 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
6504 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
6505 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
f81497d9
RS
6506 else
6507 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
6508 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
6509 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
f81497d9 6510 if (temp == const0_rtx)
ee456b1c 6511 emit_move_insn (target, op1);
f81497d9
RS
6512 else if (temp != const_true_rtx)
6513 {
6514 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
6515 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
6516 else
6517 abort ();
ee456b1c 6518 emit_move_insn (target, op1);
f81497d9
RS
6519 }
6520 }
bbf6f052
RK
6521 emit_label (op0);
6522 return target;
6523
bbf6f052
RK
6524 case BIT_NOT_EXPR:
6525 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6526 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
6527 if (temp == 0)
6528 abort ();
6529 return temp;
6530
6531 case FFS_EXPR:
6532 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6533 temp = expand_unop (mode, ffs_optab, op0, target, 1);
6534 if (temp == 0)
6535 abort ();
6536 return temp;
6537
d6a5ac33
RK
6538 /* ??? Can optimize bitwise operations with one arg constant.
6539 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
6540 and (a bitwise1 b) bitwise2 b (etc)
6541 but that is probably not worth while. */
6542
6543 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
6544 boolean values when we want in all cases to compute both of them. In
6545 general it is fastest to do TRUTH_AND_EXPR by computing both operands
6546 as actual zero-or-1 values and then bitwise anding. In cases where
6547 there cannot be any side effects, better code would be made by
6548 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
6549 how to recognize those cases. */
6550
bbf6f052
RK
6551 case TRUTH_AND_EXPR:
6552 case BIT_AND_EXPR:
6553 this_optab = and_optab;
6554 goto binop;
6555
bbf6f052
RK
6556 case TRUTH_OR_EXPR:
6557 case BIT_IOR_EXPR:
6558 this_optab = ior_optab;
6559 goto binop;
6560
874726a8 6561 case TRUTH_XOR_EXPR:
bbf6f052
RK
6562 case BIT_XOR_EXPR:
6563 this_optab = xor_optab;
6564 goto binop;
6565
6566 case LSHIFT_EXPR:
6567 case RSHIFT_EXPR:
6568 case LROTATE_EXPR:
6569 case RROTATE_EXPR:
6570 preexpand_calls (exp);
6571 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
6572 subtarget = 0;
6573 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6574 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
6575 unsignedp);
6576
d6a5ac33
RK
6577 /* Could determine the answer when only additive constants differ. Also,
6578 the addition of one can be handled by changing the condition. */
bbf6f052
RK
6579 case LT_EXPR:
6580 case LE_EXPR:
6581 case GT_EXPR:
6582 case GE_EXPR:
6583 case EQ_EXPR:
6584 case NE_EXPR:
6585 preexpand_calls (exp);
6586 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
6587 if (temp != 0)
6588 return temp;
d6a5ac33 6589
0f41302f 6590 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
6591 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
6592 && original_target
6593 && GET_CODE (original_target) == REG
6594 && (GET_MODE (original_target)
6595 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
6596 {
d6a5ac33
RK
6597 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
6598 VOIDmode, 0);
6599
bbf6f052
RK
6600 if (temp != original_target)
6601 temp = copy_to_reg (temp);
d6a5ac33 6602
bbf6f052 6603 op1 = gen_label_rtx ();
906c4e36 6604 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
bbf6f052
RK
6605 GET_MODE (temp), unsignedp, 0);
6606 emit_jump_insn (gen_beq (op1));
6607 emit_move_insn (temp, const1_rtx);
6608 emit_label (op1);
6609 return temp;
6610 }
d6a5ac33 6611
bbf6f052
RK
6612 /* If no set-flag instruction, must generate a conditional
6613 store into a temporary variable. Drop through
6614 and handle this like && and ||. */
6615
6616 case TRUTH_ANDIF_EXPR:
6617 case TRUTH_ORIF_EXPR:
e44842fe
RK
6618 if (! ignore
6619 && (target == 0 || ! safe_from_p (target, exp)
6620 /* Make sure we don't have a hard reg (such as function's return
6621 value) live across basic blocks, if not optimizing. */
6622 || (!optimize && GET_CODE (target) == REG
6623 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 6624 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
6625
6626 if (target)
6627 emit_clr_insn (target);
6628
bbf6f052
RK
6629 op1 = gen_label_rtx ();
6630 jumpifnot (exp, op1);
e44842fe
RK
6631
6632 if (target)
6633 emit_0_to_1_insn (target);
6634
bbf6f052 6635 emit_label (op1);
e44842fe 6636 return ignore ? const0_rtx : target;
bbf6f052
RK
6637
6638 case TRUTH_NOT_EXPR:
6639 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
6640 /* The parser is careful to generate TRUTH_NOT_EXPR
6641 only with operands that are always zero or one. */
906c4e36 6642 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
6643 target, 1, OPTAB_LIB_WIDEN);
6644 if (temp == 0)
6645 abort ();
6646 return temp;
6647
6648 case COMPOUND_EXPR:
6649 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6650 emit_queue ();
6651 return expand_expr (TREE_OPERAND (exp, 1),
6652 (ignore ? const0_rtx : target),
6653 VOIDmode, 0);
6654
6655 case COND_EXPR:
ac01eace
RK
6656 /* If we would have a "singleton" (see below) were it not for a
6657 conversion in each arm, bring that conversion back out. */
6658 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6659 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
6660 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
6661 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
6662 {
6663 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
6664 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
6665
6666 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
6667 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6668 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
6669 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
6670 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
6671 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
6672 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
6673 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
6674 return expand_expr (build1 (NOP_EXPR, type,
6675 build (COND_EXPR, TREE_TYPE (true),
6676 TREE_OPERAND (exp, 0),
6677 true, false)),
6678 target, tmode, modifier);
6679 }
6680
bbf6f052
RK
6681 {
6682 /* Note that COND_EXPRs whose type is a structure or union
6683 are required to be constructed to contain assignments of
6684 a temporary variable, so that we can evaluate them here
6685 for side effect only. If type is void, we must do likewise. */
6686
6687 /* If an arm of the branch requires a cleanup,
6688 only that cleanup is performed. */
6689
6690 tree singleton = 0;
6691 tree binary_op = 0, unary_op = 0;
bbf6f052
RK
6692
6693 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
6694 convert it to our mode, if necessary. */
6695 if (integer_onep (TREE_OPERAND (exp, 1))
6696 && integer_zerop (TREE_OPERAND (exp, 2))
6697 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6698 {
dd27116b
RK
6699 if (ignore)
6700 {
6701 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
921b3427 6702 ro_modifier);
dd27116b
RK
6703 return const0_rtx;
6704 }
6705
921b3427 6706 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
bbf6f052
RK
6707 if (GET_MODE (op0) == mode)
6708 return op0;
d6a5ac33 6709
bbf6f052
RK
6710 if (target == 0)
6711 target = gen_reg_rtx (mode);
6712 convert_move (target, op0, unsignedp);
6713 return target;
6714 }
6715
ac01eace
RK
6716 /* Check for X ? A + B : A. If we have this, we can copy A to the
6717 output and conditionally add B. Similarly for unary operations.
6718 Don't do this if X has side-effects because those side effects
6719 might affect A or B and the "?" operation is a sequence point in
6720 ANSI. (operand_equal_p tests for side effects.) */
bbf6f052
RK
6721
6722 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
6723 && operand_equal_p (TREE_OPERAND (exp, 2),
6724 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6725 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
6726 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
6727 && operand_equal_p (TREE_OPERAND (exp, 1),
6728 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6729 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
6730 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
6731 && operand_equal_p (TREE_OPERAND (exp, 2),
6732 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
6733 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
6734 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
6735 && operand_equal_p (TREE_OPERAND (exp, 1),
6736 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
6737 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
6738
01c8a7c8
RK
6739 /* If we are not to produce a result, we have no target. Otherwise,
6740 if a target was specified use it; it will not be used as an
6741 intermediate target unless it is safe. If no target, use a
6742 temporary. */
6743
6744 if (ignore)
6745 temp = 0;
6746 else if (original_target
6747 && (safe_from_p (original_target, TREE_OPERAND (exp, 0))
6748 || (singleton && GET_CODE (original_target) == REG
6749 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
6750 && original_target == var_rtx (singleton)))
6751 && GET_MODE (original_target) == mode
6752 && ! (GET_CODE (original_target) == MEM
6753 && MEM_VOLATILE_P (original_target)))
6754 temp = original_target;
6755 else if (TREE_ADDRESSABLE (type))
6756 abort ();
6757 else
6758 temp = assign_temp (type, 0, 0, 1);
6759
ac01eace
RK
6760 /* If we had X ? A + C : A, with C a constant power of 2, and we can
6761 do the test of X as a store-flag operation, do this as
6762 A + ((X != 0) << log C). Similarly for other simple binary
6763 operators. Only do for C == 1 if BRANCH_COST is low. */
dd27116b 6764 if (temp && singleton && binary_op
bbf6f052
RK
6765 && (TREE_CODE (binary_op) == PLUS_EXPR
6766 || TREE_CODE (binary_op) == MINUS_EXPR
6767 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 6768 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
ac01eace
RK
6769 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
6770 : integer_onep (TREE_OPERAND (binary_op, 1)))
bbf6f052
RK
6771 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
6772 {
6773 rtx result;
6774 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
6775 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
6776 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
2d444001 6777 : xor_optab);
bbf6f052
RK
6778
6779 /* If we had X ? A : A + 1, do this as A + (X == 0).
6780
6781 We have to invert the truth value here and then put it
6782 back later if do_store_flag fails. We cannot simply copy
6783 TREE_OPERAND (exp, 0) to another variable and modify that
6784 because invert_truthvalue can modify the tree pointed to
6785 by its argument. */
6786 if (singleton == TREE_OPERAND (exp, 1))
6787 TREE_OPERAND (exp, 0)
6788 = invert_truthvalue (TREE_OPERAND (exp, 0));
6789
6790 result = do_store_flag (TREE_OPERAND (exp, 0),
906c4e36
RK
6791 (safe_from_p (temp, singleton)
6792 ? temp : NULL_RTX),
bbf6f052
RK
6793 mode, BRANCH_COST <= 1);
6794
ac01eace
RK
6795 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
6796 result = expand_shift (LSHIFT_EXPR, mode, result,
6797 build_int_2 (tree_log2
6798 (TREE_OPERAND
6799 (binary_op, 1)),
6800 0),
6801 (safe_from_p (temp, singleton)
6802 ? temp : NULL_RTX), 0);
6803
bbf6f052
RK
6804 if (result)
6805 {
906c4e36 6806 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6807 return expand_binop (mode, boptab, op1, result, temp,
6808 unsignedp, OPTAB_LIB_WIDEN);
6809 }
6810 else if (singleton == TREE_OPERAND (exp, 1))
6811 TREE_OPERAND (exp, 0)
6812 = invert_truthvalue (TREE_OPERAND (exp, 0));
6813 }
6814
dabf8373 6815 do_pending_stack_adjust ();
bbf6f052
RK
6816 NO_DEFER_POP;
6817 op0 = gen_label_rtx ();
6818
6819 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
6820 {
6821 if (temp != 0)
6822 {
6823 /* If the target conflicts with the other operand of the
6824 binary op, we can't use it. Also, we can't use the target
6825 if it is a hard register, because evaluating the condition
6826 might clobber it. */
6827 if ((binary_op
6828 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1)))
6829 || (GET_CODE (temp) == REG
6830 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
6831 temp = gen_reg_rtx (mode);
6832 store_expr (singleton, temp, 0);
6833 }
6834 else
906c4e36 6835 expand_expr (singleton,
2937cf87 6836 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6837 if (singleton == TREE_OPERAND (exp, 1))
6838 jumpif (TREE_OPERAND (exp, 0), op0);
6839 else
6840 jumpifnot (TREE_OPERAND (exp, 0), op0);
6841
956d6950 6842 start_cleanup_deferral ();
bbf6f052
RK
6843 if (binary_op && temp == 0)
6844 /* Just touch the other operand. */
6845 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 6846 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6847 else if (binary_op)
6848 store_expr (build (TREE_CODE (binary_op), type,
6849 make_tree (type, temp),
6850 TREE_OPERAND (binary_op, 1)),
6851 temp, 0);
6852 else
6853 store_expr (build1 (TREE_CODE (unary_op), type,
6854 make_tree (type, temp)),
6855 temp, 0);
6856 op1 = op0;
bbf6f052 6857 }
bbf6f052
RK
6858 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
6859 comparison operator. If we have one of these cases, set the
6860 output to A, branch on A (cse will merge these two references),
6861 then set the output to FOO. */
6862 else if (temp
6863 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6864 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6865 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6866 TREE_OPERAND (exp, 1), 0)
e9a25f70
JL
6867 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6868 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
bbf6f052
RK
6869 && safe_from_p (temp, TREE_OPERAND (exp, 2)))
6870 {
6871 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6872 temp = gen_reg_rtx (mode);
6873 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6874 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 6875
956d6950 6876 start_cleanup_deferral ();
bbf6f052
RK
6877 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6878 op1 = op0;
6879 }
6880 else if (temp
6881 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
6882 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
6883 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6884 TREE_OPERAND (exp, 2), 0)
e9a25f70
JL
6885 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
6886 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
bbf6f052
RK
6887 && safe_from_p (temp, TREE_OPERAND (exp, 1)))
6888 {
6889 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
6890 temp = gen_reg_rtx (mode);
6891 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6892 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 6893
956d6950 6894 start_cleanup_deferral ();
bbf6f052
RK
6895 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6896 op1 = op0;
6897 }
6898 else
6899 {
6900 op1 = gen_label_rtx ();
6901 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 6902
956d6950 6903 start_cleanup_deferral ();
bbf6f052
RK
6904 if (temp != 0)
6905 store_expr (TREE_OPERAND (exp, 1), temp, 0);
6906 else
906c4e36
RK
6907 expand_expr (TREE_OPERAND (exp, 1),
6908 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 6909 end_cleanup_deferral ();
bbf6f052
RK
6910 emit_queue ();
6911 emit_jump_insn (gen_jump (op1));
6912 emit_barrier ();
6913 emit_label (op0);
956d6950 6914 start_cleanup_deferral ();
bbf6f052
RK
6915 if (temp != 0)
6916 store_expr (TREE_OPERAND (exp, 2), temp, 0);
6917 else
906c4e36
RK
6918 expand_expr (TREE_OPERAND (exp, 2),
6919 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6920 }
6921
956d6950 6922 end_cleanup_deferral ();
bbf6f052
RK
6923
6924 emit_queue ();
6925 emit_label (op1);
6926 OK_DEFER_POP;
5dab5552 6927
bbf6f052
RK
6928 return temp;
6929 }
6930
6931 case TARGET_EXPR:
6932 {
6933 /* Something needs to be initialized, but we didn't know
6934 where that thing was when building the tree. For example,
6935 it could be the return value of a function, or a parameter
6936 to a function which lays down in the stack, or a temporary
6937 variable which must be passed by reference.
6938
6939 We guarantee that the expression will either be constructed
6940 or copied into our original target. */
6941
6942 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 6943 tree cleanups = NULL_TREE;
5c062816 6944 tree exp1;
61d6b1cc 6945 rtx temp;
bbf6f052
RK
6946
6947 if (TREE_CODE (slot) != VAR_DECL)
6948 abort ();
6949
9c51f375
RK
6950 if (! ignore)
6951 target = original_target;
6952
bbf6f052
RK
6953 if (target == 0)
6954 {
6955 if (DECL_RTL (slot) != 0)
ac993f4f
MS
6956 {
6957 target = DECL_RTL (slot);
5c062816 6958 /* If we have already expanded the slot, so don't do
ac993f4f 6959 it again. (mrs) */
5c062816
MS
6960 if (TREE_OPERAND (exp, 1) == NULL_TREE)
6961 return target;
ac993f4f 6962 }
bbf6f052
RK
6963 else
6964 {
e9a25f70 6965 target = assign_temp (type, 2, 0, 1);
bbf6f052
RK
6966 /* All temp slots at this level must not conflict. */
6967 preserve_temp_slots (target);
6968 DECL_RTL (slot) = target;
e9a25f70
JL
6969 if (TREE_ADDRESSABLE (slot))
6970 {
6971 TREE_ADDRESSABLE (slot) = 0;
6972 mark_addressable (slot);
6973 }
bbf6f052 6974
e287fd6e
RK
6975 /* Since SLOT is not known to the called function
6976 to belong to its stack frame, we must build an explicit
6977 cleanup. This case occurs when we must build up a reference
6978 to pass the reference as an argument. In this case,
6979 it is very likely that such a reference need not be
6980 built here. */
6981
6982 if (TREE_OPERAND (exp, 2) == 0)
6983 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
2a888d4c 6984 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 6985 }
bbf6f052
RK
6986 }
6987 else
6988 {
6989 /* This case does occur, when expanding a parameter which
6990 needs to be constructed on the stack. The target
6991 is the actual stack address that we want to initialize.
6992 The function we call will perform the cleanup in this case. */
6993
8c042b47
RS
6994 /* If we have already assigned it space, use that space,
6995 not target that we were passed in, as our target
6996 parameter is only a hint. */
6997 if (DECL_RTL (slot) != 0)
6998 {
6999 target = DECL_RTL (slot);
7000 /* If we have already expanded the slot, so don't do
7001 it again. (mrs) */
7002 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7003 return target;
7004 }
21002281
JW
7005 else
7006 {
7007 DECL_RTL (slot) = target;
7008 /* If we must have an addressable slot, then make sure that
7009 the RTL that we just stored in slot is OK. */
7010 if (TREE_ADDRESSABLE (slot))
7011 {
7012 TREE_ADDRESSABLE (slot) = 0;
7013 mark_addressable (slot);
7014 }
7015 }
bbf6f052
RK
7016 }
7017
4847c938 7018 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
7019 /* Mark it as expanded. */
7020 TREE_OPERAND (exp, 1) = NULL_TREE;
7021
41531e5b 7022 store_expr (exp1, target, 0);
61d6b1cc 7023
e976b8b2 7024 expand_decl_cleanup (NULL_TREE, cleanups);
61d6b1cc 7025
41531e5b 7026 return target;
bbf6f052
RK
7027 }
7028
7029 case INIT_EXPR:
7030 {
7031 tree lhs = TREE_OPERAND (exp, 0);
7032 tree rhs = TREE_OPERAND (exp, 1);
7033 tree noncopied_parts = 0;
7034 tree lhs_type = TREE_TYPE (lhs);
7035
7036 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7037 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7038 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7039 TYPE_NONCOPIED_PARTS (lhs_type));
7040 while (noncopied_parts != 0)
7041 {
7042 expand_assignment (TREE_VALUE (noncopied_parts),
7043 TREE_PURPOSE (noncopied_parts), 0, 0);
7044 noncopied_parts = TREE_CHAIN (noncopied_parts);
7045 }
7046 return temp;
7047 }
7048
7049 case MODIFY_EXPR:
7050 {
7051 /* If lhs is complex, expand calls in rhs before computing it.
7052 That's so we don't compute a pointer and save it over a call.
7053 If lhs is simple, compute it first so we can give it as a
7054 target if the rhs is just a call. This avoids an extra temp and copy
7055 and that prevents a partial-subsumption which makes bad code.
7056 Actually we could treat component_ref's of vars like vars. */
7057
7058 tree lhs = TREE_OPERAND (exp, 0);
7059 tree rhs = TREE_OPERAND (exp, 1);
7060 tree noncopied_parts = 0;
7061 tree lhs_type = TREE_TYPE (lhs);
7062
7063 temp = 0;
7064
7065 if (TREE_CODE (lhs) != VAR_DECL
7066 && TREE_CODE (lhs) != RESULT_DECL
b60334e8
RK
7067 && TREE_CODE (lhs) != PARM_DECL
7068 && ! (TREE_CODE (lhs) == INDIRECT_REF
7069 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
bbf6f052
RK
7070 preexpand_calls (exp);
7071
7072 /* Check for |= or &= of a bitfield of size one into another bitfield
7073 of size 1. In this case, (unless we need the result of the
7074 assignment) we can do this more efficiently with a
7075 test followed by an assignment, if necessary.
7076
7077 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7078 things change so we do, this code should be enhanced to
7079 support it. */
7080 if (ignore
7081 && TREE_CODE (lhs) == COMPONENT_REF
7082 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7083 || TREE_CODE (rhs) == BIT_AND_EXPR)
7084 && TREE_OPERAND (rhs, 0) == lhs
7085 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7086 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7087 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7088 {
7089 rtx label = gen_label_rtx ();
7090
7091 do_jump (TREE_OPERAND (rhs, 1),
7092 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7093 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7094 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7095 (TREE_CODE (rhs) == BIT_IOR_EXPR
7096 ? integer_one_node
7097 : integer_zero_node)),
7098 0, 0);
e7c33f54 7099 do_pending_stack_adjust ();
bbf6f052
RK
7100 emit_label (label);
7101 return const0_rtx;
7102 }
7103
7104 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7105 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7106 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7107 TYPE_NONCOPIED_PARTS (lhs_type));
7108
7109 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7110 while (noncopied_parts != 0)
7111 {
7112 expand_assignment (TREE_PURPOSE (noncopied_parts),
7113 TREE_VALUE (noncopied_parts), 0, 0);
7114 noncopied_parts = TREE_CHAIN (noncopied_parts);
7115 }
7116 return temp;
7117 }
7118
7119 case PREINCREMENT_EXPR:
7120 case PREDECREMENT_EXPR:
7b8b9722 7121 return expand_increment (exp, 0, ignore);
bbf6f052
RK
7122
7123 case POSTINCREMENT_EXPR:
7124 case POSTDECREMENT_EXPR:
7125 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 7126 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
7127
7128 case ADDR_EXPR:
987c71d9 7129 /* If nonzero, TEMP will be set to the address of something that might
0f41302f 7130 be a MEM corresponding to a stack slot. */
987c71d9
RK
7131 temp = 0;
7132
bbf6f052
RK
7133 /* Are we taking the address of a nested function? */
7134 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9
JM
7135 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7136 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0)))
bbf6f052
RK
7137 {
7138 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7139 op0 = force_operand (op0, target);
7140 }
682ba3a6
RK
7141 /* If we are taking the address of something erroneous, just
7142 return a zero. */
7143 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7144 return const0_rtx;
bbf6f052
RK
7145 else
7146 {
e287fd6e
RK
7147 /* We make sure to pass const0_rtx down if we came in with
7148 ignore set, to avoid doing the cleanups twice for something. */
7149 op0 = expand_expr (TREE_OPERAND (exp, 0),
7150 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
7151 (modifier == EXPAND_INITIALIZER
7152 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 7153
119af78a
RK
7154 /* If we are going to ignore the result, OP0 will have been set
7155 to const0_rtx, so just return it. Don't get confused and
7156 think we are taking the address of the constant. */
7157 if (ignore)
7158 return op0;
7159
3539e816
MS
7160 op0 = protect_from_queue (op0, 0);
7161
896102d0
RK
7162 /* We would like the object in memory. If it is a constant,
7163 we can have it be statically allocated into memory. For
682ba3a6 7164 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
896102d0
RK
7165 memory and store the value into it. */
7166
7167 if (CONSTANT_P (op0))
7168 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7169 op0);
987c71d9 7170 else if (GET_CODE (op0) == MEM)
af5b53ed
RK
7171 {
7172 mark_temp_addr_taken (op0);
7173 temp = XEXP (op0, 0);
7174 }
896102d0 7175
682ba3a6
RK
7176 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7177 || GET_CODE (op0) == CONCAT)
896102d0
RK
7178 {
7179 /* If this object is in a register, it must be not
0f41302f 7180 be BLKmode. */
896102d0 7181 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
06089a8b 7182 rtx memloc = assign_temp (inner_type, 1, 1, 1);
896102d0 7183
7a0b7b9a 7184 mark_temp_addr_taken (memloc);
896102d0
RK
7185 emit_move_insn (memloc, op0);
7186 op0 = memloc;
7187 }
7188
bbf6f052
RK
7189 if (GET_CODE (op0) != MEM)
7190 abort ();
7191
7192 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77
RK
7193 {
7194 temp = XEXP (op0, 0);
7195#ifdef POINTERS_EXTEND_UNSIGNED
7196 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7197 && mode == ptr_mode)
9fcfcce7 7198 temp = convert_memory_address (ptr_mode, temp);
88f63c77
RK
7199#endif
7200 return temp;
7201 }
987c71d9 7202
bbf6f052
RK
7203 op0 = force_operand (XEXP (op0, 0), target);
7204 }
987c71d9 7205
bbf6f052 7206 if (flag_force_addr && GET_CODE (op0) != REG)
987c71d9
RK
7207 op0 = force_reg (Pmode, op0);
7208
dc6d66b3
RK
7209 if (GET_CODE (op0) == REG
7210 && ! REG_USERVAR_P (op0))
7211 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
987c71d9
RK
7212
7213 /* If we might have had a temp slot, add an equivalent address
7214 for it. */
7215 if (temp != 0)
7216 update_temp_slot_address (temp, op0);
7217
88f63c77
RK
7218#ifdef POINTERS_EXTEND_UNSIGNED
7219 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7220 && mode == ptr_mode)
9fcfcce7 7221 op0 = convert_memory_address (ptr_mode, op0);
88f63c77
RK
7222#endif
7223
bbf6f052
RK
7224 return op0;
7225
7226 case ENTRY_VALUE_EXPR:
7227 abort ();
7228
7308a047
RS
7229 /* COMPLEX type for Extended Pascal & Fortran */
7230 case COMPLEX_EXPR:
7231 {
7232 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 7233 rtx insns;
7308a047
RS
7234
7235 /* Get the rtx code of the operands. */
7236 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7237 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7238
7239 if (! target)
7240 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7241
6551fa4d 7242 start_sequence ();
7308a047
RS
7243
7244 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
7245 emit_move_insn (gen_realpart (mode, target), op0);
7246 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 7247
6551fa4d
JW
7248 insns = get_insns ();
7249 end_sequence ();
7250
7308a047 7251 /* Complex construction should appear as a single unit. */
6551fa4d
JW
7252 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7253 each with a separate pseudo as destination.
7254 It's not correct for flow to treat them as a unit. */
6d6e61ce 7255 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
7256 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7257 else
7258 emit_insns (insns);
7308a047
RS
7259
7260 return target;
7261 }
7262
7263 case REALPART_EXPR:
2d7050fd
RS
7264 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7265 return gen_realpart (mode, op0);
7308a047
RS
7266
7267 case IMAGPART_EXPR:
2d7050fd
RS
7268 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7269 return gen_imagpart (mode, op0);
7308a047
RS
7270
7271 case CONJ_EXPR:
7272 {
62acb978 7273 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 7274 rtx imag_t;
6551fa4d 7275 rtx insns;
7308a047
RS
7276
7277 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7278
7279 if (! target)
d6a5ac33 7280 target = gen_reg_rtx (mode);
7308a047 7281
6551fa4d 7282 start_sequence ();
7308a047
RS
7283
7284 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
7285 emit_move_insn (gen_realpart (partmode, target),
7286 gen_realpart (partmode, op0));
7308a047 7287
62acb978
RK
7288 imag_t = gen_imagpart (partmode, target);
7289 temp = expand_unop (partmode, neg_optab,
7290 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
7291 if (temp != imag_t)
7292 emit_move_insn (imag_t, temp);
7293
6551fa4d
JW
7294 insns = get_insns ();
7295 end_sequence ();
7296
d6a5ac33
RK
7297 /* Conjugate should appear as a single unit
7298 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
7299 each with a separate pseudo as destination.
7300 It's not correct for flow to treat them as a unit. */
6d6e61ce 7301 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
7302 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7303 else
7304 emit_insns (insns);
7308a047
RS
7305
7306 return target;
7307 }
7308
e976b8b2
MS
7309 case TRY_CATCH_EXPR:
7310 {
7311 tree handler = TREE_OPERAND (exp, 1);
7312
7313 expand_eh_region_start ();
7314
7315 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7316
7317 expand_eh_region_end (handler);
7318
7319 return op0;
7320 }
7321
7322 case POPDCC_EXPR:
7323 {
7324 rtx dcc = get_dynamic_cleanup_chain ();
38a448ca 7325 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
e976b8b2
MS
7326 return const0_rtx;
7327 }
7328
7329 case POPDHC_EXPR:
7330 {
7331 rtx dhc = get_dynamic_handler_chain ();
38a448ca 7332 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
e976b8b2
MS
7333 return const0_rtx;
7334 }
7335
bbf6f052 7336 case ERROR_MARK:
66538193
RS
7337 op0 = CONST0_RTX (tmode);
7338 if (op0 != 0)
7339 return op0;
bbf6f052
RK
7340 return const0_rtx;
7341
7342 default:
90764a87 7343 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
7344 }
7345
7346 /* Here to do an ordinary binary operator, generating an instruction
7347 from the optab already placed in `this_optab'. */
7348 binop:
7349 preexpand_calls (exp);
7350 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1)))
7351 subtarget = 0;
7352 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7353 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7354 binop2:
7355 temp = expand_binop (mode, this_optab, op0, op1, target,
7356 unsignedp, OPTAB_LIB_WIDEN);
7357 if (temp == 0)
7358 abort ();
7359 return temp;
7360}
bbf6f052 7361
bbf6f052 7362
b93a436e
JL
7363\f
7364/* Return the alignment in bits of EXP, a pointer valued expression.
7365 But don't return more than MAX_ALIGN no matter what.
7366 The alignment returned is, by default, the alignment of the thing that
7367 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7368
7369 Otherwise, look at the expression to see if we can do better, i.e., if the
7370 expression is actually pointing at an object whose alignment is tighter. */
0f41302f 7371
b93a436e
JL
7372static int
7373get_pointer_alignment (exp, max_align)
7374 tree exp;
7375 unsigned max_align;
bbf6f052 7376{
b93a436e
JL
7377 unsigned align, inner;
7378
7379 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7380 return 0;
7381
7382 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7383 align = MIN (align, max_align);
7384
7385 while (1)
bbf6f052 7386 {
b93a436e 7387 switch (TREE_CODE (exp))
bbf6f052 7388 {
b93a436e
JL
7389 case NOP_EXPR:
7390 case CONVERT_EXPR:
7391 case NON_LVALUE_EXPR:
7392 exp = TREE_OPERAND (exp, 0);
7393 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7394 return align;
7395 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7396 align = MIN (inner, max_align);
7397 break;
7398
7399 case PLUS_EXPR:
7400 /* If sum of pointer + int, restrict our maximum alignment to that
7401 imposed by the integer. If not, we can't do any better than
7402 ALIGN. */
7403 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7404 return align;
7405
7406 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7407 & (max_align - 1))
7408 != 0)
7409 max_align >>= 1;
7410
7411 exp = TREE_OPERAND (exp, 0);
7412 break;
7413
7414 case ADDR_EXPR:
7415 /* See what we are pointing at and look at its alignment. */
7416 exp = TREE_OPERAND (exp, 0);
7417 if (TREE_CODE (exp) == FUNCTION_DECL)
7418 align = FUNCTION_BOUNDARY;
7419 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7420 align = DECL_ALIGN (exp);
7421#ifdef CONSTANT_ALIGNMENT
7422 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7423 align = CONSTANT_ALIGNMENT (exp, align);
c02bd5d9 7424#endif
b93a436e 7425 return MIN (align, max_align);
c02bd5d9 7426
b93a436e
JL
7427 default:
7428 return align;
7429 }
7430 }
7431}
7432\f
7433/* Return the tree node and offset if a given argument corresponds to
7434 a string constant. */
7435
7436static tree
7437string_constant (arg, ptr_offset)
7438 tree arg;
7439 tree *ptr_offset;
7440{
7441 STRIP_NOPS (arg);
7442
7443 if (TREE_CODE (arg) == ADDR_EXPR
7444 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
7445 {
7446 *ptr_offset = integer_zero_node;
7447 return TREE_OPERAND (arg, 0);
7448 }
7449 else if (TREE_CODE (arg) == PLUS_EXPR)
7450 {
7451 tree arg0 = TREE_OPERAND (arg, 0);
7452 tree arg1 = TREE_OPERAND (arg, 1);
7453
7454 STRIP_NOPS (arg0);
7455 STRIP_NOPS (arg1);
7456
7457 if (TREE_CODE (arg0) == ADDR_EXPR
7458 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 7459 {
b93a436e
JL
7460 *ptr_offset = arg1;
7461 return TREE_OPERAND (arg0, 0);
bbf6f052 7462 }
b93a436e
JL
7463 else if (TREE_CODE (arg1) == ADDR_EXPR
7464 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 7465 {
b93a436e
JL
7466 *ptr_offset = arg0;
7467 return TREE_OPERAND (arg1, 0);
bbf6f052 7468 }
b93a436e 7469 }
ca695ac9 7470
b93a436e
JL
7471 return 0;
7472}
ca695ac9 7473
b93a436e
JL
7474/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
7475 way, because it could contain a zero byte in the middle.
7476 TREE_STRING_LENGTH is the size of the character array, not the string.
ca695ac9 7477
b93a436e
JL
7478 Unfortunately, string_constant can't access the values of const char
7479 arrays with initializers, so neither can we do so here. */
e87b4f3f 7480
b93a436e
JL
7481static tree
7482c_strlen (src)
7483 tree src;
7484{
7485 tree offset_node;
7486 int offset, max;
7487 char *ptr;
e7c33f54 7488
b93a436e
JL
7489 src = string_constant (src, &offset_node);
7490 if (src == 0)
7491 return 0;
7492 max = TREE_STRING_LENGTH (src);
7493 ptr = TREE_STRING_POINTER (src);
7494 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
7495 {
7496 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
7497 compute the offset to the following null if we don't know where to
7498 start searching for it. */
7499 int i;
7500 for (i = 0; i < max; i++)
7501 if (ptr[i] == 0)
7502 return 0;
7503 /* We don't know the starting offset, but we do know that the string
7504 has no internal zero bytes. We can assume that the offset falls
7505 within the bounds of the string; otherwise, the programmer deserves
7506 what he gets. Subtract the offset from the length of the string,
7507 and return that. */
7508 /* This would perhaps not be valid if we were dealing with named
7509 arrays in addition to literal string constants. */
7510 return size_binop (MINUS_EXPR, size_int (max), offset_node);
7511 }
e7c33f54 7512
b93a436e
JL
7513 /* We have a known offset into the string. Start searching there for
7514 a null character. */
7515 if (offset_node == 0)
7516 offset = 0;
7517 else
7518 {
7519 /* Did we get a long long offset? If so, punt. */
7520 if (TREE_INT_CST_HIGH (offset_node) != 0)
7521 return 0;
7522 offset = TREE_INT_CST_LOW (offset_node);
7523 }
7524 /* If the offset is known to be out of bounds, warn, and call strlen at
7525 runtime. */
7526 if (offset < 0 || offset > max)
7527 {
7528 warning ("offset outside bounds of constant string");
7529 return 0;
7530 }
7531 /* Use strlen to search for the first zero byte. Since any strings
7532 constructed with build_string will have nulls appended, we win even
7533 if we get handed something like (char[4])"abcd".
e7c33f54 7534
b93a436e
JL
7535 Since OFFSET is our starting index into the string, no further
7536 calculation is needed. */
7537 return size_int (strlen (ptr + offset));
7538}
1bbddf11 7539
b93a436e
JL
/* Expand __builtin_return_address (FNDECL_CODE == BUILT_IN_RETURN_ADDRESS)
   or __builtin_frame_address (FNDECL_CODE == BUILT_IN_FRAME_ADDRESS).
   COUNT is the number of frames to scan up the stack; TEM is the rtx
   for the frame address to start from (the caller normally passes
   hard_frame_pointer_rtx).  Returns an rtx for the requested frame
   address or return address; some ports' RETURN_ADDR_RTX may yield
   NULL for frames they cannot access.  */

rtx
expand_builtin_return_addr (fndecl_code, count, tem)
     enum built_in_function fndecl_code;
     int count;
     rtx tem;
{
  int i;

  /* Some machines need special handling before we can access
     arbitrary frames.  For example, on the sparc, we must first flush
     all register windows to the stack.  */
#ifdef SETUP_FRAME_ADDRESSES
  if (count > 0)
    SETUP_FRAME_ADDRESSES ();
#endif

  /* On the sparc, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
  if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;
#endif

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
#ifdef DYNAMIC_CHAIN_ADDRESS
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
#endif
      tem = memory_address (Pmode, tem);
      tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
    }

  /* For __builtin_frame_address, return what we've got.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return tem;

  /* For __builtin_return_address, Get the return address from that
     frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  /* Default: the return address is saved one word past the frame
     address — assumes the port did not override RETURN_ADDR_RTX.  */
  tem = memory_address (Pmode,
			plus_constant (tem, GET_MODE_SIZE (Pmode)));
  tem = gen_rtx_MEM (Pmode, tem);
#endif
  return tem;
}
e9a25f70 7592
b93a436e
JL
/* __builtin_setjmp is passed a pointer to an array of five words (not
   all will be used on all machines).  It operates similarly to the C
   library function of the same name, but is more efficient.  Much of
   the code below (and for longjmp) is copied from the handling of
   non-local gotos.

   NOTE: This is intended for use by GNAT and the exception handling
   scheme in the compiler and will only work in the method used by
   them.

   BUF_ADDR is the rtx address of the jump buffer; TARGET, if a
   suitable pseudo, receives the result (0 on the direct path, 1 when
   re-entered via longjmp).  Returns the rtx holding that result.  */

rtx
expand_builtin_setjmp (buf_addr, target)
     rtx buf_addr;
     rtx target;
{
  /* LAB1 is the longjmp re-entry point; LAB2 joins both paths.  */
  rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
  enum machine_mode sa_mode = Pmode, value_mode;
  rtx stack_save;
  /* NOTE(review): old_inhibit_defer_pop appears unused in this
     function — confirm against the full file before removing.  */
  int old_inhibit_defer_pop = inhibit_defer_pop;
  /* Bytes the dummy libcall below pops on return, per the target.  */
  int return_pops
    = RETURN_POPS_ARGS (get_identifier ("__dummy"),
			build_function_type (void_type_node, NULL_TREE),
			0);
  rtx next_arg_reg;
  CUMULATIVE_ARGS args_so_far;
  rtx op0;
  int i;

  value_mode = TYPE_MODE (integer_type_node);

#ifdef POINTERS_EXTEND_UNSIGNED
  buf_addr = convert_memory_address (Pmode, buf_addr);
#endif

  buf_addr = force_reg (Pmode, buf_addr);

  /* The result must live in a pseudo register.  */
  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (value_mode);

  emit_queue ();

  /* We store the frame pointer and the address of lab1 in the buffer
     and use the rest of it for the stack save area, which is
     machine-dependent.  */
  emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
		  virtual_stack_vars_rtx);
  emit_move_insn
    (validize_mem (gen_rtx_MEM (Pmode,
				plus_constant (buf_addr,
					       GET_MODE_SIZE (Pmode)))),
     gen_rtx_LABEL_REF (Pmode, lab1));

#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    sa_mode = insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0];
#endif

  /* Stack save area: third slot of the buffer.  */
  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);

#ifdef HAVE_setjmp
  if (HAVE_setjmp)
    emit_insn (gen_setjmp ());
#endif

  /* Set TARGET to zero and branch around the other case.  */
  emit_move_insn (target, const0_rtx);
  emit_jump_insn (gen_jump (lab2));
  emit_barrier ();
  emit_label (lab1);

  /* Note that setjmp clobbers FP when we get here, so we have to make
     sure it's marked as used by this function.  */
  emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));

  /* Now put in the code to restore the frame pointer, and argument
     pointer, if needed.  The code below is from expand_end_bindings
     in stmt.c; see detailed documentation there.  */
#ifdef HAVE_nonlocal_goto
  if (! HAVE_nonlocal_goto)
#endif
    emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);

  current_function_has_nonlocal_label = 1;

#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
  if (fixed_regs[ARG_POINTER_REGNUM])
    {
#ifdef ELIMINABLE_REGS
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  */
      static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == sizeof elim_regs / sizeof elim_regs [0])
#endif
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.
	     If there hasn't be space allocated for it yet, make
	     some now.  */
	  if (arg_pointer_save_area == 0)
	    arg_pointer_save_area
	      = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
	  emit_move_insn (virtual_incoming_args_rtx,
			  copy_to_reg (arg_pointer_save_area));
	}
    }
#endif

#ifdef HAVE_nonlocal_goto_receiver
  if (HAVE_nonlocal_goto_receiver)
    emit_insn (gen_nonlocal_goto_receiver ());
#endif
  /* The static chain pointer contains the address of dummy function.
     We need to call it here to handle some PIC cases of restoring a
     global pointer.  Then return 1.  */
  op0 = copy_to_mode_reg (Pmode, static_chain_rtx);

  /* We can't actually call emit_library_call here, so do everything
     it does, which isn't much for a libfunc with no args.  */
  op0 = memory_address (FUNCTION_MODE, op0);

  INIT_CUMULATIVE_ARGS (args_so_far, NULL_TREE,
			gen_rtx_SYMBOL_REF (Pmode, "__dummy"), 1);
  next_arg_reg = FUNCTION_ARG (args_so_far, VOIDmode, void_type_node, 1);

#ifndef ACCUMULATE_OUTGOING_ARGS
#ifdef HAVE_call_pop
  if (HAVE_call_pop)
    emit_call_insn (gen_call_pop (gen_rtx_MEM (FUNCTION_MODE, op0),
				  const0_rtx, next_arg_reg,
				  GEN_INT (return_pops)));
  else
#endif
#endif

#ifdef HAVE_call
    if (HAVE_call)
      emit_call_insn (gen_call (gen_rtx_MEM (FUNCTION_MODE, op0),
				const0_rtx, next_arg_reg, const0_rtx));
    else
#endif
      abort ();

#ifdef HAVE_builtin_setjmp_receiver
  if (HAVE_builtin_setjmp_receiver)
    emit_insn (gen_builtin_setjmp_receiver ());
#endif

  /* Longjmp path: result is 1.  Both paths join at LAB2.  */
  emit_move_insn (target, const1_rtx);
  emit_label (lab2);
  return target;
}
60bac6ea 7756
b93a436e
JL
7757\f
/* Expand an expression EXP that calls a built-in function,
   with result going to TARGET if that's convenient
   (and in mode MODE if that's convenient).
   SUBTARGET may be used as the target for computing one of EXP's operands.
   IGNORE is nonzero if the value is to be ignored.  */

/* Nonzero if function NODE was invoked under its "__builtin_"-prefixed
   name (as opposed to a plain redeclaration of the same function).  */
#define CALLED_AS_BUILT_IN(NODE) \
   (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
60bac6ea 7766
b93a436e
JL
7767static rtx
7768expand_builtin (exp, target, subtarget, mode, ignore)
7769 tree exp;
7770 rtx target;
7771 rtx subtarget;
7772 enum machine_mode mode;
7773 int ignore;
7774{
7775 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7776 tree arglist = TREE_OPERAND (exp, 1);
7777 rtx op0;
7778 rtx lab1, insns;
7779 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
7780 optab builtin_optab;
60bac6ea 7781
b93a436e
JL
7782 switch (DECL_FUNCTION_CODE (fndecl))
7783 {
7784 case BUILT_IN_ABS:
7785 case BUILT_IN_LABS:
7786 case BUILT_IN_FABS:
7787 /* build_function_call changes these into ABS_EXPR. */
7788 abort ();
4ed67205 7789
b93a436e
JL
7790 case BUILT_IN_SIN:
7791 case BUILT_IN_COS:
7792 /* Treat these like sqrt, but only if the user asks for them. */
7793 if (! flag_fast_math)
7794 break;
7795 case BUILT_IN_FSQRT:
7796 /* If not optimizing, call the library function. */
7797 if (! optimize)
7798 break;
4ed67205 7799
b93a436e
JL
7800 if (arglist == 0
7801 /* Arg could be wrong type if user redeclared this fcn wrong. */
7802 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
4ed67205
RK
7803 break;
7804
b93a436e
JL
7805 /* Stabilize and compute the argument. */
7806 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
7807 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
7808 {
7809 exp = copy_node (exp);
7810 arglist = copy_node (arglist);
7811 TREE_OPERAND (exp, 1) = arglist;
7812 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
7813 }
7814 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
b089937a 7815
b93a436e
JL
7816 /* Make a suitable register to place result in. */
7817 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7565a035 7818
b93a436e
JL
7819 emit_queue ();
7820 start_sequence ();
7565a035 7821
b93a436e
JL
7822 switch (DECL_FUNCTION_CODE (fndecl))
7823 {
7824 case BUILT_IN_SIN:
7825 builtin_optab = sin_optab; break;
7826 case BUILT_IN_COS:
7827 builtin_optab = cos_optab; break;
7828 case BUILT_IN_FSQRT:
7829 builtin_optab = sqrt_optab; break;
7830 default:
7831 abort ();
7832 }
4ed67205 7833
b93a436e
JL
7834 /* Compute into TARGET.
7835 Set TARGET to wherever the result comes back. */
7836 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
7837 builtin_optab, op0, target, 0);
7838
7839 /* If we were unable to expand via the builtin, stop the
7840 sequence (without outputting the insns) and break, causing
7841 a call the the library function. */
7842 if (target == 0)
4ed67205 7843 {
b93a436e
JL
7844 end_sequence ();
7845 break;
7846 }
4ed67205 7847
b93a436e
JL
7848 /* Check the results by default. But if flag_fast_math is turned on,
7849 then assume sqrt will always be called with valid arguments. */
4ed67205 7850
b93a436e
JL
7851 if (! flag_fast_math)
7852 {
7853 /* Don't define the builtin FP instructions
7854 if your machine is not IEEE. */
7855 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
7856 abort ();
4ed67205 7857
b93a436e 7858 lab1 = gen_label_rtx ();
ca55abae 7859
b93a436e
JL
7860 /* Test the result; if it is NaN, set errno=EDOM because
7861 the argument was not in the domain. */
7862 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
7863 emit_jump_insn (gen_beq (lab1));
7864
7865#ifdef TARGET_EDOM
7866 {
7867#ifdef GEN_ERRNO_RTX
7868 rtx errno_rtx = GEN_ERRNO_RTX;
7869#else
7870 rtx errno_rtx
7871 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
7872#endif
e87b4f3f 7873
b93a436e
JL
7874 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
7875 }
7876#else
7877 /* We can't set errno=EDOM directly; let the library call do it.
7878 Pop the arguments right away in case the call gets deleted. */
7879 NO_DEFER_POP;
7880 expand_call (exp, target, 0);
7881 OK_DEFER_POP;
7882#endif
e7c33f54 7883
b93a436e
JL
7884 emit_label (lab1);
7885 }
0006469d 7886
b93a436e
JL
7887 /* Output the entire sequence. */
7888 insns = get_insns ();
7889 end_sequence ();
7890 emit_insns (insns);
7891
7892 return target;
0006469d 7893
b93a436e
JL
7894 case BUILT_IN_FMOD:
7895 break;
0006469d 7896
b93a436e
JL
7897 /* __builtin_apply_args returns block of memory allocated on
7898 the stack into which is stored the arg pointer, structure
7899 value address, static chain, and all the registers that might
7900 possibly be used in performing a function call. The code is
7901 moved to the start of the function so the incoming values are
7902 saved. */
7903 case BUILT_IN_APPLY_ARGS:
7904 /* Don't do __builtin_apply_args more than once in a function.
7905 Save the result of the first call and reuse it. */
7906 if (apply_args_value != 0)
7907 return apply_args_value;
7908 {
7909 /* When this function is called, it means that registers must be
7910 saved on entry to this function. So we migrate the
7911 call to the first insn of this function. */
7912 rtx temp;
7913 rtx seq;
0006469d 7914
b93a436e
JL
7915 start_sequence ();
7916 temp = expand_builtin_apply_args ();
7917 seq = get_insns ();
7918 end_sequence ();
0006469d 7919
b93a436e 7920 apply_args_value = temp;
0006469d 7921
b93a436e
JL
7922 /* Put the sequence after the NOTE that starts the function.
7923 If this is inside a SEQUENCE, make the outer-level insn
7924 chain current, so the code is placed at the start of the
7925 function. */
7926 push_topmost_sequence ();
7927 emit_insns_before (seq, NEXT_INSN (get_insns ()));
7928 pop_topmost_sequence ();
7929 return temp;
7930 }
0006469d 7931
b93a436e
JL
7932 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
7933 FUNCTION with a copy of the parameters described by
7934 ARGUMENTS, and ARGSIZE. It returns a block of memory
7935 allocated on the stack into which is stored all the registers
7936 that might possibly be used for returning the result of a
7937 function. ARGUMENTS is the value returned by
7938 __builtin_apply_args. ARGSIZE is the number of bytes of
7939 arguments that must be copied. ??? How should this value be
7940 computed? We'll also need a safe worst case value for varargs
7941 functions. */
7942 case BUILT_IN_APPLY:
7943 if (arglist == 0
7944 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7945 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
7946 || TREE_CHAIN (arglist) == 0
7947 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
7948 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
7949 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
7950 return const0_rtx;
7951 else
7952 {
7953 int i;
7954 tree t;
7955 rtx ops[3];
0006469d 7956
b93a436e
JL
7957 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
7958 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
bbf6f052 7959
b93a436e
JL
7960 return expand_builtin_apply (ops[0], ops[1], ops[2]);
7961 }
bbf6f052 7962
b93a436e
JL
7963 /* __builtin_return (RESULT) causes the function to return the
7964 value described by RESULT. RESULT is address of the block of
7965 memory returned by __builtin_apply. */
7966 case BUILT_IN_RETURN:
7967 if (arglist
7968 /* Arg could be non-pointer if user redeclared this fcn wrong. */
7969 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
7970 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
7971 NULL_RTX, VOIDmode, 0));
7972 return const0_rtx;
ca695ac9 7973
b93a436e
JL
7974 case BUILT_IN_SAVEREGS:
7975 /* Don't do __builtin_saveregs more than once in a function.
7976 Save the result of the first call and reuse it. */
7977 if (saveregs_value != 0)
7978 return saveregs_value;
7979 {
7980 /* When this function is called, it means that registers must be
7981 saved on entry to this function. So we migrate the
7982 call to the first insn of this function. */
7983 rtx temp;
7984 rtx seq;
ca695ac9 7985
b93a436e
JL
7986 /* Now really call the function. `expand_call' does not call
7987 expand_builtin, so there is no danger of infinite recursion here. */
7988 start_sequence ();
ca695ac9 7989
b93a436e
JL
7990#ifdef EXPAND_BUILTIN_SAVEREGS
7991 /* Do whatever the machine needs done in this case. */
7992 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
7993#else
7994 /* The register where the function returns its value
7995 is likely to have something else in it, such as an argument.
7996 So preserve that register around the call. */
ca695ac9 7997
b93a436e
JL
7998 if (value_mode != VOIDmode)
7999 {
8000 rtx valreg = hard_libcall_value (value_mode);
8001 rtx saved_valreg = gen_reg_rtx (value_mode);
ca695ac9 8002
b93a436e
JL
8003 emit_move_insn (saved_valreg, valreg);
8004 temp = expand_call (exp, target, ignore);
8005 emit_move_insn (valreg, saved_valreg);
ca695ac9
JB
8006 }
8007 else
b93a436e
JL
8008 /* Generate the call, putting the value in a pseudo. */
8009 temp = expand_call (exp, target, ignore);
8010#endif
bbf6f052 8011
b93a436e
JL
8012 seq = get_insns ();
8013 end_sequence ();
bbf6f052 8014
b93a436e 8015 saveregs_value = temp;
bbf6f052 8016
b93a436e
JL
8017 /* Put the sequence after the NOTE that starts the function.
8018 If this is inside a SEQUENCE, make the outer-level insn
8019 chain current, so the code is placed at the start of the
8020 function. */
8021 push_topmost_sequence ();
8022 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8023 pop_topmost_sequence ();
8024 return temp;
8025 }
bbf6f052 8026
b93a436e
JL
8027 /* __builtin_args_info (N) returns word N of the arg space info
8028 for the current function. The number and meanings of words
8029 is controlled by the definition of CUMULATIVE_ARGS. */
8030 case BUILT_IN_ARGS_INFO:
8031 {
8032 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
8033 int i;
8034 int *word_ptr = (int *) &current_function_args_info;
8035 tree type, elts, result;
bbf6f052 8036
b93a436e
JL
8037 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8038 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8039 __FILE__, __LINE__);
bbf6f052 8040
b93a436e
JL
8041 if (arglist != 0)
8042 {
8043 tree arg = TREE_VALUE (arglist);
8044 if (TREE_CODE (arg) != INTEGER_CST)
8045 error ("argument of `__builtin_args_info' must be constant");
8046 else
8047 {
8048 int wordnum = TREE_INT_CST_LOW (arg);
bbf6f052 8049
b93a436e
JL
8050 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8051 error ("argument of `__builtin_args_info' out of range");
8052 else
8053 return GEN_INT (word_ptr[wordnum]);
8054 }
bbf6f052
RK
8055 }
8056 else
b93a436e 8057 error ("missing argument in `__builtin_args_info'");
bbf6f052 8058
b93a436e 8059 return const0_rtx;
bbf6f052 8060
b93a436e
JL
8061#if 0
8062 for (i = 0; i < nwords; i++)
8063 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
bbf6f052 8064
b93a436e
JL
8065 type = build_array_type (integer_type_node,
8066 build_index_type (build_int_2 (nwords, 0)));
8067 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8068 TREE_CONSTANT (result) = 1;
8069 TREE_STATIC (result) = 1;
8070 result = build (INDIRECT_REF, build_pointer_type (type), result);
8071 TREE_CONSTANT (result) = 1;
8072 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8073#endif
8074 }
8075
8076 /* Return the address of the first anonymous stack arg. */
8077 case BUILT_IN_NEXT_ARG:
ca695ac9 8078 {
b93a436e
JL
8079 tree fntype = TREE_TYPE (current_function_decl);
8080
8081 if ((TYPE_ARG_TYPES (fntype) == 0
8082 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8083 == void_type_node))
8084 && ! current_function_varargs)
8085 {
8086 error ("`va_start' used in function with fixed args");
8087 return const0_rtx;
8088 }
8089
8090 if (arglist)
8091 {
8092 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8093 tree arg = TREE_VALUE (arglist);
8094
8095 /* Strip off all nops for the sake of the comparison. This
8096 is not quite the same as STRIP_NOPS. It does more.
8097 We must also strip off INDIRECT_EXPR for C++ reference
8098 parameters. */
8099 while (TREE_CODE (arg) == NOP_EXPR
8100 || TREE_CODE (arg) == CONVERT_EXPR
8101 || TREE_CODE (arg) == NON_LVALUE_EXPR
8102 || TREE_CODE (arg) == INDIRECT_REF)
8103 arg = TREE_OPERAND (arg, 0);
8104 if (arg != last_parm)
8105 warning ("second parameter of `va_start' not last named argument");
8106 }
8107 else if (! current_function_varargs)
8108 /* Evidently an out of date version of <stdarg.h>; can't validate
8109 va_start's second argument, but can still work as intended. */
8110 warning ("`__builtin_next_arg' called without an argument");
bbf6f052
RK
8111 }
8112
b93a436e
JL
8113 return expand_binop (Pmode, add_optab,
8114 current_function_internal_arg_pointer,
8115 current_function_arg_offset_rtx,
8116 NULL_RTX, 0, OPTAB_LIB_WIDEN);
ca695ac9 8117
b93a436e
JL
8118 case BUILT_IN_CLASSIFY_TYPE:
8119 if (arglist != 0)
8120 {
8121 tree type = TREE_TYPE (TREE_VALUE (arglist));
8122 enum tree_code code = TREE_CODE (type);
8123 if (code == VOID_TYPE)
8124 return GEN_INT (void_type_class);
8125 if (code == INTEGER_TYPE)
8126 return GEN_INT (integer_type_class);
8127 if (code == CHAR_TYPE)
8128 return GEN_INT (char_type_class);
8129 if (code == ENUMERAL_TYPE)
8130 return GEN_INT (enumeral_type_class);
8131 if (code == BOOLEAN_TYPE)
8132 return GEN_INT (boolean_type_class);
8133 if (code == POINTER_TYPE)
8134 return GEN_INT (pointer_type_class);
8135 if (code == REFERENCE_TYPE)
8136 return GEN_INT (reference_type_class);
8137 if (code == OFFSET_TYPE)
8138 return GEN_INT (offset_type_class);
8139 if (code == REAL_TYPE)
8140 return GEN_INT (real_type_class);
8141 if (code == COMPLEX_TYPE)
8142 return GEN_INT (complex_type_class);
8143 if (code == FUNCTION_TYPE)
8144 return GEN_INT (function_type_class);
8145 if (code == METHOD_TYPE)
8146 return GEN_INT (method_type_class);
8147 if (code == RECORD_TYPE)
8148 return GEN_INT (record_type_class);
8149 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8150 return GEN_INT (union_type_class);
8151 if (code == ARRAY_TYPE)
8152 {
8153 if (TYPE_STRING_FLAG (type))
8154 return GEN_INT (string_type_class);
8155 else
8156 return GEN_INT (array_type_class);
8157 }
8158 if (code == SET_TYPE)
8159 return GEN_INT (set_type_class);
8160 if (code == FILE_TYPE)
8161 return GEN_INT (file_type_class);
8162 if (code == LANG_TYPE)
8163 return GEN_INT (lang_type_class);
8164 }
8165 return GEN_INT (no_type_class);
ca695ac9 8166
b93a436e
JL
8167 case BUILT_IN_CONSTANT_P:
8168 if (arglist == 0)
8169 return const0_rtx;
8170 else
8171 {
8172 tree arg = TREE_VALUE (arglist);
ca695ac9 8173
b93a436e
JL
8174 STRIP_NOPS (arg);
8175 return (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
8176 || (TREE_CODE (arg) == ADDR_EXPR
8177 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8178 ? const1_rtx : const0_rtx);
8179 }
ca695ac9 8180
b93a436e
JL
8181 case BUILT_IN_FRAME_ADDRESS:
8182 /* The argument must be a nonnegative integer constant.
8183 It counts the number of frames to scan up the stack.
8184 The value is the address of that frame. */
8185 case BUILT_IN_RETURN_ADDRESS:
8186 /* The argument must be a nonnegative integer constant.
8187 It counts the number of frames to scan up the stack.
8188 The value is the return address saved in that frame. */
8189 if (arglist == 0)
8190 /* Warning about missing arg was already issued. */
8191 return const0_rtx;
8192 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8193 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8194 {
8195 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8196 error ("invalid arg to `__builtin_frame_address'");
8197 else
8198 error ("invalid arg to `__builtin_return_address'");
8199 return const0_rtx;
8200 }
8201 else
8202 {
8203 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8204 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8205 hard_frame_pointer_rtx);
ee33823f 8206
b93a436e
JL
8207 /* Some ports cannot access arbitrary stack frames. */
8208 if (tem == NULL)
8209 {
8210 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8211 warning ("unsupported arg to `__builtin_frame_address'");
8212 else
8213 warning ("unsupported arg to `__builtin_return_address'");
8214 return const0_rtx;
8215 }
ee33823f 8216
b93a436e
JL
8217 /* For __builtin_frame_address, return what we've got. */
8218 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8219 return tem;
ee33823f 8220
b93a436e
JL
8221 if (GET_CODE (tem) != REG)
8222 tem = copy_to_reg (tem);
8223 return tem;
8224 }
ee33823f 8225
b93a436e
JL
8226 /* Returns the address of the area where the structure is returned.
8227 0 otherwise. */
8228 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8229 if (arglist != 0
8230 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8231 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8232 return const0_rtx;
8233 else
8234 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
ee33823f 8235
b93a436e
JL
8236 case BUILT_IN_ALLOCA:
8237 if (arglist == 0
8238 /* Arg could be non-integer if user redeclared this fcn wrong. */
8239 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8240 break;
bbf6f052 8241
b93a436e
JL
8242 /* Compute the argument. */
8243 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
bbf6f052 8244
b93a436e
JL
8245 /* Allocate the desired space. */
8246 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
ca695ac9 8247
b93a436e
JL
8248 case BUILT_IN_FFS:
8249 /* If not optimizing, call the library function. */
8250 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8251 break;
ca695ac9 8252
b93a436e
JL
8253 if (arglist == 0
8254 /* Arg could be non-integer if user redeclared this fcn wrong. */
8255 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8256 break;
ca695ac9 8257
b93a436e
JL
8258 /* Compute the argument. */
8259 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8260 /* Compute ffs, into TARGET if possible.
8261 Set TARGET to wherever the result comes back. */
8262 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8263 ffs_optab, op0, target, 1);
8264 if (target == 0)
8265 abort ();
8266 return target;
bbf6f052 8267
b93a436e
JL
8268 case BUILT_IN_STRLEN:
8269 /* If not optimizing, call the library function. */
8270 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8271 break;
bbf6f052 8272
b93a436e
JL
8273 if (arglist == 0
8274 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8275 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8276 break;
8277 else
8278 {
8279 tree src = TREE_VALUE (arglist);
8280 tree len = c_strlen (src);
bbf6f052 8281
b93a436e
JL
8282 int align
8283 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
46b68a37 8284
b93a436e
JL
8285 rtx result, src_rtx, char_rtx;
8286 enum machine_mode insn_mode = value_mode, char_mode;
8287 enum insn_code icode;
46b68a37 8288
b93a436e
JL
8289 /* If the length is known, just return it. */
8290 if (len != 0)
8291 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
956d6950 8292
b93a436e
JL
8293 /* If SRC is not a pointer type, don't do this operation inline. */
8294 if (align == 0)
8295 break;
bbf6f052 8296
b93a436e 8297 /* Call a function if we can't compute strlen in the right mode. */
bbf6f052 8298
b93a436e
JL
8299 while (insn_mode != VOIDmode)
8300 {
8301 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8302 if (icode != CODE_FOR_nothing)
8303 break;
ca695ac9 8304
b93a436e
JL
8305 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8306 }
8307 if (insn_mode == VOIDmode)
8308 break;
ca695ac9 8309
b93a436e
JL
8310 /* Make a place to write the result of the instruction. */
8311 result = target;
8312 if (! (result != 0
8313 && GET_CODE (result) == REG
8314 && GET_MODE (result) == insn_mode
8315 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8316 result = gen_reg_rtx (insn_mode);
ca695ac9 8317
b93a436e 8318 /* Make sure the operands are acceptable to the predicates. */
ca695ac9 8319
b93a436e
JL
8320 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8321 result = gen_reg_rtx (insn_mode);
8322 src_rtx = memory_address (BLKmode,
8323 expand_expr (src, NULL_RTX, ptr_mode,
8324 EXPAND_NORMAL));
bbf6f052 8325
b93a436e
JL
8326 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8327 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
bbf6f052 8328
b93a436e
JL
8329 /* Check the string is readable and has an end. */
8330 if (flag_check_memory_usage)
8331 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
8332 src_rtx, ptr_mode,
8333 GEN_INT (MEMORY_USE_RO),
8334 TYPE_MODE (integer_type_node));
bbf6f052 8335
b93a436e
JL
8336 char_rtx = const0_rtx;
8337 char_mode = insn_operand_mode[(int)icode][2];
8338 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
8339 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
bbf6f052 8340
b93a436e
JL
8341 emit_insn (GEN_FCN (icode) (result,
8342 gen_rtx_MEM (BLKmode, src_rtx),
8343 char_rtx, GEN_INT (align)));
bbf6f052 8344
b93a436e
JL
8345 /* Return the value in the proper mode for this function. */
8346 if (GET_MODE (result) == value_mode)
8347 return result;
8348 else if (target != 0)
8349 {
8350 convert_move (target, result, 0);
8351 return target;
8352 }
8353 else
8354 return convert_to_mode (value_mode, result, 0);
8355 }
bbf6f052 8356
b93a436e
JL
8357 case BUILT_IN_STRCPY:
8358 /* If not optimizing, call the library function. */
8359 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8360 break;
bbf6f052 8361
b93a436e
JL
8362 if (arglist == 0
8363 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8364 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8365 || TREE_CHAIN (arglist) == 0
8366 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8367 break;
8368 else
8369 {
8370 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
bbf6f052 8371
b93a436e
JL
8372 if (len == 0)
8373 break;
bbf6f052 8374
b93a436e 8375 len = size_binop (PLUS_EXPR, len, integer_one_node);
6d100794 8376
b93a436e
JL
8377 chainon (arglist, build_tree_list (NULL_TREE, len));
8378 }
6d100794 8379
b93a436e
JL
8380 /* Drops in. */
8381 case BUILT_IN_MEMCPY:
8382 /* If not optimizing, call the library function. */
8383 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8384 break;
e7c33f54 8385
b93a436e
JL
8386 if (arglist == 0
8387 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8388 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8389 || TREE_CHAIN (arglist) == 0
8390 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8391 != POINTER_TYPE)
8392 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8393 || (TREE_CODE (TREE_TYPE (TREE_VALUE
8394 (TREE_CHAIN (TREE_CHAIN (arglist)))))
8395 != INTEGER_TYPE))
8396 break;
8397 else
8398 {
8399 tree dest = TREE_VALUE (arglist);
8400 tree src = TREE_VALUE (TREE_CHAIN (arglist));
8401 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8402 tree type;
e7c33f54 8403
b93a436e
JL
8404 int src_align
8405 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8406 int dest_align
8407 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8408 rtx dest_rtx, dest_mem, src_mem, src_rtx, dest_addr, len_rtx;
e7c33f54 8409
b93a436e
JL
8410 /* If either SRC or DEST is not a pointer type, don't do
8411 this operation in-line. */
8412 if (src_align == 0 || dest_align == 0)
8413 {
8414 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
8415 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8416 break;
8417 }
e7c33f54 8418
b93a436e
JL
8419 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8420 dest_mem = gen_rtx_MEM (BLKmode,
8421 memory_address (BLKmode, dest_rtx));
8422 /* There could be a void* cast on top of the object. */
8423 while (TREE_CODE (dest) == NOP_EXPR)
8424 dest = TREE_OPERAND (dest, 0);
8425 type = TREE_TYPE (TREE_TYPE (dest));
8426 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
8427 src_rtx = expand_expr (src, NULL_RTX, ptr_mode, EXPAND_SUM);
8428 src_mem = gen_rtx_MEM (BLKmode,
8429 memory_address (BLKmode, src_rtx));
8430 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
e7c33f54 8431
b93a436e
JL
8432 /* Just copy the rights of SRC to the rights of DEST. */
8433 if (flag_check_memory_usage)
8434 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
8435 dest_rtx, ptr_mode,
8436 src_rtx, ptr_mode,
8437 len_rtx, TYPE_MODE (sizetype));
e7c33f54 8438
b93a436e
JL
8439 /* There could be a void* cast on top of the object. */
8440 while (TREE_CODE (src) == NOP_EXPR)
8441 src = TREE_OPERAND (src, 0);
8442 type = TREE_TYPE (TREE_TYPE (src));
8443 MEM_IN_STRUCT_P (src_mem) = AGGREGATE_TYPE_P (type);
e7c33f54 8444
b93a436e
JL
8445 /* Copy word part most expediently. */
8446 dest_addr
8447 = emit_block_move (dest_mem, src_mem, len_rtx,
8448 MIN (src_align, dest_align));
e7c33f54 8449
b93a436e
JL
8450 if (dest_addr == 0)
8451 dest_addr = force_operand (dest_rtx, NULL_RTX);
e7c33f54 8452
b93a436e
JL
8453 return dest_addr;
8454 }
e7c33f54 8455
b93a436e
JL
8456 case BUILT_IN_MEMSET:
8457 /* If not optimizing, call the library function. */
8458 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8459 break;
e7c33f54 8460
b93a436e
JL
8461 if (arglist == 0
8462 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8463 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8464 || TREE_CHAIN (arglist) == 0
8465 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
8466 != INTEGER_TYPE)
8467 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8468 || (INTEGER_TYPE
8469 != (TREE_CODE (TREE_TYPE
8470 (TREE_VALUE
8471 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
8472 break;
8473 else
8474 {
8475 tree dest = TREE_VALUE (arglist);
8476 tree val = TREE_VALUE (TREE_CHAIN (arglist));
8477 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8478 tree type;
e7c33f54 8479
b93a436e
JL
8480 int dest_align
8481 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8482 rtx dest_rtx, dest_mem, dest_addr, len_rtx;
e7c33f54 8483
b93a436e
JL
8484 /* If DEST is not a pointer type, don't do this
8485 operation in-line. */
8486 if (dest_align == 0)
8487 break;
bbf6f052 8488
b93a436e
JL
8489 /* If VAL is not 0, don't do this operation in-line. */
8490 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
8491 break;
bbf6f052 8492
b93a436e
JL
8493 /* If LEN does not expand to a constant, don't do this
8494 operation in-line. */
8495 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
8496 if (GET_CODE (len_rtx) != CONST_INT)
8497 break;
bbf6f052 8498
b93a436e
JL
8499 dest_rtx = expand_expr (dest, NULL_RTX, ptr_mode, EXPAND_SUM);
8500 dest_mem = gen_rtx_MEM (BLKmode,
8501 memory_address (BLKmode, dest_rtx));
8502
8503 /* Just check DST is writable and mark it as readable. */
8504 if (flag_check_memory_usage)
8505 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8506 dest_rtx, ptr_mode,
8507 len_rtx, TYPE_MODE (sizetype),
8508 GEN_INT (MEMORY_USE_WO),
8509 TYPE_MODE (integer_type_node));
bbf6f052 8510
b93a436e
JL
8511 /* There could be a void* cast on top of the object. */
8512 while (TREE_CODE (dest) == NOP_EXPR)
8513 dest = TREE_OPERAND (dest, 0);
8514 type = TREE_TYPE (TREE_TYPE (dest));
8515 MEM_IN_STRUCT_P (dest_mem) = AGGREGATE_TYPE_P (type);
bbf6f052 8516
b93a436e 8517 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
bbf6f052 8518
b93a436e
JL
8519 if (dest_addr == 0)
8520 dest_addr = force_operand (dest_rtx, NULL_RTX);
bbf6f052 8521
b93a436e
JL
8522 return dest_addr;
8523 }
bbf6f052 8524
b93a436e
JL
8525/* These comparison functions need an instruction that returns an actual
8526 index. An ordinary compare that just sets the condition codes
8527 is not enough. */
8528#ifdef HAVE_cmpstrsi
8529 case BUILT_IN_STRCMP:
8530 /* If not optimizing, call the library function. */
8531 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8532 break;
bbf6f052 8533
b93a436e
JL
8534 /* If we need to check memory accesses, call the library function. */
8535 if (flag_check_memory_usage)
8536 break;
bbf6f052 8537
b93a436e
JL
8538 if (arglist == 0
8539 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8540 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8541 || TREE_CHAIN (arglist) == 0
8542 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
8543 break;
8544 else if (!HAVE_cmpstrsi)
8545 break;
8546 {
8547 tree arg1 = TREE_VALUE (arglist);
8548 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8549 tree offset;
8550 tree len, len2;
a97f5a86 8551
b93a436e
JL
8552 len = c_strlen (arg1);
8553 if (len)
8554 len = size_binop (PLUS_EXPR, integer_one_node, len);
8555 len2 = c_strlen (arg2);
8556 if (len2)
8557 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
e9cdf6e4 8558
b93a436e
JL
8559 /* If we don't have a constant length for the first, use the length
8560 of the second, if we know it. We don't require a constant for
8561 this case; some cost analysis could be done if both are available
8562 but neither is constant. For now, assume they're equally cheap.
e9cdf6e4 8563
b93a436e
JL
8564 If both strings have constant lengths, use the smaller. This
8565 could arise if optimization results in strcpy being called with
8566 two fixed strings, or if the code was machine-generated. We should
8567 add some code to the `memcmp' handler below to deal with such
8568 situations, someday. */
8569 if (!len || TREE_CODE (len) != INTEGER_CST)
8570 {
8571 if (len2)
8572 len = len2;
8573 else if (len == 0)
8574 break;
8575 }
8576 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
8577 {
8578 if (tree_int_cst_lt (len2, len))
8579 len = len2;
8580 }
bbf6f052 8581
b93a436e
JL
8582 chainon (arglist, build_tree_list (NULL_TREE, len));
8583 }
bbf6f052 8584
b93a436e
JL
8585 /* Drops in. */
8586 case BUILT_IN_MEMCMP:
8587 /* If not optimizing, call the library function. */
8588 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8589 break;
bbf6f052 8590
b93a436e
JL
8591 /* If we need to check memory accesses, call the library function. */
8592 if (flag_check_memory_usage)
8593 break;
bbf6f052 8594
b93a436e
JL
8595 if (arglist == 0
8596 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8597 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
8598 || TREE_CHAIN (arglist) == 0
8599 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8600 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8601 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8602 break;
8603 else if (!HAVE_cmpstrsi)
8604 break;
8605 {
8606 tree arg1 = TREE_VALUE (arglist);
8607 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
8608 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
8609 rtx result;
0842a179 8610
b93a436e
JL
8611 int arg1_align
8612 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8613 int arg2_align
8614 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
8615 enum machine_mode insn_mode
8616 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
0842a179 8617
b93a436e
JL
8618 /* If we don't have POINTER_TYPE, call the function. */
8619 if (arg1_align == 0 || arg2_align == 0)
8620 {
8621 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
8622 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
8623 break;
8624 }
bbf6f052 8625
b93a436e
JL
8626 /* Make a place to write the result of the instruction. */
8627 result = target;
8628 if (! (result != 0
8629 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
8630 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8631 result = gen_reg_rtx (insn_mode);
bbf6f052 8632
b93a436e
JL
8633 emit_insn (gen_cmpstrsi (result,
8634 gen_rtx_MEM (BLKmode,
8635 expand_expr (arg1, NULL_RTX,
8636 ptr_mode,
8637 EXPAND_NORMAL)),
8638 gen_rtx_MEM (BLKmode,
8639 expand_expr (arg2, NULL_RTX,
8640 ptr_mode,
8641 EXPAND_NORMAL)),
8642 expand_expr (len, NULL_RTX, VOIDmode, 0),
8643 GEN_INT (MIN (arg1_align, arg2_align))));
bbf6f052 8644
b93a436e
JL
8645 /* Return the value in the proper mode for this function. */
8646 mode = TYPE_MODE (TREE_TYPE (exp));
8647 if (GET_MODE (result) == mode)
8648 return result;
8649 else if (target != 0)
8650 {
8651 convert_move (target, result, 0);
8652 return target;
8653 }
8654 else
8655 return convert_to_mode (mode, result, 0);
8656 }
8657#else
8658 case BUILT_IN_STRCMP:
8659 case BUILT_IN_MEMCMP:
8660 break;
8661#endif
bbf6f052 8662
b93a436e
JL
8663 case BUILT_IN_SETJMP:
8664 if (arglist == 0
8665 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8666 break;
bbf6f052 8667
b93a436e
JL
8668 {
8669 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
8670 VOIDmode, 0);
8671 return expand_builtin_setjmp (buf_addr, target);
8672 }
bbf6f052 8673
b93a436e
JL
8674 /* __builtin_longjmp is passed a pointer to an array of five words
8675 and a value, which is a dummy. It's similar to the C library longjmp
8676 function but works with __builtin_setjmp above. */
8677 case BUILT_IN_LONGJMP:
8678 if (arglist == 0 || TREE_CHAIN (arglist) == 0
8679 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8680 break;
bbf6f052 8681
b93a436e
JL
8682 {
8683 tree dummy_id = get_identifier ("__dummy");
8684 tree dummy_type = build_function_type (void_type_node, NULL_TREE);
8685 tree dummy_decl = build_decl (FUNCTION_DECL, dummy_id, dummy_type);
8686#ifdef POINTERS_EXTEND_UNSIGNED
8687 rtx buf_addr
8688 = force_reg (Pmode,
8689 convert_memory_address
8690 (Pmode,
8691 expand_expr (TREE_VALUE (arglist),
8692 NULL_RTX, VOIDmode, 0)));
8693#else
8694 rtx buf_addr
8695 = force_reg (Pmode, expand_expr (TREE_VALUE (arglist),
8696 NULL_RTX,
8697 VOIDmode, 0));
8698#endif
8699 rtx fp = gen_rtx_MEM (Pmode, buf_addr);
8700 rtx lab = gen_rtx_MEM (Pmode,
8701 plus_constant (buf_addr,
8702 GET_MODE_SIZE (Pmode)));
8703 enum machine_mode sa_mode
8704#ifdef HAVE_save_stack_nonlocal
8705 = (HAVE_save_stack_nonlocal
8706 ? insn_operand_mode[(int) CODE_FOR_save_stack_nonlocal][0]
8707 : Pmode);
8708#else
8709 = Pmode;
8710#endif
8711 rtx stack = gen_rtx_MEM (sa_mode,
8712 plus_constant (buf_addr,
8713 2 * GET_MODE_SIZE (Pmode)));
bbf6f052 8714
b93a436e
JL
8715 DECL_EXTERNAL (dummy_decl) = 1;
8716 TREE_PUBLIC (dummy_decl) = 1;
8717 make_decl_rtl (dummy_decl, NULL_PTR, 1);
bbf6f052 8718
b93a436e
JL
8719 /* Expand the second expression just for side-effects. */
8720 expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
8721 const0_rtx, VOIDmode, 0);
bbf6f052 8722
b93a436e 8723 assemble_external (dummy_decl);
bbf6f052 8724
b93a436e
JL
8725 /* Pick up FP, label, and SP from the block and jump. This code is
8726 from expand_goto in stmt.c; see there for detailed comments. */
8727#if HAVE_nonlocal_goto
8728 if (HAVE_nonlocal_goto)
8729 emit_insn (gen_nonlocal_goto (fp, lab, stack,
8730 XEXP (DECL_RTL (dummy_decl), 0)));
8731 else
8732#endif
8733 {
8734 lab = copy_to_reg (lab);
8735 emit_move_insn (hard_frame_pointer_rtx, fp);
8736 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
bbf6f052 8737
b93a436e
JL
8738 /* Put in the static chain register the address of the dummy
8739 function. */
8740 emit_move_insn (static_chain_rtx, XEXP (DECL_RTL (dummy_decl), 0));
8741 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8742 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
8743 emit_insn (gen_rtx_USE (VOIDmode, static_chain_rtx));
8744 emit_indirect_jump (lab);
8745 }
bbf6f052 8746
b93a436e 8747 return const0_rtx;
ca695ac9 8748 }
904762c8 8749
b93a436e
JL
8750 /* Various hooks for the DWARF 2 __throw routine. */
8751 case BUILT_IN_UNWIND_INIT:
8752 expand_builtin_unwind_init ();
8753 return const0_rtx;
8754 case BUILT_IN_FP:
8755 return frame_pointer_rtx;
8756 case BUILT_IN_SP:
8757 return stack_pointer_rtx;
8758#ifdef DWARF2_UNWIND_INFO
8759 case BUILT_IN_DWARF_FP_REGNUM:
8760 return expand_builtin_dwarf_fp_regnum ();
8761 case BUILT_IN_DWARF_REG_SIZE:
8762 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
fb2ca25a 8763#endif
b93a436e
JL
8764 case BUILT_IN_FROB_RETURN_ADDR:
8765 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
8766 case BUILT_IN_EXTRACT_RETURN_ADDR:
8767 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
8768 case BUILT_IN_SET_RETURN_ADDR_REG:
8769 expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
8770 return const0_rtx;
8771 case BUILT_IN_EH_STUB:
8772 return expand_builtin_eh_stub ();
8773 case BUILT_IN_SET_EH_REGS:
8774 expand_builtin_set_eh_regs (TREE_VALUE (arglist),
8775 TREE_VALUE (TREE_CHAIN (arglist)));
8776 return const0_rtx;
ca695ac9 8777
b93a436e
JL
8778 default: /* just do library call, if unknown builtin */
8779 error ("built-in function `%s' not currently supported",
8780 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
ca695ac9 8781 }
0006469d 8782
b93a436e
JL
8783 /* The switch statement above can drop through to cause the function
8784 to be called normally. */
0006469d 8785
b93a436e 8786 return expand_call (exp, target, ignore);
ca695ac9 8787}
b93a436e
JL
8788\f
8789/* Built-in functions to perform an untyped call and return. */
0006469d 8790
b93a436e
JL
/* For each register that may be used for calling a function, this
   gives a mode used to copy the register's value.  VOIDmode indicates
   the register is not used for calling a function.  If the machine
   has register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Filled in lazily by apply_args_size.  */
static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for returning values, this gives
   a mode used to copy the register's value.  VOIDmode indicates the
   register is not used for returning values.  If the machine has
   register windows, this gives only the outbound registers.
   INCOMING_REGNO gives the corresponding inbound register.
   Filled in lazily by apply_result_size.  */
static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];

/* For each register that may be used for calling a function, this
   gives the offset of that register into the block returned by
   __builtin_apply_args.  0 indicates that the register is not
   used for calling a function.  Filled in by apply_args_size.  */
static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
8810
8811/* Return the offset of register REGNO into the block returned by
8812 __builtin_apply_args. This is not declared static, since it is
8813 needed in objc-act.c. */
0006469d 8814
b93a436e
JL
8815int
8816apply_args_register_offset (regno)
8817 int regno;
8818{
8819 apply_args_size ();
0006469d 8820
b93a436e
JL
8821 /* Arguments are always put in outgoing registers (in the argument
8822 block) if such make sense. */
8823#ifdef OUTGOING_REGNO
8824 regno = OUTGOING_REGNO(regno);
8825#endif
8826 return apply_args_reg_offset[regno];
8827}
904762c8 8828
b93a436e
JL
/* Return the size required for the block returned by __builtin_apply_args,
   and initialize apply_args_mode and apply_args_reg_offset.
   Layout: incoming arg-pointer, then (if used) the structure value
   address, then one aligned slot per argument register.  */

static int
apply_args_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change, so compute
     them once and cache the result in SIZE.  */
  if (size < 0)
    {
      /* The first value is the incoming arg-pointer.  */
      size = GET_MODE_SIZE (Pmode);

      /* The second value is the structure value address unless this is
         passed as an "invisible" first argument.  */
      if (struct_value_rtx)
        size += GET_MODE_SIZE (Pmode);

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_ARG_REGNO_P (regno))
          {
            /* Search for the proper mode for copying this register's
               value.  I'm not sure this is right, but it works so far.  */
            enum machine_mode best_mode = VOIDmode;

            /* First choice: the widest integer mode that the register
               supports and that occupies exactly one hard register.  */
            for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                 mode != VOIDmode;
                 mode = GET_MODE_WIDER_MODE (mode))
              if (HARD_REGNO_MODE_OK (regno, mode)
                  && HARD_REGNO_NREGS (regno, mode) == 1)
                best_mode = mode;

            /* Otherwise, fall back to the widest float mode the register
               supports for which a move instruction exists.  */
            if (best_mode == VOIDmode)
              for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                if (HARD_REGNO_MODE_OK (regno, mode)
                    && (mov_optab->handlers[(int) mode].insn_code
                        != CODE_FOR_nothing))
                  best_mode = mode;

            /* A register that can hold arguments but has no copyable
               mode is a target-description bug.  */
            mode = best_mode;
            if (mode == VOIDmode)
              abort ();

            /* Round the running size up to MODE's alignment, record this
               register's slot offset, and account for the slot.  */
            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            apply_args_reg_offset[regno] = size;
            size += GET_MODE_SIZE (mode);
            apply_args_mode[regno] = mode;
          }
        else
          {
            /* Register not used for argument passing.  */
            apply_args_mode[regno] = VOIDmode;
            apply_args_reg_offset[regno] = 0;
          }
    }
  return size;
}
0006469d 8892
b93a436e
JL
/* Return the size required for the block returned by __builtin_apply,
   and initialize apply_result_mode.  The block holds one aligned slot
   per register that may carry a function return value.  */

static int
apply_result_size ()
{
  static int size = -1;
  int align, regno;
  enum machine_mode mode;

  /* The values computed by this function never change, so compute
     them once and cache the result in SIZE.  */
  if (size < 0)
    {
      size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if (FUNCTION_VALUE_REGNO_P (regno))
          {
            /* Search for the proper mode for copying this register's
               value.  I'm not sure this is right, but it works so far.  */
            enum machine_mode best_mode = VOIDmode;

            /* First choice: the widest integer mode the register supports.
               NOTE(review): unlike apply_args_size, this loop stops below
               TImode and does not require the mode to fit in one hard
               register -- presumably deliberate for return values;
               confirm before unifying the two functions.  */
            for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                 mode != TImode;
                 mode = GET_MODE_WIDER_MODE (mode))
              if (HARD_REGNO_MODE_OK (regno, mode))
                best_mode = mode;

            /* Otherwise, fall back to the widest float mode the register
               supports for which a move instruction exists.  */
            if (best_mode == VOIDmode)
              for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                if (HARD_REGNO_MODE_OK (regno, mode)
                    && (mov_optab->handlers[(int) mode].insn_code
                        != CODE_FOR_nothing))
                  best_mode = mode;

            /* A value-returning register with no copyable mode is a
               target-description bug.  */
            mode = best_mode;
            if (mode == VOIDmode)
              abort ();

            /* Round the running size up to MODE's alignment and account
               for this register's slot.  */
            align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
            if (size % align != 0)
              size = CEIL (size, align) * align;
            size += GET_MODE_SIZE (mode);
            apply_result_mode[regno] = mode;
          }
        else
          apply_result_mode[regno] = VOIDmode;

      /* Allow targets that use untyped_call and untyped_return to override
         the size so that machine-specific information can be stored here.  */
#ifdef APPLY_RESULT_SIZE
      size = APPLY_RESULT_SIZE;
#endif
    }
  return size;
}
0006469d 8951
b93a436e
JL
#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
/* Create a vector describing the result block RESULT.  If SAVEP is true,
   the result block is used to save the values; otherwise it is used to
   restore the values.  Returns a PARALLEL of SETs, one per register in
   apply_result_mode, suitable for untyped_call/untyped_return.  */

static rtx
result_vector (savep, result)
     int savep;
     rtx result;
{
  int regno, size, align, nelts;
  enum machine_mode mode;
  rtx reg, mem;
  /* Worst case: every hard register gets a SET.  */
  rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));

  size = nelts = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        /* Slot layout must match apply_result_size: align, then place.  */
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        /* When restoring, values go back into the incoming registers.  */
        reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
        mem = change_address (result, mode,
                              plus_constant (XEXP (result, 0), size));
        savevec[nelts++] = (savep
                            ? gen_rtx_SET (VOIDmode, mem, reg)
                            : gen_rtx_SET (VOIDmode, reg, mem));
        size += GET_MODE_SIZE (mode);
      }
  return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
}
#endif /* HAVE_untyped_call or HAVE_untyped_return */
0006469d 8985
b93a436e
JL
/* Save the state required to perform an untyped call with the same
   arguments as were passed to the current function.  Allocates a stack
   block laid out as by apply_args_size (arg-pointer, optional structure
   value address, then each incoming argument register), stores the
   current values into it, and returns the block's address in a
   pseudo-register.  */

static rtx
expand_builtin_apply_args ()
{
  rtx registers;
  int size, align, regno;
  enum machine_mode mode;

  /* Create a block where the arg-pointer, structure value address,
     and argument registers can be saved.  */
  registers = assign_stack_local (BLKmode, apply_args_size (), -1);

  /* Walk past the arg-pointer and structure value address; the
     register slots start after them.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Save each register used in calling a function to the block.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        rtx tem;

        /* Slot layout must match apply_args_size: align before storing.  */
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;

        tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));

#ifdef STACK_REGS
        /* For reg-stack.c's stack register household.
           Compare with a similar piece of code in function.c.  */

        emit_insn (gen_rtx_USE (mode, tem));
#endif

        emit_move_insn (change_address (registers, mode,
                                        plus_constant (XEXP (registers, 0),
                                                       size)),
                        tem);
        size += GET_MODE_SIZE (mode);
      }

  /* Save the arg pointer to the block, at offset 0.  */
  emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
                  copy_to_reg (virtual_incoming_args_rtx));
  size = GET_MODE_SIZE (Pmode);

  /* Save the structure value address unless this is passed as an
     "invisible" first argument.  */
  if (struct_value_incoming_rtx)
    {
      emit_move_insn (change_address (registers, Pmode,
                                      plus_constant (XEXP (registers, 0),
                                                     size)),
                      copy_to_reg (struct_value_incoming_rtx));
      size += GET_MODE_SIZE (Pmode);
    }

  /* Return the address of the block.  */
  return copy_addr_to_reg (XEXP (registers, 0));
}
0006469d 9050
b93a436e
JL
/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.

   FUNCTION is an rtx for the address of the function to call.
   ARGUMENTS is an rtx for the address of a block laid out like the one
   saved by expand_builtin_apply_args: the incoming arg pointer first,
   then (if the target uses one) the structure value address, then the
   saved argument registers.  ARGSIZE is an rtx for the size in bytes
   of the stack argument data to copy.

   Returns a pseudo register holding the address of a stack block in
   which the called function's return registers have been saved.  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
		  gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  /* When the stack grows upward the saved arg pointer is past the
     argument data, so step back over it.  */
  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
				incoming_args, 0, OPTAB_LIB_WIDEN);
#endif

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros effect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = allocate_dynamic_stack_space (argsize, 0, 0);
  emit_block_move (gen_rtx_MEM (BLKmode, dest),
		   gen_rtx_MEM (BLKmode, incoming_args),
		   argsize,
		   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
	/* Keep SIZE aligned the same way apply_args_size laid out the
	   block, so each register is read from its saved slot.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, regno);
	emit_move_insn (reg,
			change_address (arguments, mode,
					plus_constant (XEXP (arguments, 0),
						       size)));

	use_reg (&call_fusage, reg);
	size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
		      change_address (arguments, Pmode,
				      plus_constant (XEXP (arguments, 0),
						     size)));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
	use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
				      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
	 express a call that sets more than one return register using
	 call_value; use untyped_call for that.  In fact, untyped_call
	 only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	if ((mode = apply_result_mode[regno]) != VOIDmode)
	  {
	    if (valreg)
	      abort (); /* HAVE_untyped_call required.  */
	    valreg = gen_rtx_REG (mode, regno);
	  }

      emit_call_insn (gen_call_value (valreg,
				      gen_rtx_MEM (FUNCTION_MODE, function),
				      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (change_address (result, GET_MODE (valreg),
				      XEXP (result, 0)),
		      valreg);
    }
  else
#endif
    abort ();

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      /* Walk to the last link of the existing usage chain.  */
      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}
bbf6f052 9226
/* Perform an untyped return.

   RESULT is an rtx for the address of a block in which the return
   registers were previously saved (as produced by
   expand_builtin_apply).  Reload each saved return register from the
   block, mark it as used, and return from the current function.  */

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  /* Called for its side effect of computing apply_result_mode[].  */
  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      /* The target has a single pattern that does all of this.  */
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
	/* Keep SIZE aligned the same way the block was laid out.  */
	align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
	if (size % align != 0)
	  size = CEIL (size, align) * align;
	reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
	emit_move_insn (reg,
			change_address (result, mode,
					plus_constant (XEXP (result, 0),
						       size)));

	/* Build up a chain of USE insns so the registers stay live
	   until the return.  */
	push_to_sequence (call_fusage);
	emit_insn (gen_rtx_USE (VOIDmode, reg));
	call_fusage = get_insns ();
	end_sequence ();
	size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
9278\f
b93a436e
JL
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.

   EXP is a *INCREMENT_EXPR or *DECREMENT_EXPR tree whose operand 0 is
   the lvalue being modified and operand 1 is the increment amount.
   IGNORE is nonzero when the caller does not use the result value.  */

static rtx
expand_increment (exp, post, ignore)
     register tree exp;
     int post, ignore;
{
  register rtx op0, op1;
  register rtx temp, value;
  register tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  /* Nonzero when OP0 is a copy of the lvalue, not the lvalue itself.  */
  int op0_is_copy = 0;
  /* Nonzero when a preincrement can be done with one insn in place.  */
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
	  && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
	      || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
	SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
	bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
	   && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
	 post-incrementing, get a copy of the old value.  Otherwise,
	 just mark that we cannot increment in place.  */
      if (post)
	op0 = copy_to_reg (op0);
      else
	bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
		 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
		     EXPAND_MEMORY_USE_BAD);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (- INTVAL (op1));
      this_optab = add_optab;
    }

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_operand_predicate[icode][0]) (op0, mode)
	  && (*insn_operand_predicate[icode][1]) (op0, mode)
	  && (*insn_operand_predicate[icode][2]) (op1, mode))
	single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
	 Problems with multiple evaluation of INCREMENTED are prevented
	 because either (1) it is a component_ref or preincrement,
	 in which case it was stabilized above, or (2) it is an array_ref
	 with constant index in an array in a register, which is
	 safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
			     || TREE_CODE (exp) == PREDECREMENT_EXPR)
			    ? MINUS_EXPR : PLUS_EXPR),
			   TREE_TYPE (exp),
			   incremented,
			   TREE_OPERAND (exp, 1));

      /* Strip conversions so the assignment below is done on the
	 underlying object, converting NEWEXP to match at each level.  */
      while (TREE_CODE (incremented) == NOP_EXPR
	     || TREE_CODE (incremented) == CONVERT_EXPR)
	{
	  newexp = convert (TREE_TYPE (incremented), newexp);
	  incremented = TREE_OPERAND (incremented, 0);
	}

      temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
	 If there is an insn to add or subtract in this mode, queue it.
	 Queueing the increment insn avoids the register shuffling
	 that often results if we must increment now and first save
	 the old value for subsequent use.  */

#if 0  /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
	  /* Make sure that OP0 is valid for operands 0 and 1
	     of the insn we want to queue.  */
	  && (*insn_operand_predicate[icode][0]) (op0, mode)
	  && (*insn_operand_predicate[icode][1]) (op0, mode))
	{
	  if (! (*insn_operand_predicate[icode][2]) (op1, mode))
	    op1 = force_reg (mode, op1);

	  return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
	}
      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
	{
	  /* OP0 is a MEM whose address may not be directly usable by
	     the add insn; force the address into a register first.  */
	  rtx addr = (general_operand (XEXP (op0, 0), mode)
		      ? force_reg (Pmode, XEXP (op0, 0))
		      : copy_to_reg (XEXP (op0, 0)));
	  rtx temp, result;

	  op0 = change_address (op0, VOIDmode, addr);
	  temp = force_reg (GET_MODE (op0), op0);
	  if (! (*insn_operand_predicate[icode][2]) (op1, mode))
	    op1 = force_reg (mode, op1);

	  /* The increment queue is LIFO, thus we have to `queue'
	     the instructions in reverse order.  */
	  enqueue_insn (op0, gen_move_insn (op0, temp));
	  result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
	  return result;
	}
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1,
		      flag_check_memory_usage ? NULL_RTX : op0,
		      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
9481\f
9482/* Expand all function calls contained within EXP, innermost ones first.
9483 But don't look within expressions that have sequence points.
9484 For each CALL_EXPR, record the rtx for its value
9485 in the CALL_EXPR_RTL field. */
5718612f 9486
b93a436e
JL
9487static void
9488preexpand_calls (exp)
9489 tree exp;
9490{
9491 register int nops, i;
9492 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5718612f 9493
b93a436e
JL
9494 if (! do_preexpand_calls)
9495 return;
5718612f 9496
b93a436e 9497 /* Only expressions and references can contain calls. */
bbf6f052 9498
b93a436e
JL
9499 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9500 return;
bbf6f052 9501
b93a436e
JL
9502 switch (TREE_CODE (exp))
9503 {
9504 case CALL_EXPR:
9505 /* Do nothing if already expanded. */
9506 if (CALL_EXPR_RTL (exp) != 0
9507 /* Do nothing if the call returns a variable-sized object. */
9508 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9509 /* Do nothing to built-in functions. */
9510 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9511 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9512 == FUNCTION_DECL)
9513 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9514 return;
bbf6f052 9515
b93a436e
JL
9516 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9517 return;
bbf6f052 9518
b93a436e
JL
9519 case COMPOUND_EXPR:
9520 case COND_EXPR:
9521 case TRUTH_ANDIF_EXPR:
9522 case TRUTH_ORIF_EXPR:
9523 /* If we find one of these, then we can be sure
9524 the adjust will be done for it (since it makes jumps).
9525 Do it now, so that if this is inside an argument
9526 of a function, we don't get the stack adjustment
9527 after some other args have already been pushed. */
9528 do_pending_stack_adjust ();
9529 return;
bbf6f052 9530
b93a436e
JL
9531 case BLOCK:
9532 case RTL_EXPR:
9533 case WITH_CLEANUP_EXPR:
9534 case CLEANUP_POINT_EXPR:
9535 case TRY_CATCH_EXPR:
9536 return;
bbf6f052 9537
b93a436e
JL
9538 case SAVE_EXPR:
9539 if (SAVE_EXPR_RTL (exp) != 0)
9540 return;
9541
9542 default:
9543 break;
ca695ac9 9544 }
bbf6f052 9545
b93a436e
JL
9546 nops = tree_code_length[(int) TREE_CODE (exp)];
9547 for (i = 0; i < nops; i++)
9548 if (TREE_OPERAND (exp, i) != 0)
9549 {
9550 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9551 if (type == 'e' || type == '<' || type == '1' || type == '2'
9552 || type == 'r')
9553 preexpand_calls (TREE_OPERAND (exp, i));
9554 }
9555}
9556\f
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}
bbf6f052 9565
b93a436e
JL
/* When exiting from function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Only does anything on targets that define EXIT_IGNORE_STACK.  The
   adjustment must still be kept when the function might be inlined,
   since an inlined copy cannot rely on the exit sequence to fix the
   stack -- hence the checks on the inlining flags below.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && ! flag_omit_frame_pointer && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}
bbf6f052 9580
b93a436e
JL
9581/* Pop any previously-pushed arguments that have not been popped yet. */
9582
9583void
9584do_pending_stack_adjust ()
9585{
9586 if (inhibit_defer_pop == 0)
ca695ac9 9587 {
b93a436e
JL
9588 if (pending_stack_adjust != 0)
9589 adjust_stack (GEN_INT (pending_stack_adjust));
9590 pending_stack_adjust = 0;
bbf6f052 9591 }
bbf6f052
RK
9592}
9593\f
b93a436e 9594/* Expand conditional expressions. */
bbf6f052 9595
b93a436e
JL
/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  Falls through when the value is nonzero.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}
bbf6f052 9607
/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.
   Falls through when the value is zero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
ca695ac9 9617
b93a436e
JL
/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  register enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  /* When nonzero at the bottom, an rtx comparison to feed to a
     conditional jump; const_true_rtx / const0_rtx mean the outcome
     is known at compile time.  */
  rtx comparison = 0;
  int i;
  tree type;
  enum machine_mode mode;

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      /* Errors were already reported; emit nothing.  */
      break;

    case INTEGER_CST:
      /* Constant condition: jump unconditionally (or fall through).  */
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
	emit_jump (temp);
      break;

#if 0
      /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
	emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
	goto normal;
      /* Fall through.  */
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
	 narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	goto normal;
      /* Fall through.  */
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->non-zero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

#if 0
      /* This is never less insns than evaluating the PLUS_EXPR followed by
	 a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
		   TREE_OPERAND (exp, 0),
		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
				 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
      comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
				   TREE_OPERAND (exp, 0),
				   TREE_OPERAND (exp, 1)),
			    NE, NE);
      break;

    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
	 smallest type that fits.  If the machine doesn't have comparisons
	 that small, it will be converted back to the wider comparison.
	 This helps if we are testing the sign bit of a narrower object.
	 combine can't do this for us because it can't know whether a
	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
	  && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
	  && (type = type_for_mode (mode, 1)) != 0
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
	      != CODE_FOR_nothing))
	{
	  do_jump (convert (type, exp), if_false_label, if_true_label);
	  break;
	}
      goto normal;

    case TRUTH_NOT_EXPR:
      /* Logical NOT: just swap the target labels.  */
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      /* `a && b': if A is false the whole thing is false.  */
      if (if_false_label == 0)
	if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      /* `a || b': if A is true the whole thing is true.  */
      if (if_true_label == 0)
	if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case COMPOUND_EXPR:
      /* Evaluate the first operand for effect only, then test the
	 second.  */
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
      {
	int bitsize, bitpos, unsignedp;
	enum machine_mode mode;
	tree type;
	tree offset;
	int volatilep = 0;
	int alignment;

	/* Get description of this reference.  We don't actually care
	   about the underlying object here.  */
	get_inner_reference (exp, &bitsize, &bitpos, &offset,
			     &mode, &unsignedp, &volatilep,
			     &alignment);

	/* As for BIT_AND_EXPR: test the field in the smallest mode
	   in which the machine can compare it.  */
	type = type_for_size (bitsize, unsignedp);
	if (! SLOW_BYTE_ACCESS
	    && type != 0 && bitsize >= 0
	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
		!= CODE_FOR_nothing))
	  {
	    do_jump (convert (type, exp), if_false_label, if_true_label);
	    break;
	  }
	goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
	  && integer_zerop (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
	       && integer_onep (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
	{
	  register rtx label1 = gen_label_rtx ();
	  drop_through_label = gen_label_rtx ();

	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

	  start_cleanup_deferral ();
	  /* Now the THEN-expression.  */
	  do_jump (TREE_OPERAND (exp, 1),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  /* In case the do_jump just above never jumps.  */
	  do_pending_stack_adjust ();
	  emit_label (label1);

	  /* Now the ELSE-expression.  */
	  do_jump (TREE_OPERAND (exp, 2),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  end_cleanup_deferral ();
	}
      break;

    case EQ_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (integer_zerop (TREE_OPERAND (exp, 1)))
	  /* `x == 0' is the inverse of testing X itself.  */
	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
		 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  /* Complex values are equal iff both the real and the
	     imaginary parts are equal.  */
	  do_jump
	    (fold
	     (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
		     fold (build (EQ_EXPR, TREE_TYPE (exp),
				  fold (build1 (REALPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 0))),
				  fold (build1 (REALPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 1))))),
		     fold (build (EQ_EXPR, TREE_TYPE (exp),
				  fold (build1 (IMAGPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 0))),
				  fold (build1 (IMAGPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 1))))))),
	     if_false_label, if_true_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (TYPE_MODE (inner_type)))
	  /* Integer too wide for one compare insn: compare word by word.  */
	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
	else
	  comparison = compare (exp, EQ, EQ);
	break;
      }

    case NE_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (integer_zerop (TREE_OPERAND (exp, 1)))
	  /* `x != 0' is just testing X itself.  */
	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
		 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  /* Complex values differ iff either the real or the imaginary
	     parts differ.  */
	  do_jump
	    (fold
	     (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
		     fold (build (NE_EXPR, TREE_TYPE (exp),
				  fold (build1 (REALPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 0))),
				  fold (build1 (REALPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 1))))),
		     fold (build (NE_EXPR, TREE_TYPE (exp),
				  fold (build1 (IMAGPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 0))),
				  fold (build1 (IMAGPART_EXPR,
						TREE_TYPE (inner_type),
						TREE_OPERAND (exp, 1))))))),
	     if_false_label, if_true_label);
	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (TYPE_MODE (inner_type)))
	  /* Note the swapped labels: word-equality with labels swapped
	     gives not-equal.  */
	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
	else
	  comparison = compare (exp, NE, NE);
	break;
      }

    case LT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
	comparison = compare (exp, LT, LTU);
      break;

    case LE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
	comparison = compare (exp, LE, LEU);
      break;

    case GT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
	comparison = compare (exp, GT, GTU);
      break;

    case GE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	   == MODE_INT)
	  && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
	comparison = compare (exp, GE, GEU);
      break;

    default:
    normal:
      /* The general case: compute the value and compare it against
	 zero.  */
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
	comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
	comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	       && !can_compare_p (GET_MODE (temp)))
	/* Note swapping the labels gives us not-equal.  */
	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
				       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				       GET_MODE (temp), NULL_RTX, 0);
      else
	abort ();
    }

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
	emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
	emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
b93a436e
JL
9991\f
9992/* Given a comparison expression EXP for values too wide to be compared
9993 with one insn, test the comparison and jump to the appropriate label.
9994 The code of EXP is ignored; we always test GT if SWAP is 0,
9995 and LT if SWAP is 1. */
bbf6f052 9996
b93a436e
JL
9997static void
9998do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9999 tree exp;
10000 int swap;
10001 rtx if_false_label, if_true_label;
10002{
10003 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10004 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10005 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10006 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10007 rtx drop_through_label = 0;
10008 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10009 int i;
bbf6f052 10010
b93a436e
JL
10011 if (! if_true_label || ! if_false_label)
10012 drop_through_label = gen_label_rtx ();
10013 if (! if_true_label)
10014 if_true_label = drop_through_label;
10015 if (! if_false_label)
10016 if_false_label = drop_through_label;
bbf6f052 10017
b93a436e
JL
10018 /* Compare a word at a time, high order first. */
10019 for (i = 0; i < nwords; i++)
f81497d9 10020 {
b93a436e
JL
10021 rtx comp;
10022 rtx op0_word, op1_word;
10023
10024 if (WORDS_BIG_ENDIAN)
10025 {
10026 op0_word = operand_subword_force (op0, i, mode);
10027 op1_word = operand_subword_force (op1, i, mode);
10028 }
f81497d9 10029 else
b93a436e
JL
10030 {
10031 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10032 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10033 }
10034
10035 /* All but high-order word must be compared as unsigned. */
10036 comp = compare_from_rtx (op0_word, op1_word,
10037 (unsignedp || i > 0) ? GTU : GT,
10038 unsignedp, word_mode, NULL_RTX, 0);
10039 if (comp == const_true_rtx)
10040 emit_jump (if_true_label);
10041 else if (comp != const0_rtx)
10042 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10043
10044 /* Consider lower words only if these are equal. */
10045 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10046 NULL_RTX, 0);
10047 if (comp == const_true_rtx)
10048 emit_jump (if_false_label);
10049 else if (comp != const0_rtx)
10050 do_jump_for_compare (comp, NULL_RTX, if_false_label);
f81497d9 10051 }
ca695ac9 10052
b93a436e
JL
10053 if (if_false_label)
10054 emit_jump (if_false_label);
10055 if (drop_through_label)
10056 emit_label (drop_through_label);
f81497d9
RS
10057}
10058
b93a436e
JL
/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.

   Either label may be zero, meaning "fall through"; a local drop-through
   label is generated to stand in for whichever one is missing.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  /* Number of word-sized pieces each operand occupies.  */
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  /* Substitute a common fall-through label for any absent target.  */
  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      /* Subword 0 is the most significant word iff WORDS_BIG_ENDIAN.  */
      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      /* A compile-time-true "greater" at this word decides the result.  */
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  At run time we
         only reach here with op0_word <= op1_word, so NE means less.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  /* Every word compared equal, so OP0 is not greater than OP1.  */
  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
10121
b93a436e
JL
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.

   Jumps to IF_FALSE_LABEL as soon as a pair of corresponding words
   differs; falls through to IF_TRUE_LABEL (if given) once all word
   pairs have compared equal.  Either label may be zero ("fall through").  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  /* With no false label, a mismatch simply falls through.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      /* COMP is an EQ test of one word pair.  do_jump_for_compare with
         only a false label branches there when the comparison fails,
         i.e. when this pair of words differs.  */
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
                                   operand_subword_force (op1, i, mode),
                                   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   word_mode, NULL_RTX, 0);
      /* NOTE(review): the constant-folded cases look inverted relative
         to the do_jump_for_compare path below: an EQ that folds to
         const_true_rtx means this word pair is known EQUAL, yet we jump
         to IF_FALSE_LABEL, while a fold to const0_rtx (known unequal)
         emits no jump at all.  Compare the constant handling in
         do_jump_by_parts_greater_rtx, whose polarity matches its
         non-constant path.  Probably latent (constant operands are
         normally folded upstream) -- verify intended polarity.  */
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  /* All words matched: the EQ_EXPR is true.  */
  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
b93a436e
JL
10157\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.

   Jumps to IF_TRUE_LABEL when OP0 compares equal to zero (see the
   do_jump_for_compare call below); either label may be zero, meaning
   "fall through".  */

static void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  /* expand_binop may return 0 if the widening IOR cannot be done;
     in that case fall back to the word-by-word loop below.  */
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
                                   NULL_RTX, 0);

      /* NOTE(review): the two constant-folded cases here look swapped
         relative to the do_jump_for_compare call below, which jumps to
         IF_TRUE_LABEL when the EQ holds (i.e. OP0 == 0).  Yet a fold to
         const_true_rtx (known zero) jumps to IF_FALSE_LABEL and
         const0_rtx (known nonzero) jumps to IF_TRUE_LABEL.  Probably
         latent (constant OP0 is folded upstream) -- verify polarity.  */
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp == const0_rtx)
        emit_jump (if_true_label);
      else
        do_jump_for_compare (comp, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
                                                          GET_MODE (op0)),
                                   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      /* NOTE(review): as above, the const_true_rtx case (word known to
         be zero) jumping to IF_FALSE_LABEL looks inverted -- verify.  */
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  /* No word was nonzero, so OP0 == 0.  */
  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
bbf6f052 10220
b93a436e
JL
/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.

   COMPARISON must be a relational rtx whose code indexes bcc_gen_fctn
   (a table of branch-generator functions); we abort if no generator
   exists.  With only IF_FALSE_LABEL, the branch must be taken when the
   comparison is FALSE, which requires inverting the generated jump.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      /* Easy case: branch on the condition itself, then fall through
         (or jump unconditionally) for the false case.  */
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
        abort ();

      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = get_last_insn ();
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
         what is generated.  If more than one insn is a branch, or if the
         branch is not the last insn written, abort.  If we can't invert
         the branch, emit a true label, redirect this jump to that,
         emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
      else
        abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
         case that, on some machines, emitting the branch would discard
         the previous compare insn and emit a replacement.  This isn't
         done anymore, but abort if we see that PREV is deleted.  */

      if (prev == 0)
        insn = get_insns ();
      else if (INSN_DELETED_P (prev))
        abort ();
      else
        insn = NEXT_INSN (prev);

      /* Locate the jump among the just-emitted insns; there must be
         exactly one, and it must be the last insn emitted.  */
      for (; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            if (branch)
              abort ();
            branch = insn;
          }

      if (branch != get_last_insn ())
        abort ();

      /* Flip the branch so it is taken when COMPARISON is false.  */
      JUMP_LABEL (branch) = if_false_label;
      if (! invert_jump (branch, if_false_label))
        {
          /* Inversion failed: keep the original condition but send it
             to a fresh local "true" label placed just past an
             unconditional jump to IF_FALSE_LABEL.  */
          if_true_label = gen_label_rtx ();
          redirect_jump (branch, if_true_label);
          emit_jump (if_false_label);
          emit_label (if_true_label);
        }
    }
}
10288\f
10289/* Generate code for a comparison expression EXP
10290 (including code to compute the values to be compared)
10291 and set (CC0) according to the result.
10292 SIGNED_CODE should be the rtx operation for this comparison for
10293 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
bbf6f052 10294
b93a436e
JL
10295 We force a stack adjustment unless there are currently
10296 things pushed on the stack that aren't yet used. */
ca695ac9 10297
b93a436e
JL
10298static rtx
10299compare (exp, signed_code, unsigned_code)
10300 register tree exp;
10301 enum rtx_code signed_code, unsigned_code;
10302{
10303 register rtx op0
10304 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10305 register rtx op1
10306 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10307 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10308 register enum machine_mode mode = TYPE_MODE (type);
10309 int unsignedp = TREE_UNSIGNED (type);
10310 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
ca695ac9 10311
b93a436e
JL
10312#ifdef HAVE_canonicalize_funcptr_for_compare
10313 /* If function pointers need to be "canonicalized" before they can
10314 be reliably compared, then canonicalize them. */
10315 if (HAVE_canonicalize_funcptr_for_compare
10316 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10317 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10318 == FUNCTION_TYPE))
bbf6f052 10319 {
b93a436e 10320 rtx new_op0 = gen_reg_rtx (mode);
bbf6f052 10321
b93a436e
JL
10322 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
10323 op0 = new_op0;
ca695ac9 10324 }
bbf6f052 10325
b93a436e
JL
10326 if (HAVE_canonicalize_funcptr_for_compare
10327 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10328 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10329 == FUNCTION_TYPE))
10330 {
10331 rtx new_op1 = gen_reg_rtx (mode);
bbf6f052 10332
b93a436e
JL
10333 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
10334 op1 = new_op1;
10335 }
10336#endif
0f41302f 10337
b93a436e
JL
10338 return compare_from_rtx (op0, op1, code, unsignedp, mode,
10339 ((mode == BLKmode)
10340 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
10341 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
ca695ac9 10342}
bbf6f052 10343
b93a436e
JL
/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.

   Emits the compare insn and returns a CODE-based rtx on (cc0), or a
   constant rtx (const0_rtx / const_true_rtx) when the comparison folds
   at compile time.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      /* Swapping operands requires the mirror-image condition code.  */
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  /* Fold the comparison at compile time when both operands are known.  */
  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines. */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  /* The condition is now implicit in (cc0); return CODE applied to it.  */
  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
10414\f
10415/* Generate code to calculate EXP using a store-flag instruction
10416 and return an rtx for the result. EXP is either a comparison
10417 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 10418
b93a436e 10419 If TARGET is nonzero, store the result there if convenient.
ca695ac9 10420
b93a436e
JL
10421 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
10422 cheap.
ca695ac9 10423
b93a436e
JL
10424 Return zero if there is no suitable set-flag instruction
10425 available on this machine.
ca695ac9 10426
b93a436e
JL
10427 Once expand_expr has been called on the arguments of the comparison,
10428 we are committed to doing the store flag, since it is not safe to
10429 re-evaluate the expression. We emit the store-flag insn by calling
10430 emit_store_flag, but only expand the arguments if we have a reason
10431 to believe that emit_store_flag will be successful. If we think that
10432 it will, but it isn't, we have to simulate the store-flag with a
10433 set/jump/set sequence. */
ca695ac9 10434
b93a436e
JL
10435static rtx
10436do_store_flag (exp, target, mode, only_cheap)
10437 tree exp;
10438 rtx target;
10439 enum machine_mode mode;
10440 int only_cheap;
10441{
10442 enum rtx_code code;
10443 tree arg0, arg1, type;
10444 tree tem;
10445 enum machine_mode operand_mode;
10446 int invert = 0;
10447 int unsignedp;
10448 rtx op0, op1;
10449 enum insn_code icode;
10450 rtx subtarget = target;
10451 rtx result, label, pattern, jump_pat;
ca695ac9 10452
b93a436e
JL
10453 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
10454 result at the end. We can't simply invert the test since it would
10455 have already been inverted if it were valid. This case occurs for
10456 some floating-point comparisons. */
ca695ac9 10457
b93a436e
JL
10458 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
10459 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 10460
b93a436e
JL
10461 arg0 = TREE_OPERAND (exp, 0);
10462 arg1 = TREE_OPERAND (exp, 1);
10463 type = TREE_TYPE (arg0);
10464 operand_mode = TYPE_MODE (type);
10465 unsignedp = TREE_UNSIGNED (type);
ca695ac9 10466
b93a436e
JL
10467 /* We won't bother with BLKmode store-flag operations because it would mean
10468 passing a lot of information to emit_store_flag. */
10469 if (operand_mode == BLKmode)
10470 return 0;
ca695ac9 10471
b93a436e
JL
10472 /* We won't bother with store-flag operations involving function pointers
10473 when function pointers must be canonicalized before comparisons. */
10474#ifdef HAVE_canonicalize_funcptr_for_compare
10475 if (HAVE_canonicalize_funcptr_for_compare
10476 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10477 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10478 == FUNCTION_TYPE))
10479 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
10480 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
10481 == FUNCTION_TYPE))))
10482 return 0;
ca695ac9
JB
10483#endif
10484
b93a436e
JL
10485 STRIP_NOPS (arg0);
10486 STRIP_NOPS (arg1);
ca695ac9 10487
b93a436e
JL
10488 /* Get the rtx comparison code to use. We know that EXP is a comparison
10489 operation of some type. Some comparisons against 1 and -1 can be
10490 converted to comparisons with zero. Do so here so that the tests
10491 below will be aware that we have a comparison with zero. These
10492 tests will not catch constants in the first operand, but constants
10493 are rarely passed as the first operand. */
ca695ac9 10494
b93a436e
JL
10495 switch (TREE_CODE (exp))
10496 {
10497 case EQ_EXPR:
10498 code = EQ;
bbf6f052 10499 break;
b93a436e
JL
10500 case NE_EXPR:
10501 code = NE;
bbf6f052 10502 break;
b93a436e
JL
10503 case LT_EXPR:
10504 if (integer_onep (arg1))
10505 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10506 else
10507 code = unsignedp ? LTU : LT;
ca695ac9 10508 break;
b93a436e
JL
10509 case LE_EXPR:
10510 if (! unsignedp && integer_all_onesp (arg1))
10511 arg1 = integer_zero_node, code = LT;
10512 else
10513 code = unsignedp ? LEU : LE;
ca695ac9 10514 break;
b93a436e
JL
10515 case GT_EXPR:
10516 if (! unsignedp && integer_all_onesp (arg1))
10517 arg1 = integer_zero_node, code = GE;
10518 else
10519 code = unsignedp ? GTU : GT;
10520 break;
10521 case GE_EXPR:
10522 if (integer_onep (arg1))
10523 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10524 else
10525 code = unsignedp ? GEU : GE;
ca695ac9 10526 break;
ca695ac9 10527 default:
b93a436e 10528 abort ();
bbf6f052 10529 }
bbf6f052 10530
b93a436e
JL
10531 /* Put a constant second. */
10532 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10533 {
10534 tem = arg0; arg0 = arg1; arg1 = tem;
10535 code = swap_condition (code);
ca695ac9 10536 }
bbf6f052 10537
b93a436e
JL
10538 /* If this is an equality or inequality test of a single bit, we can
10539 do this by shifting the bit being tested to the low-order bit and
10540 masking the result with the constant 1. If the condition was EQ,
10541 we xor it with 1. This does not require an scc insn and is faster
10542 than an scc insn even if we have it. */
d39985fa 10543
b93a436e
JL
10544 if ((code == NE || code == EQ)
10545 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10546 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10547 {
10548 tree inner = TREE_OPERAND (arg0, 0);
10549 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10550 int ops_unsignedp;
bbf6f052 10551
b93a436e
JL
10552 /* If INNER is a right shift of a constant and it plus BITNUM does
10553 not overflow, adjust BITNUM and INNER. */
ca695ac9 10554
b93a436e
JL
10555 if (TREE_CODE (inner) == RSHIFT_EXPR
10556 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10557 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
10558 && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
10559 < TYPE_PRECISION (type)))
ca695ac9 10560 {
b93a436e
JL
10561 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10562 inner = TREE_OPERAND (inner, 0);
ca695ac9 10563 }
ca695ac9 10564
b93a436e
JL
10565 /* If we are going to be able to omit the AND below, we must do our
10566 operations as unsigned. If we must use the AND, we have a choice.
10567 Normally unsigned is faster, but for some machines signed is. */
10568 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10569#ifdef LOAD_EXTEND_OP
10570 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10571#else
10572 : 1
10573#endif
10574 );
bbf6f052 10575
b93a436e
JL
10576 if (subtarget == 0 || GET_CODE (subtarget) != REG
10577 || GET_MODE (subtarget) != operand_mode
10578 || ! safe_from_p (subtarget, inner))
10579 subtarget = 0;
bbf6f052 10580
b93a436e 10581 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
bbf6f052 10582
b93a436e
JL
10583 if (bitnum != 0)
10584 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10585 size_int (bitnum), subtarget, ops_unsignedp);
bbf6f052 10586
b93a436e
JL
10587 if (GET_MODE (op0) != mode)
10588 op0 = convert_to_mode (mode, op0, ops_unsignedp);
bbf6f052 10589
b93a436e
JL
10590 if ((code == EQ && ! invert) || (code == NE && invert))
10591 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10592 ops_unsignedp, OPTAB_LIB_WIDEN);
bbf6f052 10593
b93a436e
JL
10594 /* Put the AND last so it can combine with more things. */
10595 if (bitnum != TYPE_PRECISION (type) - 1)
10596 op0 = expand_and (op0, const1_rtx, subtarget);
bbf6f052 10597
b93a436e
JL
10598 return op0;
10599 }
bbf6f052 10600
b93a436e
JL
10601 /* Now see if we are likely to be able to do this. Return if not. */
10602 if (! can_compare_p (operand_mode))
10603 return 0;
10604 icode = setcc_gen_code[(int) code];
10605 if (icode == CODE_FOR_nothing
10606 || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
ca695ac9 10607 {
b93a436e
JL
10608 /* We can only do this if it is one of the special cases that
10609 can be handled without an scc insn. */
10610 if ((code == LT && integer_zerop (arg1))
10611 || (! only_cheap && code == GE && integer_zerop (arg1)))
10612 ;
10613 else if (BRANCH_COST >= 0
10614 && ! only_cheap && (code == NE || code == EQ)
10615 && TREE_CODE (type) != REAL_TYPE
10616 && ((abs_optab->handlers[(int) operand_mode].insn_code
10617 != CODE_FOR_nothing)
10618 || (ffs_optab->handlers[(int) operand_mode].insn_code
10619 != CODE_FOR_nothing)))
10620 ;
10621 else
10622 return 0;
ca695ac9 10623 }
b93a436e
JL
10624
10625 preexpand_calls (exp);
10626 if (subtarget == 0 || GET_CODE (subtarget) != REG
10627 || GET_MODE (subtarget) != operand_mode
10628 || ! safe_from_p (subtarget, arg1))
10629 subtarget = 0;
10630
10631 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10632 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10633
10634 if (target == 0)
10635 target = gen_reg_rtx (mode);
10636
10637 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10638 because, if the emit_store_flag does anything it will succeed and
10639 OP0 and OP1 will not be used subsequently. */
ca695ac9 10640
b93a436e
JL
10641 result = emit_store_flag (target, code,
10642 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10643 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10644 operand_mode, unsignedp, 1);
ca695ac9 10645
b93a436e
JL
10646 if (result)
10647 {
10648 if (invert)
10649 result = expand_binop (mode, xor_optab, result, const1_rtx,
10650 result, 0, OPTAB_LIB_WIDEN);
10651 return result;
ca695ac9 10652 }
bbf6f052 10653
b93a436e
JL
10654 /* If this failed, we have to do this with set/compare/jump/set code. */
10655 if (GET_CODE (target) != REG
10656 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10657 target = gen_reg_rtx (GET_MODE (target));
10658
10659 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10660 result = compare_from_rtx (op0, op1, code, unsignedp,
10661 operand_mode, NULL_RTX, 0);
10662 if (GET_CODE (result) == CONST_INT)
10663 return (((result == const0_rtx && ! invert)
10664 || (result != const0_rtx && invert))
10665 ? const0_rtx : const1_rtx);
ca695ac9 10666
b93a436e
JL
10667 label = gen_label_rtx ();
10668 if (bcc_gen_fctn[(int) code] == 0)
10669 abort ();
0f41302f 10670
b93a436e
JL
10671 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10672 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10673 emit_label (label);
bbf6f052 10674
b93a436e 10675 return target;
ca695ac9 10676}
b93a436e
JL
10677\f
10678/* Generate a tablejump instruction (used for switch statements). */
10679
10680#ifdef HAVE_tablejump
e87b4f3f 10681
b93a436e
JL
/* Generate a tablejump (used for switch statements).

   INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  /* Form the address table_label + index * entry_size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);

  /* Load the table entry into a register; the table is constant, so the
     MEM is marked RTX_UNCHANGING_P.  */
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
b93a436e
JL
10752
10753#endif /* HAVE_tablejump */
This page took 1.960209 seconds and 5 git commands to generate.