/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

22#include "config.h"
670ee920 23#include "system.h"
ca695ac9 24#include "machmode.h"
bbf6f052
RK
25#include "rtl.h"
26#include "tree.h"
ca695ac9 27#include "obstack.h"
bbf6f052 28#include "flags.h"
bf76bb5a 29#include "regs.h"
4ed67205 30#include "hard-reg-set.h"
3d195391 31#include "except.h"
bbf6f052
RK
32#include "function.h"
33#include "insn-flags.h"
34#include "insn-codes.h"
bbf6f052 35#include "insn-config.h"
d6f4ec51
KG
36/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37#include "expr.h"
bbf6f052
RK
38#include "recog.h"
39#include "output.h"
bbf6f052 40#include "typeclass.h"
ca55abae 41#include "defaults.h"
10f0ad3d 42#include "toplev.h"
bbf6f052
RK
43
44#define CEIL(x,y) (((x) + (y) - 1) / (y))
45
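
/* For example, CEIL (10, 4) == 3: a ten-byte object occupies three
   four-byte units once the division is rounded up.  */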

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
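
/* As an illustration of the test above: on a target where
   STACK_GROWS_DOWNWARD is defined but ARGS_GROW_DOWNWARD is not, the two
   `defined' values differ, so PUSH_ARGS_REVERSED is defined and arguments
   are pushed from last to first.  */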

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Nonzero if the machine description has been fixed to accept
   CONSTANT_P_RTX patterns.  We will emit a warning and continue
   if we find we must actually use such a beast.  */
static int can_handle_constant_p;

/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when flag_check_memory_usage is true, to avoid infinite
   recursion.  */
static int in_check_memory_usage;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx get_push_address PROTO ((int));

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
                                    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
                                     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
                                           tree, tree, int));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
                              enum machine_mode, int, int,
                              int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree, int));
static int fixed_type_p PROTO((tree));
static rtx var_rtx PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx get_memory_rtx PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
                                 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif
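
/* A worked example of the ratio: on a 32-bit target, a 32-byte aligned
   copy decomposes into eight SImode moves, below the default MOVE_RATIO
   of 15, so it is expanded inline; a copy needing 15 or more such moves
   is handed to a movstr pattern or a library call instead.  */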

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx_REG (mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  /* Find out if CONSTANT_P_RTX is accepted.  */
  SET_DEST (pat) = gen_rtx_REG (TYPE_MODE (integer_type_node),
                                FIRST_PSEUDO_REGISTER);
  SET_SRC (pat) = gen_rtx_CONSTANT_P_RTX (TYPE_MODE (integer_type_node),
                                          SET_DEST (pat));
  if (recog (pat, insn, &num_clobbers) >= 0)
    can_handle_constant_p = 1;

  end_sequence ();
  obfree (free_point);
}
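
/* After this pass, code such as convert_move below can simply test

     direct_load[(int) to_mode]

   to learn whether a MEM in that mode may serve as a move operand
   directly, or must first be copied into a register.  */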

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
                                  var, NULL_RTX, NULL_RTX, body,
                                  pending_chain);
  return pending_chain;
}

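/* For example, when `i++' is expanded in a context that still needs the
   old value of `i', the add insn is enqueued here instead of emitted, and
   the QUEUED rtx stands for `i' until emit_queue flushes the queue;
   protect_from_queue (below) then chooses between the variable itself and
   a saved pre-increment copy.  */
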
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

          MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
          RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
          MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
          MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (new));
              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }
          return new;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
        {
          QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
          emit_insn (QUEUED_BODY (p));
        }
      else
        QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
        {
          /* Try converting directly if the insn is supported.  */
          if ((code = can_extend_p (to_mode, from_mode, 0))
              != CODE_FOR_nothing)
            {
              emit_unop_insn (code, to, from, UNKNOWN);
              return;
            }
        }

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        default:
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
                                       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

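  /* For instance, a DFmode-to-SFmode conversion first tries the
     truncdfsf2 insn above; on a target without one, control reaches this
     point and the conversion goes through truncdfsf2_libfunc instead.  */
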
  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

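  /* Concretely: widening a signed SImode value to DImode by hand on a
     32-bit target copies the low word, then fills the high word with the
     sign, obtained by shifting the low part right arithmetically by
     GET_MODE_BITSIZE (lowpart_mode) - 1 bits (31 here).  */
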
  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
        {
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
        {
          from = convert_to_mode (QImode, from, unsignedp);
          from_mode = QImode;
        }
      else
        {
#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
            {
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpqiqi2 */
          abort ();
        }
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.  */
          abort ();
        }
    }

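  /* For example, if a machine description provides only extendqihi2 and
     extendhisi2 patterns, a QImode-to-SImode extension has no direct
     insn; the search above finds HImode as an intermediate, giving two
     extensions in sequence.  */
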
  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

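/* A minimal usage sketch (hypothetical pseudo-registers, not taken from
   this file):

     rtx from = gen_reg_rtx (SImode);
     rtx to = gen_reg_rtx (DImode);
     convert_move (to, from, 0);

   This sign-extends the SImode value into the DImode register; whether a
   single extendsidi2 insn is emitted, the conversion goes via word_mode,
   or the multiword fill loop above is used depends entirely on the
   target's machine description.  */
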
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return GEN_INT (val);
        }

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
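
/* As a concrete instance of the CONST_INT handling above: converting a
   CONST_INT whose QImode value is 0xff to SImode returns GEN_INT (-1)
   when UNSIGNEDP is zero, since the sign bit is propagated, but
   GEN_INT (255) when UNSIGNEDP is nonzero.  */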
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

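/* For example, with MOVE_MAX == 4 and sufficient alignment, an 11-byte
   copy is emitted as two SImode moves, one HImode move, and one QImode
   move: each pass of the loop above peels off as many moves as fit in
   the current mode before dropping to the next smaller one.  */
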
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx_MEM (mode, data->to_addr)
             : copy_rtx (change_address (data->to, mode,
                                         plus_constant (data->to_addr,
                                                        data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
        = (data->autinc_from
           ? gen_rtx_MEM (mode, data->from_addr)
           : copy_rtx (change_address (data->from, mode,
                                       plus_constant (data->from_addr,
                                                      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && ((GET_CODE (size) == CONST_INT
                   && ((unsigned HOST_WIDE_INT) INTVAL (size)
                       <= (GET_MODE_MASK (mode) >> 1)))
                  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
              && (insn_operand_predicate[(int) code][0] == 0
                  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
              && (insn_operand_predicate[(int) code][1] == 0
                  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
              && (insn_operand_predicate[(int) code][3] == 0
                  || (*insn_operand_predicate[(int) code][3]) (opalign,
                                                               VOIDmode)))
            {
              rtx op2;
              rtx last = get_last_insn ();
              rtx pat;

              op2 = convert_to_mode (mode, size, 1);
              if (insn_operand_predicate[(int) code][2] != 0
                  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  return 0;
                }
              else
                delete_insns_since (last);
            }
        }

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
         memcpy in this context.

         This could be a user call to memcpy and the user may wish to
         examine the return value from memcpy.

         For targets where libcalls and normal calls have different conventions
         for returning pointers, we could end up generating incorrect code.

         So instead of using a libcall sequence we build up a suitable
         CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
        {
          tree fntype;

          /* This was copied from except.c, I don't know if all this is
             necessary in this context or not.  */
          fn = get_identifier ("memcpy");
          push_obstacks_nochange ();
          end_temporary_allocation ();
          fntype = build_pointer_type (void_type_node);
          fntype = build_function_type (fntype, NULL_TREE);
          fn = build_decl (FUNCTION_DECL, fn, fntype);
          DECL_EXTERNAL (fn) = 1;
          TREE_PUBLIC (fn) = 1;
          DECL_ARTIFICIAL (fn) = 1;
          make_decl_rtl (fn, NULL_PTR, 1);
          assemble_external (fn);
          pop_obstacks ();
        }

      /* We need to make an argument list for the function call.

         memcpy has three arguments, the first two are void * addresses and
         the last is a size_t byte count for the copy.  */
      arg_list
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node),
                                      XEXP (x, 0)));
      TREE_CHAIN (arg_list)
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node),
                                      XEXP (y, 0)));
      TREE_CHAIN (TREE_CHAIN (arg_list))
        = build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                         call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node), size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
\f
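/* To see the three strategies together: a copy whose size is a CONST_INT
   and whose piece count is below MOVE_RATIO is expanded inline by
   move_by_pieces; otherwise a movstrM pattern is tried for each integer
   mode M; and only if no pattern matches is memcpy (or bcopy) called.  */
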
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

1808/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
0040593d
JW
1809 The number of registers to be filled is NREGS. SIZE indicates the number
1810 of bytes in the object X. */
1811
bbf6f052
RK
1812
1813void
0040593d 1814move_block_from_reg (regno, x, nregs, size)
bbf6f052
RK
1815 int regno;
1816 rtx x;
1817 int nregs;
0040593d 1818 int size;
bbf6f052
RK
1819{
1820 int i;
381127e8
RL
1821#ifdef HAVE_store_multiple
1822 rtx pat;
1823 rtx last;
1824#endif
58a32c5c 1825 enum machine_mode mode;
bbf6f052 1826
58a32c5c
DE
1827 /* If SIZE is that of a mode no bigger than a word, just use that
1828 mode's store operation. */
1829 if (size <= UNITS_PER_WORD
1830 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1831 {
1832 emit_move_insn (change_address (x, mode, NULL),
38a448ca 1833 gen_rtx_REG (mode, regno));
58a32c5c
DE
1834 return;
1835 }
1836
0040593d 1837 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
58a32c5c
DE
1838 to the left before storing to memory. Note that the previous test
1839 doesn't handle all cases (e.g. SIZE == 3). */
0040593d
JW
1840 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1841 {
1842 rtx tem = operand_subword (x, 0, 1, BLKmode);
1843 rtx shift;
1844
1845 if (tem == 0)
1846 abort ();
1847
1848 shift = expand_shift (LSHIFT_EXPR, word_mode,
38a448ca 1849 gen_rtx_REG (word_mode, regno),
0040593d
JW
1850 build_int_2 ((UNITS_PER_WORD - size)
1851 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1852 emit_move_insn (tem, shift);
1853 return;
1854 }
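
      /* Worked example for the shift above (a sketch, assuming
	 UNITS_PER_WORD == 4): with SIZE == 3 on a big-endian target the
	 register is shifted left by (4 - 3) * 8 == 8 bits, so a store of
	 the word puts the three significant bytes at the low-addressed
	 bytes of the memory word.  */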
1855
bbf6f052
RK
1856 /* See if the machine can do this with a store multiple insn. */
1857#ifdef HAVE_store_multiple
c3a02afe 1858 if (HAVE_store_multiple)
bbf6f052 1859 {
c3a02afe 1860 last = get_last_insn ();
38a448ca 1861 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
c3a02afe
RK
1862 GEN_INT (nregs));
1863 if (pat)
1864 {
1865 emit_insn (pat);
1866 return;
1867 }
1868 else
1869 delete_insns_since (last);
bbf6f052 1870 }
bbf6f052
RK
1871#endif
1872
1873 for (i = 0; i < nregs; i++)
1874 {
1875 rtx tem = operand_subword (x, i, 1, BLKmode);
1876
1877 if (tem == 0)
1878 abort ();
1879
38a448ca 1880 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
1881 }
1882}
1883
aac5cc16
RH
1884/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1885 registers represented by a PARALLEL. SSIZE represents the total size of
1886 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1887 SRC in bits. */
1888/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1889 the balance will be in what would be the low-order memory addresses, i.e.
1890 left justified for big endian, right justified for little endian. This
1891 happens to be true for the targets currently using this support. If this
1892 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1893 would be needed. */
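
/* For illustration (a hypothetical example of such a PARALLEL): DST
   might look like

	(parallel [(expr_list (reg:DF 32) (const_int 0))
		   (expr_list (reg:DF 33) (const_int 8))])

   meaning that bytes 0-7 of SRC are to be loaded into register 32 and
   bytes 8-15 into register 33.  */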
fffa9c1d
JW
1894
1895void
aac5cc16
RH
1896emit_group_load (dst, orig_src, ssize, align)
1897 rtx dst, orig_src;
1898 int align, ssize;
fffa9c1d 1899{
aac5cc16
RH
1900 rtx *tmps, src;
1901 int start, i;
fffa9c1d 1902
aac5cc16 1903 if (GET_CODE (dst) != PARALLEL)
fffa9c1d
JW
1904 abort ();
1905
1906 /* Check for a NULL entry, used to indicate that the parameter goes
1907 both on the stack and in registers. */
aac5cc16
RH
1908 if (XEXP (XVECEXP (dst, 0, 0), 0))
1909 start = 0;
fffa9c1d 1910 else
aac5cc16
RH
1911 start = 1;
1912
1913	  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1914
1915 /* If we won't be loading directly from memory, protect the real source
1916 from strange tricks we might play. */
1917 src = orig_src;
1918 if (GET_CODE (src) != MEM)
1919 {
1920 src = gen_reg_rtx (GET_MODE (orig_src));
1921 emit_move_insn (src, orig_src);
1922 }
1923
1924 /* Process the pieces. */
1925 for (i = start; i < XVECLEN (dst, 0); i++)
1926 {
1927 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1928 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1929 int bytelen = GET_MODE_SIZE (mode);
1930 int shift = 0;
1931
1932 /* Handle trailing fragments that run over the size of the struct. */
1933 if (ssize >= 0 && bytepos + bytelen > ssize)
1934 {
1935 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1936 bytelen = ssize - bytepos;
1937 if (bytelen <= 0)
1938	    abort ();
1939 }
1940
1941 /* Optimize the access just a bit. */
1942 if (GET_CODE (src) == MEM
1943	  && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1944	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1945 && bytelen == GET_MODE_SIZE (mode))
1946 {
1947 tmps[i] = gen_reg_rtx (mode);
1948 emit_move_insn (tmps[i],
1949 change_address (src, mode,
1950 plus_constant (XEXP (src, 0),
1951 bytepos)));
fffa9c1d
JW
1952 }
1953 else
aac5cc16
RH
1954 {
1955	  tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1956	  bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1957 mode, mode, align, ssize);
1958 }
fffa9c1d 1959
aac5cc16
RH
1960 if (BYTES_BIG_ENDIAN && shift)
1961 {
1962	  tmps[i] = expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1963	  tmps[i], 0, OPTAB_WIDEN);
1964 }
fffa9c1d 1965 }
aac5cc16
RH
1966 emit_queue ();
1967
1968 /* Copy the extracted pieces into the proper (probable) hard regs. */
1969 for (i = start; i < XVECLEN (dst, 0); i++)
1970 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
fffa9c1d
JW
1971}
1972
aac5cc16
RH
1973/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1974 registers represented by a PARALLEL. SSIZE represents the total size of
1975 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
fffa9c1d
JW
1976
1977void
aac5cc16
RH
1978emit_group_store (orig_dst, src, ssize, align)
1979 rtx orig_dst, src;
1980 int ssize, align;
fffa9c1d 1981{
aac5cc16
RH
1982 rtx *tmps, dst;
1983 int start, i;
fffa9c1d 1984
aac5cc16 1985 if (GET_CODE (src) != PARALLEL)
fffa9c1d
JW
1986 abort ();
1987
1988 /* Check for a NULL entry, used to indicate that the parameter goes
1989 both on the stack and in registers. */
aac5cc16
RH
1990 if (XEXP (XVECEXP (src, 0, 0), 0))
1991 start = 0;
fffa9c1d 1992 else
aac5cc16
RH
1993 start = 1;
1994
1995	  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
fffa9c1d 1996
aac5cc16
RH
1997 /* Copy the (probable) hard regs into pseudos. */
1998 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 1999 {
aac5cc16
RH
2000 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2001 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2002 emit_move_insn (tmps[i], reg);
2003 }
2004	  emit_queue ();
fffa9c1d 2005
aac5cc16
RH
2006 /* If we won't be storing directly into memory, protect the real destination
2007 from strange tricks we might play. */
2008 dst = orig_dst;
2009 if (GET_CODE (dst) != MEM)
2010 {
2011 dst = gen_reg_rtx (GET_MODE (orig_dst));
2012 /* Make life a bit easier for combine. */
2013 emit_move_insn (dst, const0_rtx);
2014 }
2015 else if (! MEM_IN_STRUCT_P (dst))
2016 {
2017 /* store_bit_field requires that memory operations have
2018	 mem_in_struct_p set; ours might not have it.  */
fffa9c1d 2019
aac5cc16
RH
2020 dst = copy_rtx (orig_dst);
2021 MEM_IN_STRUCT_P (dst) = 1;
2022 }
2023
2024 /* Process the pieces. */
2025 for (i = start; i < XVECLEN (src, 0); i++)
2026 {
2027 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2028 enum machine_mode mode = GET_MODE (tmps[i]);
2029 int bytelen = GET_MODE_SIZE (mode);
2030
2031 /* Handle trailing fragments that run over the size of the struct. */
2032 if (ssize >= 0 && bytepos + bytelen > ssize)
71bc0330 2033 {
aac5cc16
RH
2034 if (BYTES_BIG_ENDIAN)
2035 {
2036 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2037	      tmps[i] = expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2038	      tmps[i], 0, OPTAB_WIDEN);
2039 }
2040 bytelen = ssize - bytepos;
71bc0330 2041 }
fffa9c1d 2042
aac5cc16
RH
2043 /* Optimize the access just a bit. */
2044 if (GET_CODE (dst) == MEM
2045	  && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2046	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2047 && bytelen == GET_MODE_SIZE (mode))
2048 {
2049 emit_move_insn (change_address (dst, mode,
2050 plus_constant (XEXP (dst, 0),
2051 bytepos)),
2052 tmps[i]);
2053 }
2054 else
2055 {
2056	  store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2057 mode, tmps[i], align, ssize);
2058 }
fffa9c1d 2059 }
aac5cc16
RH
2060 emit_queue ();
2061
2062 /* Copy from the pseudo into the (probable) hard reg. */
2063 if (GET_CODE (dst) == REG)
2064 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
2065}
2066
94b25f81
RK
2067/* Add a USE expression for REG to the (possibly empty) list pointed
2068 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2069
2070void
b3f8cf4a
RK
2071use_reg (call_fusage, reg)
2072 rtx *call_fusage, reg;
2073{
0304dfbb
DE
2074 if (GET_CODE (reg) != REG
2075 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
b3f8cf4a
RK
2076 abort ();
2077
2078 *call_fusage
38a448ca
RH
2079 = gen_rtx_EXPR_LIST (VOIDmode,
2080 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2081}
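
/* For reference (a sketch): after two use_reg calls, *CALL_FUSAGE is a
   chain of the form

	(expr_list (use (reg 1))
	   (expr_list (use (reg 0)) <previous contents>))

   which ultimately becomes the CALL_INSN_FUNCTION_USAGE of the call
   insn.  */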
2082
94b25f81
RK
2083/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2084 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2085
2086void
0304dfbb
DE
2087use_regs (call_fusage, regno, nregs)
2088 rtx *call_fusage;
bbf6f052
RK
2089 int regno;
2090 int nregs;
2091{
0304dfbb 2092 int i;
bbf6f052 2093
0304dfbb
DE
2094 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2095 abort ();
2096
2097 for (i = 0; i < nregs; i++)
38a448ca 2098 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
bbf6f052 2099}
fffa9c1d
JW
2100
2101/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2102 PARALLEL REGS. This is for calls that pass values in multiple
2103 non-contiguous locations. The Irix 6 ABI has examples of this. */
2104
2105void
2106use_group_regs (call_fusage, regs)
2107 rtx *call_fusage;
2108 rtx regs;
2109{
2110 int i;
2111
6bd35f86
DE
2112 for (i = 0; i < XVECLEN (regs, 0); i++)
2113 {
2114 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2115
6bd35f86
DE
2116 /* A NULL entry means the parameter goes both on the stack and in
2117 registers. This can also be a MEM for targets that pass values
2118 partially on the stack and partially in registers. */
e9a25f70 2119 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
2120 use_reg (call_fusage, reg);
2121 }
fffa9c1d 2122}
bbf6f052 2123\f
9de08200
RK
2124/* Generate several move instructions to clear LEN bytes of block TO.
2125 (TO is a MEM rtx with BLKmode.)  The caller must pass TO through
2126 protect_from_queue before calling.  ALIGN (in bytes) is the maximum
2127 alignment we can assume. */
2128
2129static void
2130clear_by_pieces (to, len, align)
2131 rtx to;
2132 int len, align;
2133{
2134 struct clear_by_pieces data;
2135 rtx to_addr = XEXP (to, 0);
2136 int max_size = MOVE_MAX + 1;
2137
2138 data.offset = 0;
2139 data.to_addr = to_addr;
2140 data.to = to;
2141 data.autinc_to
2142 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2143 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2144
2145 data.explicit_inc_to = 0;
2146 data.reverse
2147 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2148 if (data.reverse) data.offset = len;
2149 data.len = len;
2150
2151 data.to_struct = MEM_IN_STRUCT_P (to);
2152
2153 /* If clearing requires more than two move insns,
2154 copy addresses to registers (to make displacements shorter)
2155 and use post-increment if available. */
2156 if (!data.autinc_to
2157 && move_by_pieces_ninsns (len, align) > 2)
2158 {
2159#ifdef HAVE_PRE_DECREMENT
2160 if (data.reverse && ! data.autinc_to)
2161 {
2162 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2163 data.autinc_to = 1;
2164 data.explicit_inc_to = -1;
2165 }
2166#endif
2167#ifdef HAVE_POST_INCREMENT
2168 if (! data.reverse && ! data.autinc_to)
2169 {
2170 data.to_addr = copy_addr_to_reg (to_addr);
2171 data.autinc_to = 1;
2172 data.explicit_inc_to = 1;
2173 }
2174#endif
2175 if (!data.autinc_to && CONSTANT_P (to_addr))
2176 data.to_addr = copy_addr_to_reg (to_addr);
2177 }
2178
2179 if (! SLOW_UNALIGNED_ACCESS
2180 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2181 align = MOVE_MAX;
2182
2183 /* First move what we can in the largest integer mode, then go to
2184 successively smaller modes. */
2185
2186 while (max_size > 1)
2187 {
2188 enum machine_mode mode = VOIDmode, tmode;
2189 enum insn_code icode;
2190
2191 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2192 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2193 if (GET_MODE_SIZE (tmode) < max_size)
2194 mode = tmode;
2195
2196 if (mode == VOIDmode)
2197 break;
2198
2199 icode = mov_optab->handlers[(int) mode].insn_code;
2200 if (icode != CODE_FOR_nothing
2201 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2202 GET_MODE_SIZE (mode)))
2203 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2204
2205 max_size = GET_MODE_SIZE (mode);
2206 }
2207
2208 /* The code above should have handled everything. */
2209 if (data.len != 0)
2210 abort ();
2211}
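
/* Worked example (a sketch, assuming MOVE_MAX == 4 and adequate
   alignment): clearing LEN == 7 bytes emits one SImode store of
   const0_rtx, then one HImode store, then one QImode store, covering
   4 + 2 + 1 bytes.  */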
2212
2213/* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2214 with move instructions for mode MODE. GENFUN is the gen_... function
2215 to make a move insn for that mode. DATA has all the other info. */
2216
2217static void
2218clear_by_pieces_1 (genfun, mode, data)
eae4b970 2219 rtx (*genfun) PROTO ((rtx, ...));
9de08200
RK
2220 enum machine_mode mode;
2221 struct clear_by_pieces *data;
2222{
2223 register int size = GET_MODE_SIZE (mode);
2224 register rtx to1;
2225
2226 while (data->len >= size)
2227 {
2228 if (data->reverse) data->offset -= size;
2229
2230 to1 = (data->autinc_to
38a448ca 2231 ? gen_rtx_MEM (mode, data->to_addr)
effbcc6a
RK
2232 : copy_rtx (change_address (data->to, mode,
2233 plus_constant (data->to_addr,
2234 data->offset))));
9de08200
RK
2235 MEM_IN_STRUCT_P (to1) = data->to_struct;
2236
2237#ifdef HAVE_PRE_DECREMENT
2238 if (data->explicit_inc_to < 0)
2239 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2240#endif
2241
2242 emit_insn ((*genfun) (to1, const0_rtx));
2243#ifdef HAVE_POST_INCREMENT
2244 if (data->explicit_inc_to > 0)
2245 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2246#endif
2247
2248 if (! data->reverse) data->offset += size;
2249
2250 data->len -= size;
2251 }
2252}
2253\f
bbf6f052 2254/* Write zeros through the storage of OBJECT.
9de08200 2255 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
e9a25f70 2256 the maximum alignment we can assume, measured in bytes.
bbf6f052 2257
e9a25f70
JL
2258 If we call a function that returns the length of the block, return it. */
2259
2260rtx
9de08200 2261clear_storage (object, size, align)
bbf6f052 2262 rtx object;
4c08eef0 2263 rtx size;
9de08200 2264 int align;
bbf6f052 2265{
52cf7115
JL
2266#ifdef TARGET_MEM_FUNCTIONS
2267 static tree fn;
2268 tree call_expr, arg_list;
2269#endif
e9a25f70
JL
2270 rtx retval = 0;
2271
bbf6f052
RK
2272 if (GET_MODE (object) == BLKmode)
2273 {
9de08200
RK
2274 object = protect_from_queue (object, 1);
2275 size = protect_from_queue (size, 0);
2276
2277 if (GET_CODE (size) == CONST_INT
2278 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2279 clear_by_pieces (object, INTVAL (size), align);
2280
2281 else
2282 {
2283 /* Try the most limited insn first, because there's no point
2284 including more than one in the machine description unless
2285 the more limited one has some advantage. */
2286
2287 rtx opalign = GEN_INT (align);
2288 enum machine_mode mode;
2289
2290 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2291 mode = GET_MODE_WIDER_MODE (mode))
2292 {
2293 enum insn_code code = clrstr_optab[(int) mode];
2294
2295 if (code != CODE_FOR_nothing
2296 /* We don't need MODE to be narrower than
2297 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2298 the mode mask, as it is returned by the macro, it will
2299 definitely be less than the actual mode mask. */
2300 && ((GET_CODE (size) == CONST_INT
2301 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 2302 <= (GET_MODE_MASK (mode) >> 1)))
9de08200
RK
2303 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2304 && (insn_operand_predicate[(int) code][0] == 0
2305 || (*insn_operand_predicate[(int) code][0]) (object,
2306 BLKmode))
2307 && (insn_operand_predicate[(int) code][2] == 0
2308 || (*insn_operand_predicate[(int) code][2]) (opalign,
2309 VOIDmode)))
2310 {
2311 rtx op1;
2312 rtx last = get_last_insn ();
2313 rtx pat;
2314
2315 op1 = convert_to_mode (mode, size, 1);
2316 if (insn_operand_predicate[(int) code][1] != 0
2317 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2318 mode))
2319 op1 = copy_to_mode_reg (mode, op1);
2320
2321 pat = GEN_FCN ((int) code) (object, op1, opalign);
2322 if (pat)
2323 {
2324 emit_insn (pat);
e9a25f70 2325 return 0;
9de08200
RK
2326 }
2327 else
2328 delete_insns_since (last);
2329 }
2330 }
2331
2332
bbf6f052 2333#ifdef TARGET_MEM_FUNCTIONS
52cf7115
JL
2334 /* It is incorrect to use the libcall calling conventions to call
2335 memset in this context.
2336
2337 This could be a user call to memset and the user may wish to
2338 examine the return value from memset.
2339
2340 For targets where libcalls and normal calls have different conventions
2341 for returning pointers, we could end up generating incorrect code.
2342
2343 So instead of using a libcall sequence we build up a suitable
2344 CALL_EXPR and expand the call in the normal fashion. */
2345 if (fn == NULL_TREE)
2346 {
2347 tree fntype;
2348
2349	  /* This was copied from except.c; I don't know whether all of
2350	     this is necessary in this context or not. */
2351 fn = get_identifier ("memset");
2352 push_obstacks_nochange ();
2353 end_temporary_allocation ();
2354 fntype = build_pointer_type (void_type_node);
2355 fntype = build_function_type (fntype, NULL_TREE);
2356 fn = build_decl (FUNCTION_DECL, fn, fntype);
2357 DECL_EXTERNAL (fn) = 1;
2358 TREE_PUBLIC (fn) = 1;
2359 DECL_ARTIFICIAL (fn) = 1;
2360 make_decl_rtl (fn, NULL_PTR, 1);
2361 assemble_external (fn);
2362 pop_obstacks ();
2363 }
2364
2365 /* We need to make an argument list for the function call.
2366
2367	 memset takes three arguments: the first is a void * address, the
2368	 second an integer with the initialization value, and the last is a
2369	 size_t byte count for the area to set. */
2370 arg_list
2371 = build_tree_list (NULL_TREE,
2372 make_tree (build_pointer_type (void_type_node),
2373 XEXP (object, 0)));
2374 TREE_CHAIN (arg_list)
2375 = build_tree_list (NULL_TREE,
2376 make_tree (integer_type_node, const0_rtx));
2377 TREE_CHAIN (TREE_CHAIN (arg_list))
2378 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2379 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2380
2381 /* Now we have to build up the CALL_EXPR itself. */
2382 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2383 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2384 call_expr, arg_list, NULL_TREE);
2385 TREE_SIDE_EFFECTS (call_expr) = 1;
2386
2387 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
bbf6f052 2388#else
9de08200
RK
2389 emit_library_call (bzero_libfunc, 0,
2390 VOIDmode, 2,
2391 XEXP (object, 0), Pmode,
e9a25f70
JL
2392 convert_to_mode
2393 (TYPE_MODE (integer_type_node), size,
2394 TREE_UNSIGNED (integer_type_node)),
9de08200 2395 TYPE_MODE (integer_type_node));
bbf6f052 2396#endif
9de08200 2397 }
bbf6f052
RK
2398 }
2399 else
66ed0683 2400 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
e9a25f70
JL
2401
2402 return retval;
bbf6f052
RK
2403}
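
/* A hypothetical usage sketch: clearing a 32-byte BLKmode object with
   4-byte alignment,

	clear_storage (object, GEN_INT (32), 4);

   tries clear_by_pieces first, then any clrstr pattern, and finally
   falls back to the memset/bzero call built above.  */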
2404
2405/* Generate code to copy Y into X.
2406 Both Y and X must have the same mode, except that
2407 Y can be a constant with VOIDmode.
2408 This mode cannot be BLKmode; use emit_block_move for that.
2409
2410 Return the last instruction emitted. */
2411
2412rtx
2413emit_move_insn (x, y)
2414 rtx x, y;
2415{
2416 enum machine_mode mode = GET_MODE (x);
bbf6f052
RK
2417
2418 x = protect_from_queue (x, 1);
2419 y = protect_from_queue (y, 0);
2420
2421 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2422 abort ();
2423
2424 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2425 y = force_const_mem (mode, y);
2426
2427 /* If X or Y are memory references, verify that their addresses are valid
2428 for the machine. */
2429 if (GET_CODE (x) == MEM
2430 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2431 && ! push_operand (x, GET_MODE (x)))
2432 || (flag_force_addr
2433 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2434 x = change_address (x, VOIDmode, XEXP (x, 0));
2435
2436 if (GET_CODE (y) == MEM
2437 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2438 || (flag_force_addr
2439 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2440 y = change_address (y, VOIDmode, XEXP (y, 0));
2441
2442 if (mode == BLKmode)
2443 abort ();
2444
261c4230
RS
2445 return emit_move_insn_1 (x, y);
2446}
2447
2448/* Low level part of emit_move_insn.
2449 Called just like emit_move_insn, but assumes X and Y
2450 are basically valid. */
2451
2452rtx
2453emit_move_insn_1 (x, y)
2454 rtx x, y;
2455{
2456 enum machine_mode mode = GET_MODE (x);
2457 enum machine_mode submode;
2458 enum mode_class class = GET_MODE_CLASS (mode);
2459 int i;
2460
bbf6f052
RK
2461 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2462 return
2463 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2464
89742723 2465 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2466 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
d0c76654
RK
2467 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2468 * BITS_PER_UNIT),
2469 (class == MODE_COMPLEX_INT
2470 ? MODE_INT : MODE_FLOAT),
2471 0))
7308a047
RS
2472 && (mov_optab->handlers[(int) submode].insn_code
2473 != CODE_FOR_nothing))
2474 {
2475 /* Don't split destination if it is a stack push. */
2476 int stack = push_operand (x, GET_MODE (x));
7308a047 2477
7308a047
RS
2478 /* If this is a stack push, push the highpart first, so it
2479 will be in the argument order.
2480
2481 In that case, change_address is used only to convert
2482 the mode, not to change the address. */
c937357e
RS
2483 if (stack)
2484 {
e33c0d66
RS
2485 /* Note that the real part always precedes the imag part in memory
2486 regardless of machine's endianness. */
c937357e
RS
2487#ifdef STACK_GROWS_DOWNWARD
2488 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2489 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2490 gen_imagpart (submode, y)));
c937357e 2491 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2492 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2493 gen_realpart (submode, y)));
c937357e
RS
2494#else
2495 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2496 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2497 gen_realpart (submode, y)));
c937357e 2498 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2499 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2500 gen_imagpart (submode, y)));
c937357e
RS
2501#endif
2502 }
2503 else
2504 {
2638126a
BS
2505 /* Show the output dies here. */
2506 if (x != y)
9e6a5703 2507 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2638126a 2508
c937357e 2509 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 2510 (gen_realpart (submode, x), gen_realpart (submode, y)));
c937357e 2511 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 2512 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
c937357e 2513 }
7308a047 2514
7a1ab50a 2515 return get_last_insn ();
7308a047
RS
2516 }
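
/* Example of the branch above (a sketch): an SCmode move with no movsc
   pattern, on a target where SFmode does have one, becomes roughly

	emit_move_insn (gen_realpart (SFmode, x), gen_realpart (SFmode, y));
	emit_move_insn (gen_imagpart (SFmode, x), gen_imagpart (SFmode, y));

   apart from the CLOBBER and the stack-push special case.  */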
2517
bbf6f052
RK
2518 /* This will handle any multi-word mode that lacks a move_insn pattern.
2519 However, you will get better code if you define such patterns,
2520 even if they must turn into multiple assembler instructions. */
a4320483 2521 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
bbf6f052
RK
2522 {
2523 rtx last_insn = 0;
6551fa4d 2524
a98c9f1a
RK
2525#ifdef PUSH_ROUNDING
2526
2527 /* If X is a push on the stack, do the push now and replace
2528 X with a reference to the stack pointer. */
2529 if (push_operand (x, GET_MODE (x)))
2530 {
2531 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2532 x = change_address (x, VOIDmode, stack_pointer_rtx);
2533 }
2534#endif
2535
15a7a8ec 2536 /* Show the output dies here. */
43e046cb 2537 if (x != y)
38a448ca 2538 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
15a7a8ec 2539
bbf6f052
RK
2540 for (i = 0;
2541 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2542 i++)
2543 {
2544 rtx xpart = operand_subword (x, i, 1, mode);
2545 rtx ypart = operand_subword (y, i, 1, mode);
2546
2547 /* If we can't get a part of Y, put Y into memory if it is a
2548 constant. Otherwise, force it into a register. If we still
2549 can't get a part of Y, abort. */
2550 if (ypart == 0 && CONSTANT_P (y))
2551 {
2552 y = force_const_mem (mode, y);
2553 ypart = operand_subword (y, i, 1, mode);
2554 }
2555 else if (ypart == 0)
2556 ypart = operand_subword_force (y, i, mode);
2557
2558 if (xpart == 0 || ypart == 0)
2559 abort ();
2560
2561 last_insn = emit_move_insn (xpart, ypart);
2562 }
6551fa4d 2563
bbf6f052
RK
2564 return last_insn;
2565 }
2566 else
2567 abort ();
2568}
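
/* A sketch of the multi-word fallback above: a DImode move on a 32-bit
   target with no movdi pattern decomposes into word moves,

	emit_move_insn (operand_subword (x, 0, 1, DImode),
			operand_subword (y, 0, 1, DImode));
	emit_move_insn (operand_subword (x, 1, 1, DImode),
			operand_subword (y, 1, 1, DImode));

   preceded by a CLOBBER of X so the lifetime information stays
   correct.  */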
2569\f
2570/* Pushing data onto the stack. */
2571
2572/* Push a block of length SIZE (perhaps variable)
2573 and return an rtx to address the beginning of the block.
2574 Note that it is not possible for the value returned to be a QUEUED.
2575 The value may be virtual_outgoing_args_rtx.
2576
2577 EXTRA is the number of bytes of padding to push in addition to SIZE.
2578 BELOW nonzero means this padding comes at low addresses;
2579 otherwise, the padding comes at high addresses. */
2580
2581rtx
2582push_block (size, extra, below)
2583 rtx size;
2584 int extra, below;
2585{
2586 register rtx temp;
88f63c77
RK
2587
2588 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
2589 if (CONSTANT_P (size))
2590 anti_adjust_stack (plus_constant (size, extra));
2591 else if (GET_CODE (size) == REG && extra == 0)
2592 anti_adjust_stack (size);
2593 else
2594 {
2595 rtx temp = copy_to_mode_reg (Pmode, size);
2596 if (extra != 0)
906c4e36 2597 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
2598 temp, 0, OPTAB_LIB_WIDEN);
2599 anti_adjust_stack (temp);
2600 }
2601
2602#ifdef STACK_GROWS_DOWNWARD
2603 temp = virtual_outgoing_args_rtx;
2604 if (extra != 0 && below)
2605 temp = plus_constant (temp, extra);
2606#else
2607 if (GET_CODE (size) == CONST_INT)
2608 temp = plus_constant (virtual_outgoing_args_rtx,
2609 - INTVAL (size) - (below ? 0 : extra));
2610 else if (extra != 0 && !below)
38a448ca 2611 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
bbf6f052
RK
2612 negate_rtx (Pmode, plus_constant (size, extra)));
2613 else
38a448ca 2614 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
bbf6f052
RK
2615 negate_rtx (Pmode, size));
2616#endif
2617
2618 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2619}
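
/* A hypothetical usage sketch: push_block (GEN_INT (16), 4, 1)
   allocates room for a 16-byte block plus 4 bytes of padding below it,
   and returns an address designating the start of the 16 data
   bytes.  */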
2620
87e38d84 2621rtx
bbf6f052
RK
2622gen_push_operand ()
2623{
38a448ca 2624 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
bbf6f052
RK
2625}
2626
921b3427
RK
2627/* Return an rtx for the address of the beginning of an as-if-it-had-been-pushed
2628 block of SIZE bytes. */
2629
2630static rtx
2631get_push_address (size)
2632 int size;
2633{
2634 register rtx temp;
2635
2636 if (STACK_PUSH_CODE == POST_DEC)
38a448ca 2637 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427 2638 else if (STACK_PUSH_CODE == POST_INC)
38a448ca 2639 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427
RK
2640 else
2641 temp = stack_pointer_rtx;
2642
c85f7c16 2643 return copy_to_reg (temp);
921b3427
RK
2644}
2645
bbf6f052
RK
2646/* Generate code to push X onto the stack, assuming it has mode MODE and
2647 type TYPE.
2648 MODE is redundant except when X is a CONST_INT (since they don't
2649 carry mode info).
2650 SIZE is an rtx for the size of data to be copied (in bytes),
2651 needed only if X is BLKmode.
2652
2653 ALIGN (in bytes) is maximum alignment we can assume.
2654
cd048831
RK
2655 If PARTIAL and REG are both nonzero, then copy that many of the first
2656 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
2657 The amount of space pushed is decreased by PARTIAL words,
2658 rounded *down* to a multiple of PARM_BOUNDARY.
2659 REG must be a hard register in this case.
cd048831
RK
2660 If REG is zero but PARTIAL is not, take all other actions for an
2661 argument partially in registers, but do not actually load any
2662 registers.
bbf6f052
RK
2663
2664 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 2665 This is ignored if an argument block has already been allocated.
bbf6f052
RK
2666
2667 On a machine that lacks real push insns, ARGS_ADDR is the address of
2668 the bottom of the argument block for this call. We use indexing off there
2669 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2670 argument block has not been preallocated.
2671
e5e809f4
JL
2672 ARGS_SO_FAR is the size of args previously pushed for this call.
2673
2674 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2675 for arguments passed in registers. If nonzero, it will be the number
2676 of bytes required. */
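
/* For illustration (a hypothetical case): PARTIAL == 2 with a BLKmode
   X means the first two words of X travel in REG and REG+1; only the
   remainder is copied to the stack, and the stack space used shrinks
   by two words, rounded down to a multiple of PARM_BOUNDARY.  */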
bbf6f052
RK
2677
2678void
2679emit_push_insn (x, mode, type, size, align, partial, reg, extra,
e5e809f4 2680 args_addr, args_so_far, reg_parm_stack_space)
bbf6f052
RK
2681 register rtx x;
2682 enum machine_mode mode;
2683 tree type;
2684 rtx size;
2685 int align;
2686 int partial;
2687 rtx reg;
2688 int extra;
2689 rtx args_addr;
2690 rtx args_so_far;
e5e809f4 2691 int reg_parm_stack_space;
bbf6f052
RK
2692{
2693 rtx xinner;
2694 enum direction stack_direction
2695#ifdef STACK_GROWS_DOWNWARD
2696 = downward;
2697#else
2698 = upward;
2699#endif
2700
2701 /* Decide where to pad the argument: `downward' for below,
2702 `upward' for above, or `none' for don't pad it.
2703 Default is below for small data on big-endian machines; else above. */
2704 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2705
2706 /* Invert direction if stack is post-update. */
2707 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2708 if (where_pad != none)
2709 where_pad = (where_pad == downward ? upward : downward);
2710
2711 xinner = x = protect_from_queue (x, 0);
2712
2713 if (mode == BLKmode)
2714 {
2715 /* Copy a block into the stack, entirely or partially. */
2716
2717 register rtx temp;
2718 int used = partial * UNITS_PER_WORD;
2719 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2720 int skip;
2721
2722 if (size == 0)
2723 abort ();
2724
2725 used -= offset;
2726
2727 /* USED is now the # of bytes we need not copy to the stack
2728 because registers will take care of them. */
2729
2730 if (partial != 0)
2731 xinner = change_address (xinner, BLKmode,
2732 plus_constant (XEXP (xinner, 0), used));
2733
2734 /* If the partial register-part of the arg counts in its stack size,
2735 skip the part of stack space corresponding to the registers.
2736 Otherwise, start copying to the beginning of the stack space,
2737 by setting SKIP to 0. */
e5e809f4 2738 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
2739
2740#ifdef PUSH_ROUNDING
2741 /* Do it with several push insns if that doesn't take lots of insns
2742 and if there is no difficulty with push insns that skip bytes
2743 on the stack for alignment purposes. */
2744 if (args_addr == 0
2745 && GET_CODE (size) == CONST_INT
2746 && skip == 0
2747 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2748 < MOVE_RATIO)
bbf6f052
RK
2749 /* Here we avoid the case of a structure whose weak alignment
2750 forces many pushes of a small amount of data,
2751 and such small pushes do rounding that causes trouble. */
c7a7ac46 2752 && ((! SLOW_UNALIGNED_ACCESS)
e87b4f3f 2753 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 2754 || PUSH_ROUNDING (align) == align)
bbf6f052
RK
2755 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2756 {
2757 /* Push padding now if padding above and stack grows down,
2758 or if padding below and stack grows up.
2759 But if space already allocated, this has already been done. */
2760 if (extra && args_addr == 0
2761 && where_pad != none && where_pad != stack_direction)
906c4e36 2762 anti_adjust_stack (GEN_INT (extra));
bbf6f052 2763
38a448ca 2764 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
bbf6f052 2765 INTVAL (size) - used, align);
921b3427 2766
956d6950 2767 if (flag_check_memory_usage && ! in_check_memory_usage)
921b3427
RK
2768 {
2769 rtx temp;
2770
956d6950 2771 in_check_memory_usage = 1;
921b3427 2772 temp = get_push_address (INTVAL (size) - used);
c85f7c16 2773 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427
RK
2774 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2775 temp, ptr_mode,
2776 XEXP (xinner, 0), ptr_mode,
2777 GEN_INT (INTVAL (size) - used),
2778 TYPE_MODE (sizetype));
2779 else
2780 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2781 temp, ptr_mode,
2782 GEN_INT (INTVAL (size) - used),
2783 TYPE_MODE (sizetype),
956d6950
JL
2784 GEN_INT (MEMORY_USE_RW),
2785 TYPE_MODE (integer_type_node));
2786 in_check_memory_usage = 0;
921b3427 2787 }
bbf6f052
RK
2788 }
2789 else
2790#endif /* PUSH_ROUNDING */
2791 {
2792 /* Otherwise make space on the stack and copy the data
2793 to the address of that space. */
2794
2795 /* Deduct words put into registers from the size we must copy. */
2796 if (partial != 0)
2797 {
2798 if (GET_CODE (size) == CONST_INT)
906c4e36 2799 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
2800 else
2801 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
2802 GEN_INT (used), NULL_RTX, 0,
2803 OPTAB_LIB_WIDEN);
bbf6f052
RK
2804 }
2805
2806 /* Get the address of the stack space.
2807 In this case, we do not deal with EXTRA separately.
2808 A single stack adjust will do. */
2809 if (! args_addr)
2810 {
2811 temp = push_block (size, extra, where_pad == downward);
2812 extra = 0;
2813 }
2814 else if (GET_CODE (args_so_far) == CONST_INT)
2815 temp = memory_address (BLKmode,
2816 plus_constant (args_addr,
2817 skip + INTVAL (args_so_far)));
2818 else
2819 temp = memory_address (BLKmode,
38a448ca
RH
2820 plus_constant (gen_rtx_PLUS (Pmode,
2821 args_addr,
2822 args_so_far),
bbf6f052 2823 skip));
956d6950 2824 if (flag_check_memory_usage && ! in_check_memory_usage)
921b3427
RK
2825 {
2826 rtx target;
2827
956d6950 2828 in_check_memory_usage = 1;
921b3427 2829 target = copy_to_reg (temp);
c85f7c16 2830 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427
RK
2831 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2832 target, ptr_mode,
2833 XEXP (xinner, 0), ptr_mode,
2834 size, TYPE_MODE (sizetype));
2835 else
2836 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2837 target, ptr_mode,
2838 size, TYPE_MODE (sizetype),
956d6950
JL
2839 GEN_INT (MEMORY_USE_RW),
2840 TYPE_MODE (integer_type_node));
2841 in_check_memory_usage = 0;
921b3427 2842 }
bbf6f052
RK
2843
2844 /* TEMP is the address of the block. Copy the data there. */
2845 if (GET_CODE (size) == CONST_INT
2846 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2847 < MOVE_RATIO))
2848 {
38a448ca 2849 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
bbf6f052
RK
2850 INTVAL (size), align);
2851 goto ret;
2852 }
e5e809f4 2853 else
bbf6f052 2854 {
e5e809f4
JL
2855 rtx opalign = GEN_INT (align);
2856 enum machine_mode mode;
9e6a5703 2857 rtx target = gen_rtx_MEM (BLKmode, temp);
e5e809f4
JL
2858
2859 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2860 mode != VOIDmode;
2861 mode = GET_MODE_WIDER_MODE (mode))
c841050e 2862 {
e5e809f4
JL
2863 enum insn_code code = movstr_optab[(int) mode];
2864
2865 if (code != CODE_FOR_nothing
2866 && ((GET_CODE (size) == CONST_INT
2867 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2868 <= (GET_MODE_MASK (mode) >> 1)))
2869 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2870 && (insn_operand_predicate[(int) code][0] == 0
2871 || ((*insn_operand_predicate[(int) code][0])
2872 (target, BLKmode)))
2873 && (insn_operand_predicate[(int) code][1] == 0
2874 || ((*insn_operand_predicate[(int) code][1])
2875 (xinner, BLKmode)))
2876 && (insn_operand_predicate[(int) code][3] == 0
2877 || ((*insn_operand_predicate[(int) code][3])
2878 (opalign, VOIDmode))))
2879 {
2880 rtx op2 = convert_to_mode (mode, size, 1);
2881 rtx last = get_last_insn ();
2882 rtx pat;
2883
2884 if (insn_operand_predicate[(int) code][2] != 0
2885 && ! ((*insn_operand_predicate[(int) code][2])
2886 (op2, mode)))
2887 op2 = copy_to_mode_reg (mode, op2);
2888
2889 pat = GEN_FCN ((int) code) (target, xinner,
2890 op2, opalign);
2891 if (pat)
2892 {
2893 emit_insn (pat);
2894 goto ret;
2895 }
2896 else
2897 delete_insns_since (last);
2898 }
c841050e 2899 }
bbf6f052 2900 }
bbf6f052
RK
2901
2902#ifndef ACCUMULATE_OUTGOING_ARGS
2903 /* If the source is referenced relative to the stack pointer,
2904 copy it to another register to stabilize it. We do not need
2905 to do this if we know that we won't be changing sp. */
2906
2907 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2908 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2909 temp = copy_to_reg (temp);
2910#endif
2911
2912 /* Make inhibit_defer_pop nonzero around the library call
2913 to force it to pop the bcopy-arguments right away. */
2914 NO_DEFER_POP;
2915#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2916 emit_library_call (memcpy_libfunc, 0,
bbf6f052 2917 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
0fa83258
RK
2918 convert_to_mode (TYPE_MODE (sizetype),
2919 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2920 TYPE_MODE (sizetype));
bbf6f052 2921#else
d562e42e 2922 emit_library_call (bcopy_libfunc, 0,
bbf6f052 2923 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3b6f75e2
JW
2924 convert_to_mode (TYPE_MODE (integer_type_node),
2925 size,
2926 TREE_UNSIGNED (integer_type_node)),
2927 TYPE_MODE (integer_type_node));
bbf6f052
RK
2928#endif
2929 OK_DEFER_POP;
2930 }
2931 }
2932 else if (partial > 0)
2933 {
2934 /* Scalar partly in registers. */
2935
2936 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2937 int i;
2938 int not_stack;
2939 /* # words of start of argument
2940 that we must make space for but need not store. */
2941 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2942 int args_offset = INTVAL (args_so_far);
2943 int skip;
2944
2945 /* Push padding now if padding above and stack grows down,
2946 or if padding below and stack grows up.
2947 But if space already allocated, this has already been done. */
2948 if (extra && args_addr == 0
2949 && where_pad != none && where_pad != stack_direction)
906c4e36 2950 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2951
2952 /* If we make space by pushing it, we might as well push
2953 the real data. Otherwise, we can leave OFFSET nonzero
2954 and leave the space uninitialized. */
2955 if (args_addr == 0)
2956 offset = 0;
2957
2958 /* Now NOT_STACK gets the number of words that we don't need to
2959 allocate on the stack. */
2960 not_stack = partial - offset;
2961
2962 /* If the partial register-part of the arg counts in its stack size,
2963 skip the part of stack space corresponding to the registers.
2964 Otherwise, start copying to the beginning of the stack space,
2965 by setting SKIP to 0. */
e5e809f4 2966 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
2967
2968 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2969 x = validize_mem (force_const_mem (mode, x));
2970
2971 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2972 SUBREGs of such registers are not allowed. */
2973 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2974 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2975 x = copy_to_reg (x);
2976
2977 /* Loop over all the words allocated on the stack for this arg. */
2978 /* We can do it by words, because any scalar bigger than a word
2979 has a size that is a multiple of a word. */
2980#ifndef PUSH_ARGS_REVERSED
2981 for (i = not_stack; i < size; i++)
2982#else
2983 for (i = size - 1; i >= not_stack; i--)
2984#endif
2985 if (i >= not_stack + offset)
2986 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
2987 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2988 0, args_addr,
2989 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4
JL
2990 * UNITS_PER_WORD)),
2991 reg_parm_stack_space);
bbf6f052
RK
2992 }
2993 else
2994 {
2995 rtx addr;
921b3427 2996 rtx target = NULL_RTX;
bbf6f052
RK
2997
2998 /* Push padding now if padding above and stack grows down,
2999 or if padding below and stack grows up.
3000 But if space already allocated, this has already been done. */
3001 if (extra && args_addr == 0
3002 && where_pad != none && where_pad != stack_direction)
906c4e36 3003 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3004
3005#ifdef PUSH_ROUNDING
3006 if (args_addr == 0)
3007 addr = gen_push_operand ();
3008 else
3009#endif
921b3427
RK
3010 {
3011 if (GET_CODE (args_so_far) == CONST_INT)
3012 addr
3013 = memory_address (mode,
3014 plus_constant (args_addr,
3015 INTVAL (args_so_far)));
3016 else
38a448ca
RH
3017 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3018 args_so_far));
921b3427
RK
3019 target = addr;
3020 }
bbf6f052 3021
38a448ca 3022 emit_move_insn (gen_rtx_MEM (mode, addr), x);
921b3427 3023
956d6950 3024 if (flag_check_memory_usage && ! in_check_memory_usage)
921b3427 3025 {
956d6950 3026 in_check_memory_usage = 1;
921b3427
RK
3027 if (target == 0)
3028 target = get_push_address (GET_MODE_SIZE (mode));
3029
c85f7c16 3030 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427
RK
3031 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3032 target, ptr_mode,
3033 XEXP (x, 0), ptr_mode,
3034 GEN_INT (GET_MODE_SIZE (mode)),
3035 TYPE_MODE (sizetype));
3036 else
3037 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3038 target, ptr_mode,
3039 GEN_INT (GET_MODE_SIZE (mode)),
3040 TYPE_MODE (sizetype),
956d6950
JL
3041 GEN_INT (MEMORY_USE_RW),
3042 TYPE_MODE (integer_type_node));
3043 in_check_memory_usage = 0;
921b3427 3044 }
bbf6f052
RK
3045 }
3046
3047 ret:
3048 /* If part should go in registers, copy that part
3049 into the appropriate registers. Do this now, at the end,
3050 since mem-to-mem copies above may do function calls. */
cd048831 3051 if (partial > 0 && reg != 0)
fffa9c1d
JW
3052 {
3053 /* Handle calls that pass values in multiple non-contiguous locations.
3054 The Irix 6 ABI has examples of this. */
3055 if (GET_CODE (reg) == PARALLEL)
aac5cc16 3056 emit_group_load (reg, x, -1, align); /* ??? size? */
fffa9c1d
JW
3057 else
3058 move_block_to_reg (REGNO (reg), x, partial, mode);
3059 }
bbf6f052
RK
3060
3061 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3062 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3063}
3064\f
bbf6f052
RK
3065/* Expand an assignment that stores the value of FROM into TO.
3066 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
3067 (This may contain a QUEUED rtx;
3068 if the value is constant, this rtx is a constant.)
3069 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
3070
3071 SUGGEST_REG is no longer actually used.
3072 It used to mean, copy the value through a register
3073 and return that register, if that is possible.
709f5be1 3074 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
3075
3076rtx
3077expand_assignment (to, from, want_value, suggest_reg)
3078 tree to, from;
3079 int want_value;
3080 int suggest_reg;
3081{
3082 register rtx to_rtx = 0;
3083 rtx result;
3084
3085 /* Don't crash if the lhs of the assignment was erroneous. */
3086
3087 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
3088 {
3089 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3090 return want_value ? result : NULL_RTX;
3091 }
bbf6f052
RK
3092
3093 /* Assignment of a structure component needs special treatment
3094 if the structure component's rtx is not simply a MEM.
6be58303
JW
3095 Assignment of an array element at a constant index, and assignment of
3096 an array element in an unaligned packed structure field, have the same
3097 problem. */
bbf6f052 3098
08293add
RK
3099 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3100 || TREE_CODE (to) == ARRAY_REF)
bbf6f052
RK
3101 {
3102 enum machine_mode mode1;
3103 int bitsize;
3104 int bitpos;
7bb0943f 3105 tree offset;
bbf6f052
RK
3106 int unsignedp;
3107 int volatilep = 0;
0088fcb1 3108 tree tem;
d78d243c 3109 int alignment;
0088fcb1
RK
3110
3111 push_temp_slots ();
839c4796
RK
3112 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3113 &unsignedp, &volatilep, &alignment);
bbf6f052
RK
3114
3115 /* If we are going to use store_bit_field and extract_bit_field,
3116 make sure to_rtx will be safe for multiple use. */
3117
3118 if (mode1 == VOIDmode && want_value)
3119 tem = stabilize_reference (tem);
3120
921b3427 3121 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
7bb0943f
RS
3122 if (offset != 0)
3123 {
906c4e36 3124 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
3125
3126 if (GET_CODE (to_rtx) != MEM)
3127 abort ();
bd070e1a
RH
3128
3129 if (GET_MODE (offset_rtx) != ptr_mode)
3130 {
3131#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 3132 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
3133#else
3134 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3135#endif
3136 }
3137
89752202
HB
3138 if (GET_CODE (to_rtx) == MEM
3139 && GET_MODE (to_rtx) == BLKmode
3140 && bitsize
3141 && (bitpos % bitsize) == 0
3142 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3143 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3144 {
3145 rtx temp = change_address (to_rtx, mode1,
3146 plus_constant (XEXP (to_rtx, 0),
3147 (bitpos /
3148 BITS_PER_UNIT)));
3149 if (GET_CODE (XEXP (temp, 0)) == REG)
3150 to_rtx = temp;
3151 else
3152 to_rtx = change_address (to_rtx, mode1,
3153 force_reg (GET_MODE (XEXP (temp, 0)),
3154 XEXP (temp, 0)));
3155 bitpos = 0;
3156 }
3157
7bb0943f 3158 to_rtx = change_address (to_rtx, VOIDmode,
38a448ca
RH
3159 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3160 force_reg (ptr_mode, offset_rtx)));
7bb0943f 3161 }
bbf6f052
RK
3162 if (volatilep)
3163 {
3164 if (GET_CODE (to_rtx) == MEM)
01188446
JW
3165 {
3166 /* When the offset is zero, to_rtx is the address of the
3167 structure we are storing into, and hence may be shared.
3168 We must make a new MEM before setting the volatile bit. */
3169 if (offset == 0)
effbcc6a
RK
3170 to_rtx = copy_rtx (to_rtx);
3171
01188446
JW
3172 MEM_VOLATILE_P (to_rtx) = 1;
3173 }
bbf6f052
RK
3174#if 0 /* This was turned off because, when a field is volatile
3175 in an object which is not volatile, the object may be in a register,
3176 and then we would abort over here. */
3177 else
3178 abort ();
3179#endif
3180 }
3181
956d6950
JL
3182 if (TREE_CODE (to) == COMPONENT_REF
3183 && TREE_READONLY (TREE_OPERAND (to, 1)))
3184 {
8bd6ecc2 3185 if (offset == 0)
956d6950
JL
3186 to_rtx = copy_rtx (to_rtx);
3187
3188 RTX_UNCHANGING_P (to_rtx) = 1;
3189 }
3190
921b3427
RK
3191 /* Check the access. */
3192 if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
3193 {
3194 rtx to_addr;
3195 int size;
3196 int best_mode_size;
3197 enum machine_mode best_mode;
3198
3199 best_mode = get_best_mode (bitsize, bitpos,
3200 TYPE_ALIGN (TREE_TYPE (tem)),
3201 mode1, volatilep);
3202 if (best_mode == VOIDmode)
3203 best_mode = QImode;
3204
3205 best_mode_size = GET_MODE_BITSIZE (best_mode);
3206 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3207 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3208 size *= GET_MODE_SIZE (best_mode);
3209
3210 /* Check the access right of the pointer. */
e9a25f70
JL
3211 if (size)
3212 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3213 to_addr, ptr_mode,
3214 GEN_INT (size), TYPE_MODE (sizetype),
956d6950
JL
3215 GEN_INT (MEMORY_USE_WO),
3216 TYPE_MODE (integer_type_node));
921b3427
RK
3217 }
3218
bbf6f052
RK
3219 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3220 (want_value
3221 /* Spurious cast makes HPUX compiler happy. */
3222 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3223 : VOIDmode),
3224 unsignedp,
3225 /* Required alignment of containing datum. */
d78d243c 3226 alignment,
ece32014
MM
3227 int_size_in_bytes (TREE_TYPE (tem)),
3228 get_alias_set (to));
bbf6f052
RK
3229 preserve_temp_slots (result);
3230 free_temp_slots ();
0088fcb1 3231 pop_temp_slots ();
bbf6f052 3232
709f5be1
RS
3233 /* If the value is meaningful, convert RESULT to the proper mode.
3234 Otherwise, return nothing. */
5ffe63ed
RS
3235 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3236 TYPE_MODE (TREE_TYPE (from)),
3237 result,
3238 TREE_UNSIGNED (TREE_TYPE (to)))
709f5be1 3239 : NULL_RTX);
bbf6f052
RK
3240 }
3241
cd1db108
RS
3242 /* If the rhs is a function call and its value is not an aggregate,
3243 call the function before we start to compute the lhs.
3244 This is needed for correct code for cases such as
3245 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
3246 requires loading up part of an address in a separate insn.
3247
3248 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3249 a promoted variable where the zero- or sign- extension needs to be done.
3250 Handling this in the normal way is safe because no computation is done
3251 before the call. */
3252 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 3253 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1ad87b63 3254 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3255 {
0088fcb1
RK
3256 rtx value;
3257
3258 push_temp_slots ();
3259 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3260 if (to_rtx == 0)
921b3427 3261 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
aaf87c45 3262
fffa9c1d
JW
3263 /* Handle calls that return values in multiple non-contiguous locations.
3264 The Irix 6 ABI has examples of this. */
3265 if (GET_CODE (to_rtx) == PARALLEL)
aac5cc16
RH
3266 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3267 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
fffa9c1d 3268 else if (GET_MODE (to_rtx) == BLKmode)
db3ec607 3269 emit_block_move (to_rtx, value, expr_size (from),
ff9b5bd8 3270 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
aaf87c45
JL
3271 else
3272 emit_move_insn (to_rtx, value);
cd1db108
RS
3273 preserve_temp_slots (to_rtx);
3274 free_temp_slots ();
0088fcb1 3275 pop_temp_slots ();
709f5be1 3276 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3277 }
3278
bbf6f052
RK
3279 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3280 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3281
3282 if (to_rtx == 0)
41472af8
MM
3283 {
3284 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3285 if (GET_CODE (to_rtx) == MEM)
3286 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3287 }
bbf6f052 3288
86d38d25
RS
3289 /* Don't move directly into a return register. */
3290 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3291 {
0088fcb1
RK
3292 rtx temp;
3293
3294 push_temp_slots ();
3295 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
86d38d25
RS
3296 emit_move_insn (to_rtx, temp);
3297 preserve_temp_slots (to_rtx);
3298 free_temp_slots ();
0088fcb1 3299 pop_temp_slots ();
709f5be1 3300 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3301 }
3302
bbf6f052
RK
3303 /* In case we are returning the contents of an object which overlaps
3304 the place the value is being stored, use a safe function when copying
3305 a value through a pointer into a structure value return block. */
3306 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3307 && current_function_returns_struct
3308 && !current_function_returns_pcc_struct)
3309 {
0088fcb1
RK
3310 rtx from_rtx, size;
3311
3312 push_temp_slots ();
33a20d10 3313 size = expr_size (from);
921b3427
RK
3314 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3315 EXPAND_MEMORY_USE_DONT);
3316
3317 /* Copy the rights of the bitmap. */
3318 if (flag_check_memory_usage)
3319 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3320 XEXP (to_rtx, 0), ptr_mode,
3321 XEXP (from_rtx, 0), ptr_mode,
3322 convert_to_mode (TYPE_MODE (sizetype),
3323 size, TREE_UNSIGNED (sizetype)),
3324 TYPE_MODE (sizetype));
bbf6f052
RK
3325
3326#ifdef TARGET_MEM_FUNCTIONS
d562e42e 3327 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
3328 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3329 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
3330 convert_to_mode (TYPE_MODE (sizetype),
3331 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3332 TYPE_MODE (sizetype));
bbf6f052 3333#else
d562e42e 3334 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
3335 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3336 XEXP (to_rtx, 0), Pmode,
3b6f75e2
JW
3337 convert_to_mode (TYPE_MODE (integer_type_node),
3338 size, TREE_UNSIGNED (integer_type_node)),
3339 TYPE_MODE (integer_type_node));
bbf6f052
RK
3340#endif
3341
3342 preserve_temp_slots (to_rtx);
3343 free_temp_slots ();
0088fcb1 3344 pop_temp_slots ();
709f5be1 3345 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3346 }
3347
3348 /* Compute FROM and store the value in the rtx we got. */
3349
0088fcb1 3350 push_temp_slots ();
bbf6f052
RK
3351 result = store_expr (from, to_rtx, want_value);
3352 preserve_temp_slots (result);
3353 free_temp_slots ();
0088fcb1 3354 pop_temp_slots ();
709f5be1 3355 return want_value ? result : NULL_RTX;
bbf6f052
RK
3356}
3357
3358/* Generate code for computing expression EXP,
3359 and storing the value into TARGET.
bbf6f052
RK
3360 TARGET may contain a QUEUED rtx.
3361
709f5be1
RS
3362 If WANT_VALUE is nonzero, return a copy of the value
3363 not in TARGET, so that we can be sure to use the proper
3364 value in a containing expression even if TARGET has something
3365 else stored in it. If possible, we copy the value through a pseudo
3366 and return that pseudo. Or, if the value is constant, we try to
3367 return the constant. In some cases, we return a pseudo
3368 copied *from* TARGET.
3369
3370 If the mode is BLKmode then we may return TARGET itself.
3371 It turns out that in BLKmode it doesn't cause a problem,
3372 because C has no operators that could combine two different
3373 assignments into the same BLKmode object with different values
3374 with no sequence point. Will other languages need this to
3375 be more thorough?
3376
3377 If WANT_VALUE is 0, we return NULL, to make sure
3378 to catch quickly any cases where the caller uses the value
3379 and fails to set WANT_VALUE. */
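
/* Example of the WANT_VALUE distinction (a sketch): for C source like
   `a = b = c', the inner assignment is expanded with WANT_VALUE
   nonzero so that its result can feed the outer assignment, whereas a
   plain statement `b = c;' passes WANT_VALUE == 0 and gets NULL_RTX
   back.  */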
bbf6f052
RK
3380
3381rtx
709f5be1 3382store_expr (exp, target, want_value)
bbf6f052
RK
3383 register tree exp;
3384 register rtx target;
709f5be1 3385 int want_value;
bbf6f052
RK
3386{
3387 register rtx temp;
3388 int dont_return_target = 0;
3389
3390 if (TREE_CODE (exp) == COMPOUND_EXPR)
3391 {
3392 /* Perform first part of compound expression, then assign from second
3393 part. */
3394 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3395 emit_queue ();
709f5be1 3396 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
3397 }
3398 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3399 {
3400 /* For conditional expression, get safe form of the target. Then
3401 test the condition, doing the appropriate assignment on either
3402 side. This avoids the creation of unnecessary temporaries.
3403 For non-BLKmode, it is more efficient not to do this. */
3404
3405 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3406
3407 emit_queue ();
3408 target = protect_from_queue (target, 1);
3409
dabf8373 3410 do_pending_stack_adjust ();
bbf6f052
RK
3411 NO_DEFER_POP;
3412 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 3413 start_cleanup_deferral ();
709f5be1 3414 store_expr (TREE_OPERAND (exp, 1), target, 0);
956d6950 3415 end_cleanup_deferral ();
bbf6f052
RK
3416 emit_queue ();
3417 emit_jump_insn (gen_jump (lab2));
3418 emit_barrier ();
3419 emit_label (lab1);
956d6950 3420 start_cleanup_deferral ();
709f5be1 3421 store_expr (TREE_OPERAND (exp, 2), target, 0);
956d6950 3422 end_cleanup_deferral ();
bbf6f052
RK
3423 emit_queue ();
3424 emit_label (lab2);
3425 OK_DEFER_POP;
a3a58acc 3426
709f5be1 3427 return want_value ? target : NULL_RTX;
bbf6f052 3428 }
709f5be1 3429 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
bbf6f052
RK
3430 && GET_MODE (target) != BLKmode)
3431 /* If target is in memory and caller wants value in a register instead,
3432 arrange that. Pass TARGET as target for expand_expr so that,
709f5be1 3433 if EXP is another assignment, WANT_VALUE will be nonzero for it.
c2e6aff6
RS
3434 We know expand_expr will not use the target in that case.
3435 Don't do this if TARGET is volatile because we are supposed
3436 to write it and then read it. */
bbf6f052 3437 {
906c4e36 3438 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
bbf6f052
RK
3439 GET_MODE (target), 0);
3440 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3441 temp = copy_to_reg (temp);
3442 dont_return_target = 1;
3443 }
3444 else if (queued_subexp_p (target))
709f5be1
RS
3445 /* If target contains a postincrement, let's not risk
3446 using it as the place to generate the rhs. */
bbf6f052
RK
3447 {
3448 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3449 {
3450 /* Expand EXP into a new pseudo. */
3451 temp = gen_reg_rtx (GET_MODE (target));
3452 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3453 }
3454 else
906c4e36 3455 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
3456
3457 /* If target is volatile, ANSI requires accessing the value
3458 *from* the target, if it is accessed. So make that happen.
3459 In no case return the target itself. */
3460 if (! MEM_VOLATILE_P (target) && want_value)
3461 dont_return_target = 1;
bbf6f052 3462 }
1499e0a8
RK
3463 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3464 /* If this is a scalar in a register that is stored in a wider mode
3465 than the declared mode, compute the result into its declared mode
3466 and then convert to the wider mode. Our value is the computed
3467 expression. */
3468 {
5a32d038 3469 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
3470 which will often result in some optimizations. Do the conversion
3471 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
3472 the extension. But don't do this if the type of EXP is a subtype
3473 of something else since then the conversion might involve
3474 more than just converting modes. */
3475 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3476 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
3477 {
3478 if (TREE_UNSIGNED (TREE_TYPE (exp))
3479 != SUBREG_PROMOTED_UNSIGNED_P (target))
3480 exp
3481 = convert
3482 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3483 TREE_TYPE (exp)),
3484 exp);
3485
3486 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3487 SUBREG_PROMOTED_UNSIGNED_P (target)),
3488 exp);
3489 }
5a32d038 3490
1499e0a8 3491 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
b258707c 3492
766f36c7 3493 /* If TEMP is a volatile MEM and we want a result value, make
f29369b9
RK
3494 the access now so it gets done only once. Likewise if
3495 it contains TARGET. */
3496 if (GET_CODE (temp) == MEM && want_value
3497 && (MEM_VOLATILE_P (temp)
3498 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
766f36c7
RK
3499 temp = copy_to_reg (temp);
3500
b258707c
RS
3501 /* If TEMP is a VOIDmode constant, use convert_modes to make
3502 sure that we properly convert it. */
3503 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3504 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3505 TYPE_MODE (TREE_TYPE (exp)), temp,
3506 SUBREG_PROMOTED_UNSIGNED_P (target));
3507
1499e0a8
RK
3508 convert_move (SUBREG_REG (target), temp,
3509 SUBREG_PROMOTED_UNSIGNED_P (target));
709f5be1 3510 return want_value ? temp : NULL_RTX;
1499e0a8 3511 }
bbf6f052
RK
3512 else
3513 {
3514 temp = expand_expr (exp, target, GET_MODE (target), 0);
766f36c7 3515 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
3516 If TARGET is a volatile mem ref, either return TARGET
3517 or return a reg copied *from* TARGET; ANSI requires this.
3518
3519 Otherwise, if TEMP is not TARGET, return TEMP
3520 if it is constant (for efficiency),
3521 or if we really want the correct value. */
bbf6f052
RK
3522 if (!(target && GET_CODE (target) == REG
3523 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 3524 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 3525 && ! rtx_equal_p (temp, target)
709f5be1 3526 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
3527 dont_return_target = 1;
3528 }
3529
b258707c
RS
3530 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3531 the same as that of TARGET, adjust the constant. This is needed, for
3532 example, in case it is a CONST_DOUBLE and we want only a word-sized
3533 value. */
3534 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 3535 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
3536 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3537 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3538 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3539
921b3427
RK
3540 if (flag_check_memory_usage
3541 && GET_CODE (target) == MEM
3542 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3543 {
3544 if (GET_CODE (temp) == MEM)
3545 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3546 XEXP (target, 0), ptr_mode,
3547 XEXP (temp, 0), ptr_mode,
3548 expr_size (exp), TYPE_MODE (sizetype));
3549 else
3550 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3551 XEXP (target, 0), ptr_mode,
3552 expr_size (exp), TYPE_MODE (sizetype),
956d6950
JL
3553 GEN_INT (MEMORY_USE_WO),
3554 TYPE_MODE (integer_type_node));
921b3427
RK
3555 }
3556
bbf6f052
RK
3557 /* If value was not generated in the target, store it there.
3558 Convert the value to TARGET's type first if necessary. */
3559
6036acbb
R
3560 if ((! rtx_equal_p (temp, target)
3561 || side_effects_p (temp)
3562 || side_effects_p (target))
3563 && TREE_CODE (exp) != ERROR_MARK)
bbf6f052
RK
3564 {
3565 target = protect_from_queue (target, 1);
3566 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 3567 && GET_MODE (temp) != VOIDmode)
bbf6f052
RK
3568 {
3569 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3570 if (dont_return_target)
3571 {
3572 /* In this case, we will return TEMP,
3573 so make sure it has the proper mode.
3574 But don't forget to store the value into TARGET. */
3575 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3576 emit_move_insn (target, temp);
3577 }
3578 else
3579 convert_move (target, temp, unsignedp);
3580 }
3581
3582 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3583 {
3584 /* Handle copying a string constant into an array.
3585 The string constant may be shorter than the array.
3586 So copy just the string's actual length, and clear the rest. */
3587 rtx size;
22619c3f 3588 rtx addr;
bbf6f052 3589
e87b4f3f
RS
3590 /* Get the size of the data type of the string,
3591 which is actually the size of the target. */
3592 size = expr_size (exp);
3593 if (GET_CODE (size) == CONST_INT
3594 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3595 emit_block_move (target, temp, size,
3596 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3597 else
bbf6f052 3598 {
e87b4f3f
RS
3599 /* Compute the size of the data to copy from the string. */
3600 tree copy_size
c03b7665 3601 = size_binop (MIN_EXPR,
b50d17a1 3602 make_tree (sizetype, size),
c03b7665
RK
3603 convert (sizetype,
3604 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
906c4e36
RK
3605 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3606 VOIDmode, 0);
e87b4f3f
RS
3607 rtx label = 0;
3608
3609 /* Copy that much. */
3610 emit_block_move (target, temp, copy_size_rtx,
3611 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3612
88f63c77
RK
3613 /* Figure out how much is left in TARGET that we have to clear.
3614 Do all calculations in ptr_mode. */
3615
3616 addr = XEXP (target, 0);
3617 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3618
e87b4f3f
RS
3619 if (GET_CODE (copy_size_rtx) == CONST_INT)
3620 {
88f63c77 3621 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
22619c3f 3622 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
e87b4f3f
RS
3623 }
3624 else
3625 {
88f63c77
RK
3626 addr = force_reg (ptr_mode, addr);
3627 addr = expand_binop (ptr_mode, add_optab, addr,
906c4e36
RK
3628 copy_size_rtx, NULL_RTX, 0,
3629 OPTAB_LIB_WIDEN);
e87b4f3f 3630
88f63c77 3631 size = expand_binop (ptr_mode, sub_optab, size,
906c4e36
RK
3632 copy_size_rtx, NULL_RTX, 0,
3633 OPTAB_LIB_WIDEN);
e87b4f3f 3634
906c4e36 3635 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
e87b4f3f
RS
3636 GET_MODE (size), 0, 0);
3637 label = gen_label_rtx ();
3638 emit_jump_insn (gen_blt (label));
3639 }
3640
3641 if (size != const0_rtx)
3642 {
921b3427
RK
3643 /* Be sure we can write on ADDR. */
3644 if (flag_check_memory_usage)
3645 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3646 addr, ptr_mode,
3647 size, TYPE_MODE (sizetype),
956d6950
JL
3648 GEN_INT (MEMORY_USE_WO),
3649 TYPE_MODE (integer_type_node));
bbf6f052 3650#ifdef TARGET_MEM_FUNCTIONS
3b6f75e2 3651 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
86242483 3652 addr, ptr_mode,
3b6f75e2
JW
3653 const0_rtx, TYPE_MODE (integer_type_node),
3654 convert_to_mode (TYPE_MODE (sizetype),
3655 size,
3656 TREE_UNSIGNED (sizetype)),
3657 TYPE_MODE (sizetype));
bbf6f052 3658#else
d562e42e 3659 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
86242483 3660 addr, ptr_mode,
3b6f75e2
JW
3661 convert_to_mode (TYPE_MODE (integer_type_node),
3662 size,
3663 TREE_UNSIGNED (integer_type_node)),
3664 TYPE_MODE (integer_type_node));
bbf6f052 3665#endif
e87b4f3f 3666 }
22619c3f 3667
e87b4f3f
RS
3668 if (label)
3669 emit_label (label);
bbf6f052
RK
3670 }
3671 }
fffa9c1d
JW
3672 /* Handle calls that return values in multiple non-contiguous locations.
3673 The Irix 6 ABI has examples of this. */
3674 else if (GET_CODE (target) == PARALLEL)
aac5cc16
RH
3675 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3676 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
bbf6f052
RK
3677 else if (GET_MODE (temp) == BLKmode)
3678 emit_block_move (target, temp, expr_size (exp),
3679 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3680 else
3681 emit_move_insn (target, temp);
3682 }
709f5be1 3683
766f36c7
RK
3684 /* If we don't want a value, return NULL_RTX. */
3685 if (! want_value)
3686 return NULL_RTX;
3687
3688 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3689 ??? The latter test doesn't seem to make sense. */
3690 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 3691 return temp;
766f36c7
RK
3692
3693 /* Return TARGET itself if it is a hard register. */
3694 else if (want_value && GET_MODE (target) != BLKmode
3695 && ! (GET_CODE (target) == REG
3696 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 3697 return copy_to_reg (target);
766f36c7
RK
3698
3699 else
709f5be1 3700 return target;
bbf6f052
RK
3701}
3702\f
9de08200
RK
3703/* Return 1 if EXP just contains zeros. */
3704
3705static int
3706is_zeros_p (exp)
3707 tree exp;
3708{
3709 tree elt;
3710
3711 switch (TREE_CODE (exp))
3712 {
3713 case CONVERT_EXPR:
3714 case NOP_EXPR:
3715 case NON_LVALUE_EXPR:
3716 return is_zeros_p (TREE_OPERAND (exp, 0));
3717
3718 case INTEGER_CST:
3719 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3720
3721 case COMPLEX_CST:
3722 return
3723 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3724
3725 case REAL_CST:
41c9120b 3726 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200
RK
3727
3728 case CONSTRUCTOR:
e1a43f73
PB
3729 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3730 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
3731 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3732 if (! is_zeros_p (TREE_VALUE (elt)))
3733 return 0;
3734
3735 return 1;
e9a25f70
JL
3736
3737 default:
3738 return 0;
9de08200 3739 }
9de08200
RK
3740}
3741
3742/* Return 1 if EXP contains mostly (3/4) zeros. */
3743
3744static int
3745mostly_zeros_p (exp)
3746 tree exp;
3747{
9de08200
RK
3748 if (TREE_CODE (exp) == CONSTRUCTOR)
3749 {
e1a43f73
PB
3750 int elts = 0, zeros = 0;
3751 tree elt = CONSTRUCTOR_ELTS (exp);
3752 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3753 {
3754 /* If there are no ranges of true bits, it is all zero. */
3755 return elt == NULL_TREE;
3756 }
3757 for (; elt; elt = TREE_CHAIN (elt))
3758 {
3759 /* We do not handle the case where the index is a RANGE_EXPR,
3760 so the statistic will be somewhat inaccurate.
3761 We do make a more accurate count in store_constructor itself,
3762 and since this function is only used for nested array elements,
0f41302f 3763 this should be close enough. */
e1a43f73
PB
3764 if (mostly_zeros_p (TREE_VALUE (elt)))
3765 zeros++;
3766 elts++;
3767 }
9de08200
RK
3768
3769 return 4 * zeros >= 3 * elts;
3770 }
3771
3772 return is_zeros_p (exp);
3773}
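
/* Worked example, added for illustration: a CONSTRUCTOR with 16
   elements of which 12 are zero gives 4 * 12 = 48 >= 3 * 16 = 48, so
   mostly_zeros_p returns 1; with only 11 zeros, 44 < 48 and it
   returns 0.  That is the 3/4 threshold mentioned above.  */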
3774\f
e1a43f73
PB
3775/* Helper function for store_constructor.
3776 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3777 TYPE is the type of the CONSTRUCTOR, not the element type.
23ccec44
JW
3778 CLEARED is as for store_constructor.
3779
3780 This provides a recursive shortcut back to store_constructor when it isn't
3781 necessary to go through store_field. This is so that we can pass through
3782 the cleared field to let store_constructor know that we may not have to
3783 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
3784
3785static void
3786store_constructor_field (target, bitsize, bitpos,
3787 mode, exp, type, cleared)
3788 rtx target;
3789 int bitsize, bitpos;
3790 enum machine_mode mode;
3791 tree exp, type;
3792 int cleared;
3793{
3794 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44
JW
3795 && bitpos % BITS_PER_UNIT == 0
3796 /* If we have a non-zero bitpos for a register target, then we just
3797 let store_field do the bitfield handling. This is unlikely to
3798 generate unnecessary clear instructions anyway. */
3799 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 3800 {
126e5b0d
JW
3801 if (bitpos != 0)
3802 target = change_address (target, VOIDmode,
3803 plus_constant (XEXP (target, 0),
3804 bitpos / BITS_PER_UNIT));
3805 store_constructor (exp, target, cleared);
e1a43f73
PB
3806 }
3807 else
3808 store_field (target, bitsize, bitpos, mode, exp,
3809 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
ece32014 3810 int_size_in_bytes (type), 0);
e1a43f73
PB
3811}
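
/* Example of the shortcut above, added for illustration: given the
   hypothetical initializer `struct s { struct t inner; } x = {{0, 1}};',
   the nested CONSTRUCTOR for X.INNER lands at a byte-aligned BITPOS, so
   the code simply re-enters store_constructor on a MEM displaced by
   BITPOS / BITS_PER_UNIT, and CLEARED is passed through so an outer
   clear of X is not repeated for X.INNER.  */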
3812
bbf6f052 3813/* Store the value of constructor EXP into the rtx TARGET.
e1a43f73 3814 TARGET is either a REG or a MEM.
0f41302f 3815 CLEARED is true if TARGET is known to have been zeroed. */
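
/* Illustration, not in the original source: for a GNU C initializer
   such as `int a[100] = { [2] = 7 };' the constructor supplies far
   fewer elements than the array, so the code below clears the whole
   TARGET once with clear_storage and then stores only the nonzero
   element, rather than emitting one move per element.  */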
bbf6f052
RK
3816
3817static void
e1a43f73 3818store_constructor (exp, target, cleared)
bbf6f052
RK
3819 tree exp;
3820 rtx target;
e1a43f73 3821 int cleared;
bbf6f052 3822{
4af3895e 3823 tree type = TREE_TYPE (exp);
34c73909 3824 rtx exp_size = expr_size (exp);
4af3895e 3825
bbf6f052
RK
3826 /* We know our target cannot conflict, since safe_from_p has been called. */
3827#if 0
3828 /* Don't try copying piece by piece into a hard register
3829 since that is vulnerable to being clobbered by EXP.
3830 Instead, construct in a pseudo register and then copy it all. */
3831 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3832 {
3833 rtx temp = gen_reg_rtx (GET_MODE (target));
e1a43f73 3834 store_constructor (exp, temp, 0);
bbf6f052
RK
3835 emit_move_insn (target, temp);
3836 return;
3837 }
3838#endif
3839
e44842fe
RK
3840 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3841 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
3842 {
3843 register tree elt;
3844
4af3895e 3845 /* Inform later passes that the whole union value is dead. */
e44842fe
RK
3846 if (TREE_CODE (type) == UNION_TYPE
3847 || TREE_CODE (type) == QUAL_UNION_TYPE)
38a448ca 3848 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4af3895e
JVA
3849
3850 /* If we are building a static constructor into a register,
3851 set the initial value as zero so we can fold the value into
67225c15
RK
3852 a constant. But if more than one register is involved,
3853 this probably loses. */
3854 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3855 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200
RK
3856 {
3857 if (! cleared)
e9a25f70 3858 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4af3895e 3859
9de08200
RK
3860 cleared = 1;
3861 }
3862
3863 /* If the constructor has fewer fields than the structure
3864 or if we are initializing the structure to mostly zeros,
bbf6f052 3865 clear the whole structure first. */
9de08200
RK
3866 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3867 != list_length (TYPE_FIELDS (type)))
3868 || mostly_zeros_p (exp))
3869 {
3870 if (! cleared)
3871 clear_storage (target, expr_size (exp),
3872 TYPE_ALIGN (type) / BITS_PER_UNIT);
3873
3874 cleared = 1;
3875 }
bbf6f052
RK
3876 else
3877 /* Inform later passes that the old value is dead. */
38a448ca 3878 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
3879
3880 /* Store each element of the constructor into
3881 the corresponding field of TARGET. */
3882
3883 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3884 {
3885 register tree field = TREE_PURPOSE (elt);
34c73909 3886 tree value = TREE_VALUE (elt);
bbf6f052
RK
3887 register enum machine_mode mode;
3888 int bitsize;
b50d17a1 3889 int bitpos = 0;
bbf6f052 3890 int unsignedp;
b50d17a1
RK
3891 tree pos, constant = 0, offset = 0;
3892 rtx to_rtx = target;
bbf6f052 3893
f32fd778
RS
3894 /* Just ignore missing fields.
3895 We cleared the whole structure, above,
3896 if any fields are missing. */
3897 if (field == 0)
3898 continue;
3899
e1a43f73
PB
3900 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3901 continue;
9de08200 3902
bbf6f052
RK
3903 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3904 unsignedp = TREE_UNSIGNED (field);
3905 mode = DECL_MODE (field);
3906 if (DECL_BIT_FIELD (field))
3907 mode = VOIDmode;
3908
b50d17a1
RK
3909 pos = DECL_FIELD_BITPOS (field);
3910 if (TREE_CODE (pos) == INTEGER_CST)
3911 constant = pos;
3912 else if (TREE_CODE (pos) == PLUS_EXPR
3913 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3914 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3915 else
3916 offset = pos;
3917
3918 if (constant)
cd11b87e 3919 bitpos = TREE_INT_CST_LOW (constant);
b50d17a1
RK
3920
3921 if (offset)
3922 {
3923 rtx offset_rtx;
3924
3925 if (contains_placeholder_p (offset))
3926 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 3927 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 3928
b50d17a1
RK
3929 offset = size_binop (FLOOR_DIV_EXPR, offset,
3930 size_int (BITS_PER_UNIT));
bbf6f052 3931
b50d17a1
RK
3932 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3933 if (GET_CODE (to_rtx) != MEM)
3934 abort ();
3935
bd070e1a
RH
3936 if (GET_MODE (offset_rtx) != ptr_mode)
3937 {
3938#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 3939 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
3940#else
3941 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3942#endif
3943 }
3944
b50d17a1
RK
3945 to_rtx
3946 = change_address (to_rtx, VOIDmode,
38a448ca 3947 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
88f63c77 3948 force_reg (ptr_mode, offset_rtx)));
b50d17a1 3949 }
cf04eb80
RK
3950 if (TREE_READONLY (field))
3951 {
9151b3bf 3952 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
3953 to_rtx = copy_rtx (to_rtx);
3954
cf04eb80
RK
3955 RTX_UNCHANGING_P (to_rtx) = 1;
3956 }
3957
34c73909
R
3958#ifdef WORD_REGISTER_OPERATIONS
3959 /* If this initializes a field that is smaller than a word, at the
3960 start of a word, try to widen it to a full word.
3961 This special case allows us to output C++ member function
3962 initializations in a form that the optimizers can understand. */
3963 if (constant
3964 && GET_CODE (target) == REG
3965 && bitsize < BITS_PER_WORD
3966 && bitpos % BITS_PER_WORD == 0
3967 && GET_MODE_CLASS (mode) == MODE_INT
3968 && TREE_CODE (value) == INTEGER_CST
3969 && GET_CODE (exp_size) == CONST_INT
3970 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
3971 {
3972 tree type = TREE_TYPE (value);
3973 if (TYPE_PRECISION (type) < BITS_PER_WORD)
3974 {
3975 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
3976 value = convert (type, value);
3977 }
3978 if (BYTES_BIG_ENDIAN)
3979 value
3980 = fold (build (LSHIFT_EXPR, type, value,
3981 build_int_2 (BITS_PER_WORD - bitsize, 0)));
3982 bitsize = BITS_PER_WORD;
3983 mode = word_mode;
3984 }
3985#endif
e1a43f73 3986 store_constructor_field (to_rtx, bitsize, bitpos,
34c73909 3987 mode, value, type, cleared);
bbf6f052
RK
3988 }
3989 }
4af3895e 3990 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
3991 {
3992 register tree elt;
3993 register int i;
e1a43f73 3994 int need_to_clear;
4af3895e 3995 tree domain = TYPE_DOMAIN (type);
906c4e36
RK
3996 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3997 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 3998 tree elttype = TREE_TYPE (type);
bbf6f052 3999
e1a43f73 4000 /* If the constructor has fewer elements than the array,
38e01259 4001 clear the whole array first. Similarly if this is
e1a43f73
PB
4002 a static constructor of a non-BLKmode object. */
4003 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4004 need_to_clear = 1;
4005 else
4006 {
4007 HOST_WIDE_INT count = 0, zero_count = 0;
4008 need_to_clear = 0;
4009 /* This loop is a more accurate version of the loop in
4010 mostly_zeros_p (it handles RANGE_EXPR in an index).
4011 It is also needed to check for missing elements. */
4012 for (elt = CONSTRUCTOR_ELTS (exp);
4013 elt != NULL_TREE;
df0faff1 4014 elt = TREE_CHAIN (elt))
e1a43f73
PB
4015 {
4016 tree index = TREE_PURPOSE (elt);
4017 HOST_WIDE_INT this_node_count;
4018 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4019 {
4020 tree lo_index = TREE_OPERAND (index, 0);
4021 tree hi_index = TREE_OPERAND (index, 1);
4022 if (TREE_CODE (lo_index) != INTEGER_CST
4023 || TREE_CODE (hi_index) != INTEGER_CST)
4024 {
4025 need_to_clear = 1;
4026 break;
4027 }
4028 this_node_count = TREE_INT_CST_LOW (hi_index)
4029 - TREE_INT_CST_LOW (lo_index) + 1;
4030 }
4031 else
4032 this_node_count = 1;
4033 count += this_node_count;
4034 if (mostly_zeros_p (TREE_VALUE (elt)))
4035 zero_count += this_node_count;
4036 }
8e958f70 4037 /* Clear the entire array first if there are any missing elements,
0f41302f 4038 or if the incidence of zero elements is >= 75%. */
8e958f70
PB
4039 if (count < maxelt - minelt + 1
4040 || 4 * zero_count >= 3 * count)
e1a43f73
PB
4041 need_to_clear = 1;
4042 }
4043 if (need_to_clear)
9de08200
RK
4044 {
4045 if (! cleared)
4046 clear_storage (target, expr_size (exp),
4047 TYPE_ALIGN (type) / BITS_PER_UNIT);
9de08200
RK
4048 cleared = 1;
4049 }
bbf6f052
RK
4050 else
4051 /* Inform later passes that the old value is dead. */
38a448ca 4052 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4053
4054 /* Store each element of the constructor into
4055 the corresponding element of TARGET, determined
4056 by counting the elements. */
4057 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4058 elt;
4059 elt = TREE_CHAIN (elt), i++)
4060 {
4061 register enum machine_mode mode;
4062 int bitsize;
4063 int bitpos;
4064 int unsignedp;
e1a43f73 4065 tree value = TREE_VALUE (elt);
03dc44a6
RS
4066 tree index = TREE_PURPOSE (elt);
4067 rtx xtarget = target;
bbf6f052 4068
e1a43f73
PB
4069 if (cleared && is_zeros_p (value))
4070 continue;
9de08200 4071
bbf6f052
RK
4072 mode = TYPE_MODE (elttype);
4073 bitsize = GET_MODE_BITSIZE (mode);
4074 unsignedp = TREE_UNSIGNED (elttype);
4075
e1a43f73
PB
4076 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4077 {
4078 tree lo_index = TREE_OPERAND (index, 0);
4079 tree hi_index = TREE_OPERAND (index, 1);
4080 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4081 struct nesting *loop;
05c0b405
PB
4082 HOST_WIDE_INT lo, hi, count;
4083 tree position;
e1a43f73 4084
0f41302f 4085 /* If the range is constant and "small", unroll the loop. */
e1a43f73 4086 if (TREE_CODE (lo_index) == INTEGER_CST
05c0b405
PB
4087 && TREE_CODE (hi_index) == INTEGER_CST
4088 && (lo = TREE_INT_CST_LOW (lo_index),
4089 hi = TREE_INT_CST_LOW (hi_index),
4090 count = hi - lo + 1,
4091 (GET_CODE (target) != MEM
4092 || count <= 2
4093 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4094 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4095 <= 40 * 8))))
e1a43f73 4096 {
05c0b405
PB
4097 lo -= minelt; hi -= minelt;
4098 for (; lo <= hi; lo++)
e1a43f73 4099 {
05c0b405
PB
4100 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4101 store_constructor_field (target, bitsize, bitpos,
4102 mode, value, type, cleared);
e1a43f73
PB
4103 }
4104 }
4105 else
4106 {
4107 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4108 loop_top = gen_label_rtx ();
4109 loop_end = gen_label_rtx ();
4110
4111 unsignedp = TREE_UNSIGNED (domain);
4112
4113 index = build_decl (VAR_DECL, NULL_TREE, domain);
4114
4115 DECL_RTL (index) = index_r
4116 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4117 &unsignedp, 0));
4118
4119 if (TREE_CODE (value) == SAVE_EXPR
4120 && SAVE_EXPR_RTL (value) == 0)
4121 {
0f41302f
MS
4122 /* Make sure value gets expanded once before the
4123 loop. */
e1a43f73
PB
4124 expand_expr (value, const0_rtx, VOIDmode, 0);
4125 emit_queue ();
4126 }
4127 store_expr (lo_index, index_r, 0);
4128 loop = expand_start_loop (0);
4129
0f41302f 4130 /* Assign value to element index. */
e1a43f73
PB
4131 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4132 size_int (BITS_PER_UNIT));
4133 position = size_binop (MULT_EXPR,
4134 size_binop (MINUS_EXPR, index,
4135 TYPE_MIN_VALUE (domain)),
4136 position);
4137 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4138 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
e1a43f73
PB
4139 xtarget = change_address (target, mode, addr);
4140 if (TREE_CODE (value) == CONSTRUCTOR)
05c0b405 4141 store_constructor (value, xtarget, cleared);
e1a43f73
PB
4142 else
4143 store_expr (value, xtarget, 0);
4144
4145 expand_exit_loop_if_false (loop,
4146 build (LT_EXPR, integer_type_node,
4147 index, hi_index));
4148
4149 expand_increment (build (PREINCREMENT_EXPR,
4150 TREE_TYPE (index),
7b8b9722 4151 index, integer_one_node), 0, 0);
e1a43f73
PB
4152 expand_end_loop ();
4153 emit_label (loop_end);
4154
4155 /* Needed by stupid register allocation, to extend the
4156 lifetime of pseudo-regs used by target past the end
4157 of the loop. */
38a448ca 4158 emit_insn (gen_rtx_USE (GET_MODE (target), target));
e1a43f73
PB
4159 }
4160 }
4161 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
5b6c44ff 4162 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
03dc44a6 4163 {
e1a43f73 4164 rtx pos_rtx, addr;
03dc44a6
RS
4165 tree position;
4166
5b6c44ff
RK
4167 if (index == 0)
4168 index = size_int (i);
4169
e1a43f73
PB
4170 if (minelt)
4171 index = size_binop (MINUS_EXPR, index,
4172 TYPE_MIN_VALUE (domain));
5b6c44ff
RK
4173 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4174 size_int (BITS_PER_UNIT));
4175 position = size_binop (MULT_EXPR, index, position);
03dc44a6 4176 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4177 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
03dc44a6 4178 xtarget = change_address (target, mode, addr);
e1a43f73 4179 store_expr (value, xtarget, 0);
03dc44a6
RS
4180 }
4181 else
4182 {
4183 if (index != 0)
7c314719 4184 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
03dc44a6
RS
4185 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4186 else
4187 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
e1a43f73
PB
4188 store_constructor_field (target, bitsize, bitpos,
4189 mode, value, type, cleared);
03dc44a6 4190 }
bbf6f052
RK
4191 }
4192 }
071a6595
PB
4193 /* Handle set constructor assignments. */
4194 else if (TREE_CODE (type) == SET_TYPE)
4195 {
e1a43f73 4196 tree elt = CONSTRUCTOR_ELTS (exp);
e1a43f73 4197 int nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
4198 tree domain = TYPE_DOMAIN (type);
4199 tree domain_min, domain_max, bitlength;
4200
9faa82d8 4201 /* The default implementation strategy is to extract the constant
071a6595
PB
4202 parts of the constructor, use that to initialize the target,
4203 and then "or" in whatever non-constant ranges we need in addition.
4204
4205 If a large set is all zero or all ones, it is
4206 probably better to set it using memset (if available) or bzero.
4207 Also, if a large set has just a single range, it may also be
4208 better to first clear the whole set (using
0f41302f 4209 bzero/memset), and then set the bits we want. */
071a6595 4210
0f41302f 4211 /* Check for all zeros. */
e1a43f73 4212 if (elt == NULL_TREE)
071a6595 4213 {
e1a43f73
PB
4214 if (!cleared)
4215 clear_storage (target, expr_size (exp),
4216 TYPE_ALIGN (type) / BITS_PER_UNIT);
071a6595
PB
4217 return;
4218 }
4219
071a6595
PB
4220 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4221 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4222 bitlength = size_binop (PLUS_EXPR,
4223 size_binop (MINUS_EXPR, domain_max, domain_min),
4224 size_one_node);
4225
e1a43f73
PB
4226 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4227 abort ();
4228 nbits = TREE_INT_CST_LOW (bitlength);
4229
4230 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4231 are "complicated" (more than one range), initialize (the
4232 constant parts) by copying from a constant. */
4233 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4234 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 4235 {
b4ee5a72
PB
4236 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4237 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 4238 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72
PB
4239 HOST_WIDE_INT word = 0;
4240 int bit_pos = 0;
4241 int ibit = 0;
0f41302f 4242 int offset = 0; /* In bytes from beginning of set. */
e1a43f73 4243 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 4244 for (;;)
071a6595 4245 {
b4ee5a72
PB
4246 if (bit_buffer[ibit])
4247 {
b09f3348 4248 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
4249 word |= (1 << (set_word_size - 1 - bit_pos));
4250 else
4251 word |= 1 << bit_pos;
4252 }
4253 bit_pos++; ibit++;
4254 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 4255 {
e1a43f73
PB
4256 if (word != 0 || ! cleared)
4257 {
4258 rtx datum = GEN_INT (word);
4259 rtx to_rtx;
0f41302f
MS
4260 /* The assumption here is that it is safe to use
4261 XEXP if the set is multi-word, but not if
4262 it's single-word. */
e1a43f73
PB
4263 if (GET_CODE (target) == MEM)
4264 {
4265 to_rtx = plus_constant (XEXP (target, 0), offset);
4266 to_rtx = change_address (target, mode, to_rtx);
4267 }
4268 else if (offset == 0)
4269 to_rtx = target;
4270 else
4271 abort ();
4272 emit_move_insn (to_rtx, datum);
4273 }
b4ee5a72
PB
4274 if (ibit == nbits)
4275 break;
4276 word = 0;
4277 bit_pos = 0;
4278 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
4279 }
4280 }
071a6595 4281 }
e1a43f73
PB
4282 else if (!cleared)
4283 {
0f41302f 4284 /* Don't bother clearing storage if the set is all ones. */
e1a43f73
PB
4285 if (TREE_CHAIN (elt) != NULL_TREE
4286 || (TREE_PURPOSE (elt) == NULL_TREE
4287 ? nbits != 1
4288 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4289 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4290 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4291 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4292 != nbits))))
4293 clear_storage (target, expr_size (exp),
4294 TYPE_ALIGN (type) / BITS_PER_UNIT);
4295 }
4296
4297 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595
PB
4298 {
4299 /* Start of range of element, or NULL. */
4300 tree startbit = TREE_PURPOSE (elt);
4301 /* End of range of element, or element value. */
4302 tree endbit = TREE_VALUE (elt);
381127e8 4303#ifdef TARGET_MEM_FUNCTIONS
071a6595 4304 HOST_WIDE_INT startb, endb;
381127e8 4305#endif
071a6595
PB
4306 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4307
4308 bitlength_rtx = expand_expr (bitlength,
4309 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4310
4311 /* Handle a non-range tuple element like [ expr ]. */
4312 if (startbit == NULL_TREE)
4313 {
4314 startbit = save_expr (endbit);
4315 endbit = startbit;
4316 }
4317 startbit = convert (sizetype, startbit);
4318 endbit = convert (sizetype, endbit);
4319 if (! integer_zerop (domain_min))
4320 {
4321 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4322 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4323 }
4324 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4325 EXPAND_CONST_ADDRESS);
4326 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4327 EXPAND_CONST_ADDRESS);
4328
4329 if (REG_P (target))
4330 {
4331 targetx = assign_stack_temp (GET_MODE (target),
4332 GET_MODE_SIZE (GET_MODE (target)),
4333 0);
4334 emit_move_insn (targetx, target);
4335 }
4336 else if (GET_CODE (target) == MEM)
4337 targetx = target;
4338 else
4339 abort ();
4340
4341#ifdef TARGET_MEM_FUNCTIONS
4342 /* Optimization: If startbit and endbit are
9faa82d8 4343 constants divisible by BITS_PER_UNIT,
0f41302f 4344 call memset instead. */
071a6595
PB
4345 if (TREE_CODE (startbit) == INTEGER_CST
4346 && TREE_CODE (endbit) == INTEGER_CST
4347 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 4348 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 4349 {
071a6595
PB
4350 emit_library_call (memset_libfunc, 0,
4351 VOIDmode, 3,
e1a43f73
PB
4352 plus_constant (XEXP (targetx, 0),
4353 startb / BITS_PER_UNIT),
071a6595 4354 Pmode,
3b6f75e2 4355 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 4356 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 4357 TYPE_MODE (sizetype));
071a6595
PB
4358 }
4359 else
4360#endif
4361 {
38a448ca 4362 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
071a6595
PB
4363 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4364 bitlength_rtx, TYPE_MODE (sizetype),
4365 startbit_rtx, TYPE_MODE (sizetype),
4366 endbit_rtx, TYPE_MODE (sizetype));
4367 }
4368 if (REG_P (target))
4369 emit_move_insn (target, targetx);
4370 }
4371 }
bbf6f052
RK
4372
4373 else
4374 abort ();
4375}
4376
4377/* Store the value of EXP (an expression tree)
4378 into a subfield of TARGET which has mode MODE and occupies
4379 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4380 If MODE is VOIDmode, it means that we are storing into a bit-field.
4381
4382 If VALUE_MODE is VOIDmode, return nothing in particular.
4383 UNSIGNEDP is not used in this case.
4384
4385 Otherwise, return an rtx for the value stored. This rtx
4386 has mode VALUE_MODE if that is convenient to do.
4387 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4388
4389 ALIGN is the alignment that TARGET is known to have, measured in bytes.
ece32014
MM
4390 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4391
4392 ALIAS_SET is the alias set for the destination. This value will
4393 (in general) be different from that for TARGET, since TARGET is a
4394 reference to the containing structure. */
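
/* Illustrative call, not from the original source: to store EXP into
   a 5-bit field that starts 3 bits into a fully aligned 16-byte
   structure, a caller would write roughly

     store_field (target, 5, 3, VOIDmode, exp, VOIDmode, 0,
                  BIGGEST_ALIGNMENT / BITS_PER_UNIT, 16, alias_set);

   MODE == VOIDmode marks a bit-field store, and VALUE_MODE == VOIDmode
   says no result rtx is wanted.  ALIAS_SET here is assumed to come
   from the enclosing reference.  */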
bbf6f052
RK
4395
4396static rtx
4397store_field (target, bitsize, bitpos, mode, exp, value_mode,
ece32014 4398 unsignedp, align, total_size, alias_set)
bbf6f052
RK
4399 rtx target;
4400 int bitsize, bitpos;
4401 enum machine_mode mode;
4402 tree exp;
4403 enum machine_mode value_mode;
4404 int unsignedp;
4405 int align;
4406 int total_size;
ece32014 4407 int alias_set;
bbf6f052 4408{
906c4e36 4409 HOST_WIDE_INT width_mask = 0;
bbf6f052 4410
e9a25f70
JL
4411 if (TREE_CODE (exp) == ERROR_MARK)
4412 return const0_rtx;
4413
906c4e36
RK
4414 if (bitsize < HOST_BITS_PER_WIDE_INT)
4415 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
4416
4417 /* If we are storing into an unaligned field of an aligned union that is
4418 in a register, we may have the mode of TARGET being an integer mode but
4419 MODE == BLKmode. In that case, get an aligned object whose size and
4420 alignment are the same as TARGET and store TARGET into it (we can avoid
4421 the store if the field being stored is the entire width of TARGET). Then
4422 call ourselves recursively to store the field into a BLKmode version of
4423 that object. Finally, load from the object into TARGET. This is not
4424 very efficient in general, but should only be slightly more expensive
4425 than the otherwise-required unaligned accesses. Perhaps this can be
4426 cleaned up later. */
4427
4428 if (mode == BLKmode
4429 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4430 {
4431 rtx object = assign_stack_temp (GET_MODE (target),
4432 GET_MODE_SIZE (GET_MODE (target)), 0);
4433 rtx blk_object = copy_rtx (object);
4434
24a13950
JW
4435 MEM_IN_STRUCT_P (object) = 1;
4436 MEM_IN_STRUCT_P (blk_object) = 1;
bbf6f052
RK
4437 PUT_MODE (blk_object, BLKmode);
4438
4439 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4440 emit_move_insn (object, target);
4441
4442 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
ece32014 4443 align, total_size, alias_set);
bbf6f052 4444
46093b97
RS
4445 /* Even though we aren't returning target, we need to
4446 give it the updated value. */
bbf6f052
RK
4447 emit_move_insn (target, object);
4448
46093b97 4449 return blk_object;
bbf6f052
RK
4450 }
4451
4452 /* If the structure is in a register or if the component
4453 is a bit field, we cannot use addressing to access it.
4454 Use bit-field techniques or SUBREG to store in it. */
4455
4fa52007
RK
4456 if (mode == VOIDmode
4457 || (mode != BLKmode && ! direct_store[(int) mode])
4458 || GET_CODE (target) == REG
c980ac49 4459 || GET_CODE (target) == SUBREG
ccc98036
RS
4460 /* If the field isn't aligned enough to store as an ordinary memref,
4461 store it as a bit field. */
c7a7ac46 4462 || (SLOW_UNALIGNED_ACCESS
ccc98036 4463 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
c7a7ac46 4464 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 4465 {
906c4e36 4466 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 4467
ef19912d
RK
4468 /* If BITSIZE is narrower than the size of the type of EXP
4469 we will be narrowing TEMP. Normally, what's wanted are the
4470 low-order bits. However, if EXP's type is a record and this is
4471 a big-endian machine, we want the upper BITSIZE bits. */
4472 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4473 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4474 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4475 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4476 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4477 - bitsize),
4478 temp, 1);
4479
bbd6cf73
RK
4480 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4481 MODE. */
4482 if (mode != VOIDmode && mode != BLKmode
4483 && mode != TYPE_MODE (TREE_TYPE (exp)))
4484 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4485
a281e72d
RK
4486 /* If the modes of TARGET and TEMP are both BLKmode, both
4487 must be in memory and BITPOS must be aligned on a byte
4488 boundary. If so, we simply do a block copy. */
4489 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4490 {
4491 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4492 || bitpos % BITS_PER_UNIT != 0)
4493 abort ();
4494
0086427c
RK
4495 target = change_address (target, VOIDmode,
4496 plus_constant (XEXP (target, 0),
a281e72d
RK
4497 bitpos / BITS_PER_UNIT));
4498
4499 emit_block_move (target, temp,
4500 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4501 / BITS_PER_UNIT),
4502 1);
4503
4504 return value_mode == VOIDmode ? const0_rtx : target;
4505 }
4506
bbf6f052
RK
4507 /* Store the value in the bitfield. */
4508 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4509 if (value_mode != VOIDmode)
4510 {
4511 /* The caller wants an rtx for the value. */
4512 /* If possible, avoid refetching from the bitfield itself. */
4513 if (width_mask != 0
4514 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 4515 {
9074de27 4516 tree count;
5c4d7cfb 4517 enum machine_mode tmode;
86a2c12a 4518
5c4d7cfb
RS
4519 if (unsignedp)
4520 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4521 tmode = GET_MODE (temp);
86a2c12a
RS
4522 if (tmode == VOIDmode)
4523 tmode = value_mode;
5c4d7cfb
RS
4524 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4525 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4526 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4527 }
bbf6f052 4528 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
4529 NULL_RTX, value_mode, 0, align,
4530 total_size);
bbf6f052
RK
4531 }
4532 return const0_rtx;
4533 }
4534 else
4535 {
4536 rtx addr = XEXP (target, 0);
4537 rtx to_rtx;
4538
4539 /* If a value is wanted, it must be the lhs;
4540 so make the address stable for multiple use. */
4541
4542 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4543 && ! CONSTANT_ADDRESS_P (addr)
4544 /* A frame-pointer reference is already stable. */
4545 && ! (GET_CODE (addr) == PLUS
4546 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4547 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4548 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4549 addr = copy_to_reg (addr);
4550
4551 /* Now build a reference to just the desired component. */
4552
effbcc6a
RK
4553 to_rtx = copy_rtx (change_address (target, mode,
4554 plus_constant (addr,
4555 (bitpos
4556 / BITS_PER_UNIT))));
bbf6f052 4557 MEM_IN_STRUCT_P (to_rtx) = 1;
ece32014 4558 MEM_ALIAS_SET (to_rtx) = alias_set;
bbf6f052
RK
4559
4560 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4561 }
4562}
4563\f
4564/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4565 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 4566 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
4567
4568 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4569 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
4570 If the position of the field is variable, we store a tree
4571 giving the variable offset (in units) in *POFFSET.
4572 This offset is in addition to the bit position.
4573 If the position is not variable, we store 0 in *POFFSET.
839c4796
RK
4574 We set *PALIGNMENT to the alignment in bytes of the address that will be
4575 computed. This is the alignment of the thing we return if *POFFSET
4576 is zero, but can be less strictly aligned if *POFFSET is nonzero.
bbf6f052
RK
4577
4578 If any of the extraction expressions is volatile,
4579 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4580
4581 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4582 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
4583 is redundant.
4584
4585 If the field describes a variable-sized object, *PMODE is set to
4586 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
839c4796 4587 this case, but the address of the object can be found. */
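
/* Hypothetical example, added for illustration: for a reference like
   `x.a[3].b' where B is a 5-bit bit-field located 67 bits from the
   start of X, get_inner_reference returns the decl for X and sets
   *PBITSIZE = 5, *PBITPOS = 67, *PMODE = VOIDmode (a bit-field) and
   *POFFSET = 0, the position being constant.  Had the array index
   been variable, the byte part of the position would instead come
   back as a tree in *POFFSET.  */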
bbf6f052
RK
4588
4589tree
4969d05d 4590get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
839c4796 4591 punsignedp, pvolatilep, palignment)
bbf6f052
RK
4592 tree exp;
4593 int *pbitsize;
4594 int *pbitpos;
7bb0943f 4595 tree *poffset;
bbf6f052
RK
4596 enum machine_mode *pmode;
4597 int *punsignedp;
4598 int *pvolatilep;
839c4796 4599 int *palignment;
bbf6f052 4600{
b50d17a1 4601 tree orig_exp = exp;
bbf6f052
RK
4602 tree size_tree = 0;
4603 enum machine_mode mode = VOIDmode;
742920c7 4604 tree offset = integer_zero_node;
839c4796 4605 int alignment = BIGGEST_ALIGNMENT;
bbf6f052
RK
4606
4607 if (TREE_CODE (exp) == COMPONENT_REF)
4608 {
4609 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4610 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4611 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4612 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4613 }
4614 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4615 {
4616 size_tree = TREE_OPERAND (exp, 1);
4617 *punsignedp = TREE_UNSIGNED (exp);
4618 }
4619 else
4620 {
4621 mode = TYPE_MODE (TREE_TYPE (exp));
4622 *pbitsize = GET_MODE_BITSIZE (mode);
4623 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4624 }
4625
4626 if (size_tree)
4627 {
4628 if (TREE_CODE (size_tree) != INTEGER_CST)
e7c33f54
RK
4629 mode = BLKmode, *pbitsize = -1;
4630 else
4631 *pbitsize = TREE_INT_CST_LOW (size_tree);
bbf6f052
RK
4632 }
4633
4634 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4635 and find the ultimate containing object. */
4636
4637 *pbitpos = 0;
4638
4639 while (1)
4640 {
7bb0943f 4641 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 4642 {
7bb0943f
RS
4643 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4644 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4645 : TREE_OPERAND (exp, 2));
e6d8c385 4646 tree constant = integer_zero_node, var = pos;
bbf6f052 4647
e7f3c83f
RK
4648 /* If this field hasn't been filled in yet, don't go
4649 past it. This should only happen when folding expressions
4650 made during type construction. */
4651 if (pos == 0)
4652 break;
4653
e6d8c385
RK
4654 /* Assume here that the offset is a multiple of a unit.
4655 If not, there should be an explicitly added constant. */
4656 if (TREE_CODE (pos) == PLUS_EXPR
4657 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4658 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
7bb0943f 4659 else if (TREE_CODE (pos) == INTEGER_CST)
e6d8c385
RK
4660 constant = pos, var = integer_zero_node;
4661
4662 *pbitpos += TREE_INT_CST_LOW (constant);
8d8c9ba9
RK
4663 offset = size_binop (PLUS_EXPR, offset,
4664 size_binop (EXACT_DIV_EXPR, var,
4665 size_int (BITS_PER_UNIT)));
bbf6f052 4666 }
bbf6f052 4667
742920c7 4668 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 4669 {
742920c7
RK
4670 /* This code is based on the code in case ARRAY_REF in expand_expr
4671 below. We assume here that the size of an array element is
4672 always an integral multiple of BITS_PER_UNIT. */
4673
4674 tree index = TREE_OPERAND (exp, 1);
4675 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4676 tree low_bound
4677 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4678 tree index_type = TREE_TYPE (index);
ead17059 4679 tree xindex;
742920c7 4680
4c08eef0 4681 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
742920c7 4682 {
4c08eef0
RK
4683 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4684 index);
742920c7
RK
4685 index_type = TREE_TYPE (index);
4686 }
4687
74a4fbfc
DB
4688 /* Optimize the special-case of a zero lower bound.
4689
4690 We convert the low_bound to sizetype to avoid some problems
4691 with constant folding. (E.g. suppose the lower bound is 1,
4692 and its mode is QI. Without the conversion, (ARRAY
4693 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4694 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4695
4696 But sizetype isn't quite right either (especially if
4697 the lowbound is negative). FIXME */
4698
ca0f2220 4699 if (! integer_zerop (low_bound))
74a4fbfc
DB
4700 index = fold (build (MINUS_EXPR, index_type, index,
4701 convert (sizetype, low_bound)));
ca0f2220 4702
f8dac6eb
R
4703 if (TREE_CODE (index) == INTEGER_CST)
4704 {
4705 index = convert (sbitsizetype, index);
4706 index_type = TREE_TYPE (index);
4707 }
4708
ead17059
RH
4709 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4710 convert (sbitsizetype,
4711 TYPE_SIZE (TREE_TYPE (exp)))));
742920c7 4712
ead17059
RH
4713 if (TREE_CODE (xindex) == INTEGER_CST
4714 && TREE_INT_CST_HIGH (xindex) == 0)
4715 *pbitpos += TREE_INT_CST_LOW (xindex);
742920c7 4716 else
956d6950 4717 {
ead17059
RH
4718 /* Either the bit offset calculated above is not constant, or
4719 it overflowed. In either case, redo the multiplication
4720 against the size in units. This is especially important
4721 in the non-constant case to avoid a division at runtime. */
4722 xindex = fold (build (MULT_EXPR, ssizetype, index,
4723 convert (ssizetype,
4724 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4725
4726 if (contains_placeholder_p (xindex))
4727 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4728
4729 offset = size_binop (PLUS_EXPR, offset, xindex);
956d6950 4730 }
bbf6f052
RK
4731 }
4732 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4733 && ! ((TREE_CODE (exp) == NOP_EXPR
4734 || TREE_CODE (exp) == CONVERT_EXPR)
7f62854a
RK
4735 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4736 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4737 != UNION_TYPE))
bbf6f052
RK
4738 && (TYPE_MODE (TREE_TYPE (exp))
4739 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4740 break;
7bb0943f
RS
4741
4742 /* If any reference in the chain is volatile, the effect is volatile. */
4743 if (TREE_THIS_VOLATILE (exp))
4744 *pvolatilep = 1;
839c4796
RK
4745
4746 /* If the offset is non-constant already, then we can't assume any
4747 alignment more than the alignment here. */
4748 if (! integer_zerop (offset))
4749 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4750
bbf6f052
RK
4751 exp = TREE_OPERAND (exp, 0);
4752 }
4753
839c4796
RK
4754 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4755 alignment = MIN (alignment, DECL_ALIGN (exp));
9293498f 4756 else if (TREE_TYPE (exp) != 0)
839c4796
RK
4757 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4758
742920c7
RK
4759 if (integer_zerop (offset))
4760 offset = 0;
4761
b50d17a1
RK
4762 if (offset != 0 && contains_placeholder_p (offset))
4763 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4764
bbf6f052 4765 *pmode = mode;
7bb0943f 4766 *poffset = offset;
839c4796 4767 *palignment = alignment / BITS_PER_UNIT;
bbf6f052
RK
4768 return exp;
4769}
921b3427
RK
4770
4771/* Subroutine of expand_expr: compute memory_usage from modifier. */
4772static enum memory_use_mode
4773get_memory_usage_from_modifier (modifier)
4774 enum expand_modifier modifier;
4775{
4776 switch (modifier)
4777 {
4778 case EXPAND_NORMAL:
e5e809f4 4779 case EXPAND_SUM:
921b3427
RK
4780 return MEMORY_USE_RO;
4781 break;
4782 case EXPAND_MEMORY_USE_WO:
4783 return MEMORY_USE_WO;
4784 break;
4785 case EXPAND_MEMORY_USE_RW:
4786 return MEMORY_USE_RW;
4787 break;
921b3427 4788 case EXPAND_MEMORY_USE_DONT:
e5e809f4
JL
4789 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4790 MEMORY_USE_DONT, because they are modifiers to a call of
4791 expand_expr in the ADDR_EXPR case of expand_expr. */
921b3427 4792 case EXPAND_CONST_ADDRESS:
e5e809f4 4793 case EXPAND_INITIALIZER:
921b3427
RK
4794 return MEMORY_USE_DONT;
4795 case EXPAND_MEMORY_USE_BAD:
4796 default:
4797 abort ();
4798 }
4799}
bbf6f052
RK
4800\f
4801/* Given an rtx VALUE that may contain additions and multiplications,
4802 return an equivalent value that just refers to a register or memory.
4803 This is done by generating instructions to perform the arithmetic
c45a13a6
RK
4804 and returning a pseudo-register containing the value.
4805
4806 The returned value may be a REG, SUBREG, MEM or constant. */
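
/* Sketch of a typical use, not from the original source: given an
   address like (plus (reg 100) (mult (reg 101) (const_int 4))),

     rtx addr = force_operand (value, NULL_RTX);

   emits the multiply and add as real insns and hands back a pseudo
   (or VALUE itself when it is already a register, memory reference or
   constant), so the caller can use ADDR where arbitrary arithmetic is
   not accepted.  */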
bbf6f052
RK
4807
4808rtx
4809force_operand (value, target)
4810 rtx value, target;
4811{
4812 register optab binoptab = 0;
4813 /* Use a temporary to force order of execution of calls to
4814 `force_operand'. */
4815 rtx tmp;
4816 register rtx op2;
4817 /* Use subtarget as the target for operand 0 of a binary operation. */
4818 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4819
8b015896
RH
4820 /* Check for a PIC address load. */
4821 if (flag_pic
4822 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
4823 && XEXP (value, 0) == pic_offset_table_rtx
4824 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
4825 || GET_CODE (XEXP (value, 1)) == LABEL_REF
4826 || GET_CODE (XEXP (value, 1)) == CONST))
4827 {
4828 if (!subtarget)
4829 subtarget = gen_reg_rtx (GET_MODE (value));
4830 emit_move_insn (subtarget, value);
4831 return subtarget;
4832 }
4833
bbf6f052
RK
4834 if (GET_CODE (value) == PLUS)
4835 binoptab = add_optab;
4836 else if (GET_CODE (value) == MINUS)
4837 binoptab = sub_optab;
4838 else if (GET_CODE (value) == MULT)
4839 {
4840 op2 = XEXP (value, 1);
4841 if (!CONSTANT_P (op2)
4842 && !(GET_CODE (op2) == REG && op2 != subtarget))
4843 subtarget = 0;
4844 tmp = force_operand (XEXP (value, 0), subtarget);
4845 return expand_mult (GET_MODE (value), tmp,
906c4e36 4846 force_operand (op2, NULL_RTX),
bbf6f052
RK
4847 target, 0);
4848 }
4849
4850 if (binoptab)
4851 {
4852 op2 = XEXP (value, 1);
4853 if (!CONSTANT_P (op2)
4854 && !(GET_CODE (op2) == REG && op2 != subtarget))
4855 subtarget = 0;
4856 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4857 {
4858 binoptab = add_optab;
4859 op2 = negate_rtx (GET_MODE (value), op2);
4860 }
4861
4862 /* Check for an addition with OP2 a constant integer and our first
4863 operand a PLUS of a virtual register and something else. In that
4864 case, we want to emit the sum of the virtual register and the
4865 constant first and then add the other value. This allows virtual
4866 register instantiation to simply modify the constant rather than
4867 creating another one around this addition. */
4868 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4869 && GET_CODE (XEXP (value, 0)) == PLUS
4870 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4871 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4872 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4873 {
4874 rtx temp = expand_binop (GET_MODE (value), binoptab,
4875 XEXP (XEXP (value, 0), 0), op2,
4876 subtarget, 0, OPTAB_LIB_WIDEN);
4877 return expand_binop (GET_MODE (value), binoptab, temp,
4878 force_operand (XEXP (XEXP (value, 0), 1), 0),
4879 target, 0, OPTAB_LIB_WIDEN);
4880 }
4881
4882 tmp = force_operand (XEXP (value, 0), subtarget);
4883 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 4884 force_operand (op2, NULL_RTX),
bbf6f052 4885 target, 0, OPTAB_LIB_WIDEN);
8008b228 4886 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
4887 because the only operations we are expanding here are signed ones. */
4888 }
4889 return value;
4890}
4891\f
4892/* Subroutine of expand_expr:
4893 save the non-copied parts (LIST) of an expr (LHS), and return a list
4894 which can restore these values to their previous values,
4895 should something modify their storage. */
4896
4897static tree
4898save_noncopied_parts (lhs, list)
4899 tree lhs;
4900 tree list;
4901{
4902 tree tail;
4903 tree parts = 0;
4904
4905 for (tail = list; tail; tail = TREE_CHAIN (tail))
4906 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4907 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4908 else
4909 {
4910 tree part = TREE_VALUE (tail);
4911 tree part_type = TREE_TYPE (part);
906c4e36 4912 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
06089a8b 4913 rtx target = assign_temp (part_type, 0, 1, 1);
bbf6f052 4914 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 4915 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 4916 parts = tree_cons (to_be_saved,
906c4e36
RK
4917 build (RTL_EXPR, part_type, NULL_TREE,
4918 (tree) target),
bbf6f052
RK
4919 parts);
4920 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4921 }
4922 return parts;
4923}
4924
4925/* Subroutine of expand_expr:
4926 record the non-copied parts (LIST) of an expr (LHS), and return a list
4927 which specifies the initial values of these parts. */
4928
4929static tree
4930init_noncopied_parts (lhs, list)
4931 tree lhs;
4932 tree list;
4933{
4934 tree tail;
4935 tree parts = 0;
4936
4937 for (tail = list; tail; tail = TREE_CHAIN (tail))
4938 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4939 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4940 else
4941 {
4942 tree part = TREE_VALUE (tail);
4943 tree part_type = TREE_TYPE (part);
906c4e36 4944 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
4945 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4946 }
4947 return parts;
4948}
4949
4950/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
4951 EXP can reference X, which is being modified. TOP_P is nonzero if this
4952 call is going to be used to determine whether we need a temporary
ff439b5f
CB
4953 for EXP, as opposed to a recursive call to this function.
4954
4955 It is always safe for this routine to return zero since it merely
4956 searches for optimization opportunities. */
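
/* Hypothetical illustration: when expanding `x = f (x)', a caller may
   ask safe_from_p (x_rtx, call_expr, 1).  Because a CALL_EXPR is
   assumed to clobber memory and hard registers, the answer is 0
   whenever X_RTX is a MEM or a hard register, and the result is then
   computed into a fresh temporary first.  A conservative 0 costs at
   most an extra copy.  */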
bbf6f052
RK
4957
4958static int
e5e809f4 4959safe_from_p (x, exp, top_p)
bbf6f052
RK
4960 rtx x;
4961 tree exp;
e5e809f4 4962 int top_p;
bbf6f052
RK
4963{
4964 rtx exp_rtl = 0;
4965 int i, nops;
ff439b5f
CB
4966 static int save_expr_count;
4967 static int save_expr_size = 0;
4968 static tree *save_expr_rewritten;
4969 static tree save_expr_trees[256];
bbf6f052 4970
6676e72f
RK
4971 if (x == 0
4972 /* If EXP has varying size, we MUST use a target since we currently
4973 have no way of allocating temporaries of variable size
4974 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4975 So we assume here that something at a higher level has prevented a
f4510f37 4976 clash. This is somewhat bogus, but the best we can do. Only
4977 do this when X is BLKmode and when we are at the top level. */
4978 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
f4510f37 4979 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
4980 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4981 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4982 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4983 != INTEGER_CST)
f4510f37 4984 && GET_MODE (x) == BLKmode))
bbf6f052
RK
4985 return 1;
4986
ff439b5f
CB
4987 if (top_p && save_expr_size == 0)
4988 {
4989 int rtn;
4990
4991 save_expr_count = 0;
4992 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
4993 save_expr_rewritten = &save_expr_trees[0];
4994
4995 rtn = safe_from_p (x, exp, 1);
4996
4997 for (i = 0; i < save_expr_count; ++i)
4998 {
4999 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5000 abort ();
5001 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5002 }
5003
5004 save_expr_size = 0;
5005
5006 return rtn;
5007 }
5008
bbf6f052
RK
5009 /* If this is a subreg of a hard register, declare it unsafe; otherwise,
5010 find the underlying pseudo. */
5011 if (GET_CODE (x) == SUBREG)
5012 {
5013 x = SUBREG_REG (x);
5014 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5015 return 0;
5016 }
5017
5018 /* If X is a location in the outgoing argument area, it is always safe. */
5019 if (GET_CODE (x) == MEM
5020 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5021 || (GET_CODE (XEXP (x, 0)) == PLUS
5022 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5023 return 1;
5024
5025 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5026 {
5027 case 'd':
5028 exp_rtl = DECL_RTL (exp);
5029 break;
5030
5031 case 'c':
5032 return 1;
5033
5034 case 'x':
5035 if (TREE_CODE (exp) == TREE_LIST)
f32fd778 5036 return ((TREE_VALUE (exp) == 0
e5e809f4 5037 || safe_from_p (x, TREE_VALUE (exp), 0))
bbf6f052 5038 && (TREE_CHAIN (exp) == 0
e5e809f4 5039 || safe_from_p (x, TREE_CHAIN (exp), 0)));
ff439b5f
CB
5040 else if (TREE_CODE (exp) == ERROR_MARK)
5041 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
5042 else
5043 return 0;
5044
5045 case '1':
e5e809f4 5046 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
5047
5048 case '2':
5049 case '<':
e5e809f4
JL
5050 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5051 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
bbf6f052
RK
5052
5053 case 'e':
5054 case 'r':
5055 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5056 the expression. If it is set, we conflict iff we are that rtx or
5057 both are in memory. Otherwise, we check all operands of the
5058 expression recursively. */
5059
5060 switch (TREE_CODE (exp))
5061 {
5062 case ADDR_EXPR:
e44842fe 5063 return (staticp (TREE_OPERAND (exp, 0))
e5e809f4
JL
5064 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5065 || TREE_STATIC (exp));
bbf6f052
RK
5066
5067 case INDIRECT_REF:
5068 if (GET_CODE (x) == MEM)
5069 return 0;
5070 break;
5071
5072 case CALL_EXPR:
5073 exp_rtl = CALL_EXPR_RTL (exp);
5074 if (exp_rtl == 0)
5075 {
5076 /* Assume that the call will clobber all hard registers and
5077 all of memory. */
5078 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5079 || GET_CODE (x) == MEM)
5080 return 0;
5081 }
5082
5083 break;
5084
5085 case RTL_EXPR:
3bb5826a
RK
5086 /* If a sequence exists, we would have to scan every instruction
5087 in the sequence to see if it was safe. This is probably not
5088 worthwhile. */
5089 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
5090 return 0;
5091
3bb5826a 5092 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
5093 break;
5094
5095 case WITH_CLEANUP_EXPR:
5096 exp_rtl = RTL_EXPR_RTL (exp);
5097 break;
5098
5dab5552 5099 case CLEANUP_POINT_EXPR:
e5e809f4 5100 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 5101
bbf6f052
RK
5102 case SAVE_EXPR:
5103 exp_rtl = SAVE_EXPR_RTL (exp);
ff439b5f
CB
5104 if (exp_rtl)
5105 break;
5106
5107 /* This SAVE_EXPR might appear many times in the top-level
5108 safe_from_p() expression, and if it has a complex
5109 subexpression, examining it multiple times could result
5110 in a combinatorial explosion. E.g. on an Alpha
5111 running at least 200MHz, a Fortran test case compiled with
5112 optimization took about 28 minutes to compile -- even though
5113 it was only a few lines long, and the complicated line causing
5114 so much time to be spent in the earlier version of safe_from_p()
5115 had only 293 or so unique nodes.
5116
5117 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5118 where it is so we can turn it back in the top-level safe_from_p()
5119 when we're done. */
5120
5121 /* For now, don't bother re-sizing the array. */
5122 if (save_expr_count >= save_expr_size)
5123 return 0;
5124 save_expr_rewritten[save_expr_count++] = exp;
ff439b5f
CB
5125
5126 nops = tree_code_length[(int) SAVE_EXPR];
5127 for (i = 0; i < nops; i++)
ff59bfe6
JM
5128 {
5129 tree operand = TREE_OPERAND (exp, i);
5130 if (operand == NULL_TREE)
5131 continue;
5132 TREE_SET_CODE (exp, ERROR_MARK);
5133 if (!safe_from_p (x, operand, 0))
5134 return 0;
5135 TREE_SET_CODE (exp, SAVE_EXPR);
5136 }
5137 TREE_SET_CODE (exp, ERROR_MARK);
ff439b5f 5138 return 1;
bbf6f052 5139
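/* Added explanatory note: the rewriting above is a cheap visited set.
   Each SAVE_EXPR is temporarily turned into an ERROR_MARK while its
   operands are scanned, so a second visit to the same (shared) node
   hits the ERROR_MARK case in the 'x' class above and returns 1
   immediately instead of re-walking the subtree; the top-level call
   restores every rewritten node back to SAVE_EXPR when it finishes.  */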
8129842c
RS
5140 case BIND_EXPR:
5141 /* The only operand we look at is operand 1. The rest aren't
5142 part of the expression. */
e5e809f4 5143 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 5144
bbf6f052 5145 case METHOD_CALL_EXPR:
0f41302f 5146 /* This takes a rtx argument, but shouldn't appear here. */
bbf6f052 5147 abort ();
e9a25f70
JL
5148
5149 default:
5150 break;
bbf6f052
RK
5151 }
5152
5153 /* If we have an rtx, we do not need to scan our operands. */
5154 if (exp_rtl)
5155 break;
5156
5157 nops = tree_code_length[(int) TREE_CODE (exp)];
5158 for (i = 0; i < nops; i++)
5159 if (TREE_OPERAND (exp, i) != 0
e5e809f4 5160 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052
RK
5161 return 0;
5162 }
5163
5164 /* If we have an rtl, find any enclosed object. Then see if we conflict
5165 with it. */
5166 if (exp_rtl)
5167 {
5168 if (GET_CODE (exp_rtl) == SUBREG)
5169 {
5170 exp_rtl = SUBREG_REG (exp_rtl);
5171 if (GET_CODE (exp_rtl) == REG
5172 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5173 return 0;
5174 }
5175
5176 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5177 are memory and EXP is not readonly. */
5178 return ! (rtx_equal_p (x, exp_rtl)
5179 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5180 && ! TREE_READONLY (exp)));
5181 }
5182
5183 /* If we reach here, it is safe. */
5184 return 1;
5185}
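/* Usage sketch (added illustration, not part of the original file): a
   typical caller decides whether an existing TARGET may be reused when
   expanding EXP into it, along the lines of

       if (target == 0 || ! safe_from_p (target, exp, 1))
         target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
       op0 = expand_expr (exp, target, tmode, EXPAND_NORMAL);

   which mirrors the CONSTRUCTOR handling in expand_expr below.  */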
5186
5187/* Subroutine of expand_expr: return nonzero iff EXP is an
5188 expression whose type is statically determinable. */
5189
5190static int
5191fixed_type_p (exp)
5192 tree exp;
5193{
5194 if (TREE_CODE (exp) == PARM_DECL
5195 || TREE_CODE (exp) == VAR_DECL
5196 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5197 || TREE_CODE (exp) == COMPONENT_REF
5198 || TREE_CODE (exp) == ARRAY_REF)
5199 return 1;
5200 return 0;
5201}
01c8a7c8
RK
5202
5203/* Subroutine of expand_expr: return rtx if EXP is a
5204 variable or parameter; else return 0. */
5205
5206static rtx
5207var_rtx (exp)
5208 tree exp;
5209{
5210 STRIP_NOPS (exp);
5211 switch (TREE_CODE (exp))
5212 {
5213 case PARM_DECL:
5214 case VAR_DECL:
5215 return DECL_RTL (exp);
5216 default:
5217 return 0;
5218 }
5219}
dbecbbe4
JL
5220
5221#ifdef MAX_INTEGER_COMPUTATION_MODE
5222void
5223check_max_integer_computation_mode (exp)
5224 tree exp;
5225{
5226 enum tree_code code = TREE_CODE (exp);
5227 enum machine_mode mode;
5228
71bca506
JL
5229 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5230 if (code == NOP_EXPR
5231 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5232 return;
5233
dbecbbe4
JL
5234 /* First check the type of the overall operation. We need only look at
5235 unary, binary and relational operations. */
5236 if (TREE_CODE_CLASS (code) == '1'
5237 || TREE_CODE_CLASS (code) == '2'
5238 || TREE_CODE_CLASS (code) == '<')
5239 {
5240 mode = TYPE_MODE (TREE_TYPE (exp));
5241 if (GET_MODE_CLASS (mode) == MODE_INT
5242 && mode > MAX_INTEGER_COMPUTATION_MODE)
5243 fatal ("unsupported wide integer operation");
5244 }
5245
5246 /* Check operand of a unary op. */
5247 if (TREE_CODE_CLASS (code) == '1')
5248 {
5249 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5250 if (GET_MODE_CLASS (mode) == MODE_INT
5251 && mode > MAX_INTEGER_COMPUTATION_MODE)
5252 fatal ("unsupported wide integer operation");
5253 }
5254
5255 /* Check operands of a binary/comparison op. */
5256 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5257 {
5258 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5259 if (GET_MODE_CLASS (mode) == MODE_INT
5260 && mode > MAX_INTEGER_COMPUTATION_MODE)
5261 fatal ("unsupported wide integer operation");
5262
5263 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5264 if (GET_MODE_CLASS (mode) == MODE_INT
5265 && mode > MAX_INTEGER_COMPUTATION_MODE)
5266 fatal ("unsupported wide integer operation");
5267 }
5268}
5269#endif
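/* Worked example (added commentary; assumes a hypothetical target that
   defines MAX_INTEGER_COMPUTATION_MODE as SImode): expanding a DImode
   addition reaches the binary-class check above with mode == DImode,
   DImode compares greater than SImode, and compilation stops with
   "unsupported wide integer operation" rather than emitting rtl the
   target cannot implement.  */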
5270
bbf6f052
RK
5271\f
5272/* expand_expr: generate code for computing expression EXP.
5273 An rtx for the computed value is returned. The value is never null.
5274 In the case of a void EXP, const0_rtx is returned.
5275
5276 The value may be stored in TARGET if TARGET is nonzero.
5277 TARGET is just a suggestion; callers must assume that
5278 the rtx returned may not be the same as TARGET.
5279
5280 If TARGET is CONST0_RTX, it means that the value will be ignored.
5281
5282 If TMODE is not VOIDmode, it suggests generating the
5283 result in mode TMODE. But this is done only when convenient.
5284 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5285 TMODE is just a suggestion; callers must assume that
5286 the rtx returned may not have mode TMODE.
5287
d6a5ac33
RK
5288 Note that TARGET may have neither TMODE nor MODE. In that case, it
5289 probably will not be used.
bbf6f052
RK
5290
5291 If MODIFIER is EXPAND_SUM then when EXP is an addition
5292 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5293 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5294 products as above, or REG or MEM, or constant.
5295 Ordinarily in such cases we would output mul or add instructions
5296 and then return a pseudo reg containing the sum.
5297
5298 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5299 it also marks a label as absolutely required (it can't be dead).
26fcb35a 5300 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
5301 This is used for outputting expressions used in initializers.
5302
5303 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5304 with a constant address even if that address is not normally legitimate.
5305 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
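/* Illustrative call (added commentary): most callers simply write

       rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   letting the expression pick its natural mode, then check where VAL
   actually landed; per the contract above, TARGET and TMODE are only
   suggestions and the returned rtx may match neither.  */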
bbf6f052
RK
5306
5307rtx
5308expand_expr (exp, target, tmode, modifier)
5309 register tree exp;
5310 rtx target;
5311 enum machine_mode tmode;
5312 enum expand_modifier modifier;
5313{
b50d17a1
RK
5314 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5315 This is static so it will be accessible to our recursive callees. */
5316 static tree placeholder_list = 0;
bbf6f052
RK
5317 register rtx op0, op1, temp;
5318 tree type = TREE_TYPE (exp);
5319 int unsignedp = TREE_UNSIGNED (type);
5320 register enum machine_mode mode = TYPE_MODE (type);
5321 register enum tree_code code = TREE_CODE (exp);
5322 optab this_optab;
5323 /* Use subtarget as the target for operand 0 of a binary operation. */
5324 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5325 rtx original_target = target;
dd27116b
RK
5326 int ignore = (target == const0_rtx
5327 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
4d87de75
RS
5328 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5329 || code == COND_EXPR)
dd27116b 5330 && TREE_CODE (type) == VOID_TYPE));
bbf6f052 5331 tree context;
921b3427
RK
5332 /* Used by check-memory-usage to make modifier read only. */
5333 enum expand_modifier ro_modifier;
bbf6f052 5334
921b3427
RK
5335 /* Make a read-only version of the modifier. */
5336 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5337 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5338 ro_modifier = modifier;
5339 else
5340 ro_modifier = EXPAND_NORMAL;
ca695ac9 5341
bbf6f052
RK
5342 /* Don't use hard regs as subtargets, because the combiner
5343 can only handle pseudo regs. */
5344 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5345 subtarget = 0;
5346 /* Avoid subtargets inside loops,
5347 since they hide some invariant expressions. */
5348 if (preserve_subexpressions_p ())
5349 subtarget = 0;
5350
dd27116b
RK
5351 /* If we are going to ignore this result, we need only do something
5352 if there is a side-effect somewhere in the expression. If there
5353 is, short-circuit the most common cases here. Note that we must
5354 not call expand_expr with anything but const0_rtx in case this
5355 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 5356
dd27116b
RK
5357 if (ignore)
5358 {
5359 if (! TREE_SIDE_EFFECTS (exp))
5360 return const0_rtx;
5361
5362 /* Ensure we reference a volatile object even if value is ignored. */
5363 if (TREE_THIS_VOLATILE (exp)
5364 && TREE_CODE (exp) != FUNCTION_DECL
5365 && mode != VOIDmode && mode != BLKmode)
5366 {
921b3427 5367 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
dd27116b
RK
5368 if (GET_CODE (temp) == MEM)
5369 temp = copy_to_reg (temp);
5370 return const0_rtx;
5371 }
5372
5373 if (TREE_CODE_CLASS (code) == '1')
5374 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 5375 VOIDmode, ro_modifier);
dd27116b
RK
5376 else if (TREE_CODE_CLASS (code) == '2'
5377 || TREE_CODE_CLASS (code) == '<')
5378 {
921b3427
RK
5379 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5380 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
dd27116b
RK
5381 return const0_rtx;
5382 }
5383 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5384 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5385 /* If the second operand has no side effects, just evaluate
0f41302f 5386 the first. */
dd27116b 5387 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 5388 VOIDmode, ro_modifier);
dd27116b 5389
90764a87 5390 target = 0;
dd27116b 5391 }
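/* Example of the short-circuits above (added commentary): for a
   discarded expression such as `(void) (a + b);' the binary-class
   branch expands both operands against const0_rtx purely for their
   side effects and returns const0_rtx without ever forming the sum.  */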
bbf6f052 5392
dbecbbe4 5393#ifdef MAX_INTEGER_COMPUTATION_MODE
ce3c0b53
JL
5394 if (target
5395 && TREE_CODE (exp) != INTEGER_CST
5396 && TREE_CODE (exp) != PARM_DECL
ee06cc21
JL
5397 && TREE_CODE (exp) != ARRAY_REF
5398 && TREE_CODE (exp) != COMPONENT_REF
5399 && TREE_CODE (exp) != BIT_FIELD_REF
5400 && TREE_CODE (exp) != INDIRECT_REF
ce3c0b53 5401 && TREE_CODE (exp) != VAR_DECL)
dbecbbe4
JL
5402 {
5403 enum machine_mode mode = GET_MODE (target);
5404
5405 if (GET_MODE_CLASS (mode) == MODE_INT
5406 && mode > MAX_INTEGER_COMPUTATION_MODE)
5407 fatal ("unsupported wide integer operation");
5408 }
5409
71bca506 5410 if (TREE_CODE (exp) != INTEGER_CST
ce3c0b53 5411 && TREE_CODE (exp) != PARM_DECL
ee06cc21
JL
5412 && TREE_CODE (exp) != ARRAY_REF
5413 && TREE_CODE (exp) != COMPONENT_REF
5414 && TREE_CODE (exp) != BIT_FIELD_REF
5415 && TREE_CODE (exp) != INDIRECT_REF
ce3c0b53 5416 && TREE_CODE (exp) != VAR_DECL
71bca506 5417 && GET_MODE_CLASS (tmode) == MODE_INT
dbecbbe4
JL
5418 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5419 fatal ("unsupported wide integer operation");
5420
5421 check_max_integer_computation_mode (exp);
5422#endif
5423
e44842fe
RK
5424 /* If we will do cse, generate all results into pseudo registers
5425 since 1) that allows cse to find more things
5426 and 2) otherwise cse could produce an insn the machine
5427 cannot support. */
5428
bbf6f052
RK
5429 if (! cse_not_expected && mode != BLKmode && target
5430 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5431 target = subtarget;
5432
bbf6f052
RK
5433 switch (code)
5434 {
5435 case LABEL_DECL:
b552441b
RS
5436 {
5437 tree function = decl_function_context (exp);
5438 /* Handle using a label in a containing function. */
d0977240
RK
5439 if (function != current_function_decl
5440 && function != inline_function_decl && function != 0)
b552441b
RS
5441 {
5442 struct function *p = find_function_data (function);
5443 /* Allocate in the memory associated with the function
5444 that the label is in. */
5445 push_obstacks (p->function_obstack,
5446 p->function_maybepermanent_obstack);
5447
38a448ca
RH
5448 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5449 label_rtx (exp),
5450 p->forced_labels);
b552441b
RS
5451 pop_obstacks ();
5452 }
5453 else if (modifier == EXPAND_INITIALIZER)
38a448ca
RH
5454 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5455 label_rtx (exp), forced_labels);
5456 temp = gen_rtx_MEM (FUNCTION_MODE,
5457 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
d0977240
RK
5458 if (function != current_function_decl
5459 && function != inline_function_decl && function != 0)
26fcb35a
RS
5460 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5461 return temp;
b552441b 5462 }
bbf6f052
RK
5463
5464 case PARM_DECL:
5465 if (DECL_RTL (exp) == 0)
5466 {
5467 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 5468 return CONST0_RTX (mode);
bbf6f052
RK
5469 }
5470
0f41302f 5471 /* ... fall through ... */
d6a5ac33 5472
bbf6f052 5473 case VAR_DECL:
2dca20cd
RS
5474 /* If a static var's type was incomplete when the decl was written,
5475 but the type is complete now, lay out the decl now. */
5476 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5477 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5478 {
5479 push_obstacks_nochange ();
5480 end_temporary_allocation ();
5481 layout_decl (exp, 0);
5482 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5483 pop_obstacks ();
5484 }
d6a5ac33 5485
921b3427
RK
5486 /* Only check automatic variables. Currently, function arguments are
5487 not checked (this can be done at compile-time with prototypes).
5488 Aggregates are not checked. */
5489 if (flag_check_memory_usage && code == VAR_DECL
5490 && GET_CODE (DECL_RTL (exp)) == MEM
5491 && DECL_CONTEXT (exp) != NULL_TREE
5492 && ! TREE_STATIC (exp)
5493 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5494 {
5495 enum memory_use_mode memory_usage;
5496 memory_usage = get_memory_usage_from_modifier (modifier);
5497
5498 if (memory_usage != MEMORY_USE_DONT)
5499 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5500 XEXP (DECL_RTL (exp), 0), ptr_mode,
5501 GEN_INT (int_size_in_bytes (type)),
5502 TYPE_MODE (sizetype),
956d6950
JL
5503 GEN_INT (memory_usage),
5504 TYPE_MODE (integer_type_node));
921b3427
RK
5505 }
5506
0f41302f 5507 /* ... fall through ... */
d6a5ac33 5508
2dca20cd 5509 case FUNCTION_DECL:
bbf6f052
RK
5510 case RESULT_DECL:
5511 if (DECL_RTL (exp) == 0)
5512 abort ();
d6a5ac33 5513
e44842fe
RK
5514 /* Ensure the variable is marked as used even if it doesn't go through
5515 a parser. If it hasn't been used yet, write out an external
5516 definition. */
5517 if (! TREE_USED (exp))
5518 {
5519 assemble_external (exp);
5520 TREE_USED (exp) = 1;
5521 }
5522
dc6d66b3
RK
5523 /* Show we haven't gotten RTL for this yet. */
5524 temp = 0;
5525
bbf6f052
RK
5526 /* Handle variables inherited from containing functions. */
5527 context = decl_function_context (exp);
5528
5529 /* We treat inline_function_decl as an alias for the current function
5530 because that is the inline function whose vars, types, etc.
5531 are being merged into the current function.
5532 See expand_inline_function. */
d6a5ac33 5533
bbf6f052
RK
5534 if (context != 0 && context != current_function_decl
5535 && context != inline_function_decl
5536 /* If var is static, we don't need a static chain to access it. */
5537 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5538 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5539 {
5540 rtx addr;
5541
5542 /* Mark as non-local and addressable. */
81feeecb 5543 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
5544 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5545 abort ();
bbf6f052
RK
5546 mark_addressable (exp);
5547 if (GET_CODE (DECL_RTL (exp)) != MEM)
5548 abort ();
5549 addr = XEXP (DECL_RTL (exp), 0);
5550 if (GET_CODE (addr) == MEM)
38a448ca
RH
5551 addr = gen_rtx_MEM (Pmode,
5552 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
5553 else
5554 addr = fix_lexical_addr (addr, exp);
dc6d66b3 5555 temp = change_address (DECL_RTL (exp), mode, addr);
bbf6f052 5556 }
4af3895e 5557
bbf6f052
RK
5558 /* This is the case of an array whose size is to be determined
5559 from its initializer, while the initializer is still being parsed.
5560 See expand_decl. */
d6a5ac33 5561
dc6d66b3
RK
5562 else if (GET_CODE (DECL_RTL (exp)) == MEM
5563 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5564 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
bbf6f052 5565 XEXP (DECL_RTL (exp), 0));
d6a5ac33
RK
5566
5567 /* If DECL_RTL is memory, we are in the normal case and either
5568 the address is not valid or it is not a register and -fforce-addr
5569 is specified, get the address into a register. */
5570
dc6d66b3
RK
5571 else if (GET_CODE (DECL_RTL (exp)) == MEM
5572 && modifier != EXPAND_CONST_ADDRESS
5573 && modifier != EXPAND_SUM
5574 && modifier != EXPAND_INITIALIZER
5575 && (! memory_address_p (DECL_MODE (exp),
5576 XEXP (DECL_RTL (exp), 0))
5577 || (flag_force_addr
5578 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5579 temp = change_address (DECL_RTL (exp), VOIDmode,
d6a5ac33 5580 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 5581
dc6d66b3
RK
5582 /* If we got something, return it. But first, set the alignment
5583 if the address is a register. */
5584 if (temp != 0)
5585 {
5586 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5587 mark_reg_pointer (XEXP (temp, 0),
5588 DECL_ALIGN (exp) / BITS_PER_UNIT);
5589
5590 return temp;
5591 }
5592
1499e0a8
RK
5593 /* If the mode of DECL_RTL does not match that of the decl, it
5594 must be a promoted value. We return a SUBREG of the wanted mode,
5595 but mark it so that we know that it was already extended. */
5596
5597 if (GET_CODE (DECL_RTL (exp)) == REG
5598 && GET_MODE (DECL_RTL (exp)) != mode)
5599 {
1499e0a8
RK
5600 /* Get the signedness used for this variable. Ensure we get the
5601 same mode we got when the variable was declared. */
78911e8b
RK
5602 if (GET_MODE (DECL_RTL (exp))
5603 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
1499e0a8
RK
5604 abort ();
5605
38a448ca 5606 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
1499e0a8
RK
5607 SUBREG_PROMOTED_VAR_P (temp) = 1;
5608 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5609 return temp;
5610 }
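/* Added note: on a target whose PROMOTE_MODE widens QImode locals to
   SImode registers, DECL_RTL is the SImode pseudo and the SUBREG built
   above is (subreg:QI (reg:SI N) 0) with SUBREG_PROMOTED_VAR_P set,
   letting later code omit redundant sign- or zero-extensions.  */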
5611
bbf6f052
RK
5612 return DECL_RTL (exp);
5613
5614 case INTEGER_CST:
5615 return immed_double_const (TREE_INT_CST_LOW (exp),
5616 TREE_INT_CST_HIGH (exp),
5617 mode);
5618
5619 case CONST_DECL:
921b3427
RK
5620 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5621 EXPAND_MEMORY_USE_BAD);
bbf6f052
RK
5622
5623 case REAL_CST:
5624 /* If optimized, generate immediate CONST_DOUBLE
5625 which will be turned into memory by reload if necessary.
5626
5627 We used to force a register so that loop.c could see it. But
5628 this does not allow gen_* patterns to perform optimizations with
5629 the constants. It also produces two insns in cases like "x = 1.0;".
5630 On most machines, floating-point constants are not permitted in
5631 many insns, so we'd end up copying it to a register in any case.
5632
5633 Now, we do the copying in expand_binop, if appropriate. */
5634 return immed_real_const (exp);
5635
5636 case COMPLEX_CST:
5637 case STRING_CST:
5638 if (! TREE_CST_RTL (exp))
5639 output_constant_def (exp);
5640
5641 /* TREE_CST_RTL probably contains a constant address.
5642 On RISC machines where a constant address isn't valid,
5643 make some insns to get that address into a register. */
5644 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5645 && modifier != EXPAND_CONST_ADDRESS
5646 && modifier != EXPAND_INITIALIZER
5647 && modifier != EXPAND_SUM
d6a5ac33
RK
5648 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5649 || (flag_force_addr
5650 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
bbf6f052
RK
5651 return change_address (TREE_CST_RTL (exp), VOIDmode,
5652 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5653 return TREE_CST_RTL (exp);
5654
bf1e5319 5655 case EXPR_WITH_FILE_LOCATION:
b24f65cd
APB
5656 {
5657 rtx to_return;
5658 char *saved_input_filename = input_filename;
5659 int saved_lineno = lineno;
5660 input_filename = EXPR_WFL_FILENAME (exp);
5661 lineno = EXPR_WFL_LINENO (exp);
5662 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5663 emit_line_note (input_filename, lineno);
5664 /* Possibly avoid switching back and forth here. */
5665 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5666 input_filename = saved_input_filename;
5667 lineno = saved_lineno;
5668 return to_return;
5669 }
bf1e5319 5670
bbf6f052
RK
5671 case SAVE_EXPR:
5672 context = decl_function_context (exp);
d6a5ac33 5673
d0977240
RK
5674 /* If this SAVE_EXPR was at global context, assume we are an
5675 initialization function and move it into our context. */
5676 if (context == 0)
5677 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5678
bbf6f052
RK
5679 /* We treat inline_function_decl as an alias for the current function
5680 because that is the inline function whose vars, types, etc.
5681 are being merged into the current function.
5682 See expand_inline_function. */
5683 if (context == current_function_decl || context == inline_function_decl)
5684 context = 0;
5685
5686 /* If this is non-local, handle it. */
5687 if (context)
5688 {
d0977240
RK
5689 /* The following call just exists to abort if the context is
5690 not of a containing function. */
5691 find_function_data (context);
5692
bbf6f052
RK
5693 temp = SAVE_EXPR_RTL (exp);
5694 if (temp && GET_CODE (temp) == REG)
5695 {
5696 put_var_into_stack (exp);
5697 temp = SAVE_EXPR_RTL (exp);
5698 }
5699 if (temp == 0 || GET_CODE (temp) != MEM)
5700 abort ();
5701 return change_address (temp, mode,
5702 fix_lexical_addr (XEXP (temp, 0), exp));
5703 }
5704 if (SAVE_EXPR_RTL (exp) == 0)
5705 {
06089a8b
RK
5706 if (mode == VOIDmode)
5707 temp = const0_rtx;
5708 else
e5e809f4 5709 temp = assign_temp (type, 3, 0, 0);
1499e0a8 5710
bbf6f052 5711 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 5712 if (!optimize && GET_CODE (temp) == REG)
38a448ca
RH
5713 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5714 save_expr_regs);
ff78f773
RK
5715
5716 /* If the mode of TEMP does not match that of the expression, it
5717 must be a promoted value. We pass store_expr a SUBREG of the
5718 wanted mode but mark it so that we know that it was already
5719 extended. Note that `unsignedp' was modified above in
5720 this case. */
5721
5722 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5723 {
38a448ca 5724 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
ff78f773
RK
5725 SUBREG_PROMOTED_VAR_P (temp) = 1;
5726 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5727 }
5728
4c7a0be9 5729 if (temp == const0_rtx)
921b3427
RK
5730 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5731 EXPAND_MEMORY_USE_BAD);
4c7a0be9
JW
5732 else
5733 store_expr (TREE_OPERAND (exp, 0), temp, 0);
e5e809f4
JL
5734
5735 TREE_USED (exp) = 1;
bbf6f052 5736 }
1499e0a8
RK
5737
5738 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5739 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 5740 but mark it so that we know that it was already extended. */
1499e0a8
RK
5741
5742 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5743 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5744 {
e70d22c8
RK
5745 /* Compute the signedness and make the proper SUBREG. */
5746 promote_mode (type, mode, &unsignedp, 0);
38a448ca 5747 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
1499e0a8
RK
5748 SUBREG_PROMOTED_VAR_P (temp) = 1;
5749 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5750 return temp;
5751 }
5752
bbf6f052
RK
5753 return SAVE_EXPR_RTL (exp);
5754
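/* Added note: the SAVE_EXPR handling above implements evaluate-once
   semantics.  For a tree like

       build (PLUS_EXPR, type, save_expr (x), save_expr (x))

   the first expansion computes X into SAVE_EXPR_RTL and every later
   expansion of the same node just returns that stored rtx.  */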
679163cf
MS
5755 case UNSAVE_EXPR:
5756 {
5757 rtx temp;
5758 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5759 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5760 return temp;
5761 }
5762
b50d17a1 5763 case PLACEHOLDER_EXPR:
e9a25f70
JL
5764 {
5765 tree placeholder_expr;
5766
5767 /* If there is an object on the head of the placeholder list,
e5e809f4 5768 see if any object in it is of type TYPE or a pointer to it. For
e9a25f70
JL
5769 further information, see tree.def. */
5770 for (placeholder_expr = placeholder_list;
5771 placeholder_expr != 0;
5772 placeholder_expr = TREE_CHAIN (placeholder_expr))
5773 {
5774 tree need_type = TYPE_MAIN_VARIANT (type);
5775 tree object = 0;
5776 tree old_list = placeholder_list;
5777 tree elt;
5778
e5e809f4
JL
5779 /* Find the outermost reference that is of the type we want.
5780 If none, see if any object has a type that is a pointer to
5781 the type we want. */
5782 for (elt = TREE_PURPOSE (placeholder_expr);
5783 elt != 0 && object == 0;
5784 elt
5785 = ((TREE_CODE (elt) == COMPOUND_EXPR
5786 || TREE_CODE (elt) == COND_EXPR)
5787 ? TREE_OPERAND (elt, 1)
5788 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5789 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5790 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5791 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5792 ? TREE_OPERAND (elt, 0) : 0))
5793 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5794 object = elt;
e9a25f70 5795
e9a25f70 5796 for (elt = TREE_PURPOSE (placeholder_expr);
e5e809f4
JL
5797 elt != 0 && object == 0;
5798 elt
5799 = ((TREE_CODE (elt) == COMPOUND_EXPR
5800 || TREE_CODE (elt) == COND_EXPR)
5801 ? TREE_OPERAND (elt, 1)
5802 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5803 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5804 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5805 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5806 ? TREE_OPERAND (elt, 0) : 0))
5807 if (POINTER_TYPE_P (TREE_TYPE (elt))
5808 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
e9a25f70 5809 == need_type))
e5e809f4 5810 object = build1 (INDIRECT_REF, need_type, elt);
dc500fa1 5811
e9a25f70 5812 if (object != 0)
2cde2255 5813 {
e9a25f70
JL
5814 /* Expand this object skipping the list entries before
5815 it was found in case it is also a PLACEHOLDER_EXPR.
5816 In that case, we want to translate it using subsequent
5817 entries. */
5818 placeholder_list = TREE_CHAIN (placeholder_expr);
5819 temp = expand_expr (object, original_target, tmode,
5820 ro_modifier);
5821 placeholder_list = old_list;
5822 return temp;
2cde2255 5823 }
e9a25f70
JL
5824 }
5825 }
b50d17a1
RK
5826
5827 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5828 abort ();
5829
5830 case WITH_RECORD_EXPR:
5831 /* Put the object on the placeholder list, expand our first operand,
5832 and pop the list. */
5833 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5834 placeholder_list);
5835 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
921b3427 5836 tmode, ro_modifier);
b50d17a1
RK
5837 placeholder_list = TREE_CHAIN (placeholder_list);
5838 return target;
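/* Added commentary: WITH_RECORD_EXPR and PLACEHOLDER_EXPR cooperate.
   Expanding WITH_RECORD_EXPR <expr, record> pushes RECORD onto
   placeholder_list, so any PLACEHOLDER_EXPR inside EXPR (for example,
   a self-referential size in a variable-sized record type) is expanded
   as a reference into that record object.  */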
5839
bbf6f052 5840 case EXIT_EXPR:
e44842fe
RK
5841 expand_exit_loop_if_false (NULL_PTR,
5842 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
5843 return const0_rtx;
5844
f42e28dd
APB
5845 case LABELED_BLOCK_EXPR:
5846 if (LABELED_BLOCK_BODY (exp))
5847 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
5848 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
5849 return const0_rtx;
5850
5851 case EXIT_BLOCK_EXPR:
5852 if (EXIT_BLOCK_RETURN (exp))
5853 really_sorry ("returned value in block_exit_expr");
5854 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
5855 return const0_rtx;
5856
bbf6f052 5857 case LOOP_EXPR:
0088fcb1 5858 push_temp_slots ();
bbf6f052
RK
5859 expand_start_loop (1);
5860 expand_expr_stmt (TREE_OPERAND (exp, 0));
5861 expand_end_loop ();
0088fcb1 5862 pop_temp_slots ();
bbf6f052
RK
5863
5864 return const0_rtx;
5865
5866 case BIND_EXPR:
5867 {
5868 tree vars = TREE_OPERAND (exp, 0);
5869 int vars_need_expansion = 0;
5870
5871 /* Need to open a binding contour here because
e976b8b2 5872 if there are any cleanups they must be contained here. */
bbf6f052
RK
5873 expand_start_bindings (0);
5874
2df53c0b
RS
5875 /* Mark the corresponding BLOCK for output in its proper place. */
5876 if (TREE_OPERAND (exp, 2) != 0
5877 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5878 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
5879
5880 /* If VARS have not yet been expanded, expand them now. */
5881 while (vars)
5882 {
5883 if (DECL_RTL (vars) == 0)
5884 {
5885 vars_need_expansion = 1;
5886 expand_decl (vars);
5887 }
5888 expand_decl_init (vars);
5889 vars = TREE_CHAIN (vars);
5890 }
5891
921b3427 5892 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
bbf6f052
RK
5893
5894 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5895
5896 return temp;
5897 }
5898
5899 case RTL_EXPR:
83b853c9
JM
5900 if (RTL_EXPR_SEQUENCE (exp))
5901 {
5902 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5903 abort ();
5904 emit_insns (RTL_EXPR_SEQUENCE (exp));
5905 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5906 }
99310285 5907 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
ca814259 5908 free_temps_for_rtl_expr (exp);
bbf6f052
RK
5909 return RTL_EXPR_RTL (exp);
5910
5911 case CONSTRUCTOR:
dd27116b
RK
5912 /* If we don't need the result, just ensure we evaluate any
5913 subexpressions. */
5914 if (ignore)
5915 {
5916 tree elt;
5917 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
921b3427
RK
5918 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
5919 EXPAND_MEMORY_USE_BAD);
dd27116b
RK
5920 return const0_rtx;
5921 }
3207b172 5922
4af3895e
JVA
5923 /* All elts simple constants => refer to a constant in memory. But
5924 if this is a non-BLKmode mode, let it store a field at a time
5925 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 5926 fold. Likewise, if we have a target we can use, it is best to
5927 store directly into the target unless the type is large enough
5928 that memcpy will be used. If we are making an initializer and
3207b172 5929 all operands are constant, put it in memory as well. */
dd27116b 5930 else if ((TREE_STATIC (exp)
3207b172 5931 && ((mode == BLKmode
e5e809f4 5932 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1
RK
5933 || TREE_ADDRESSABLE (exp)
5934 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5935 && (move_by_pieces_ninsns
67225c15
RK
5936 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5937 TYPE_ALIGN (type) / BITS_PER_UNIT)
9862dea9 5938 >= MOVE_RATIO)
9de08200 5939 && ! mostly_zeros_p (exp))))
dd27116b 5940 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052
RK
5941 {
5942 rtx constructor = output_constant_def (exp);
b552441b
RS
5943 if (modifier != EXPAND_CONST_ADDRESS
5944 && modifier != EXPAND_INITIALIZER
5945 && modifier != EXPAND_SUM
d6a5ac33
RK
5946 && (! memory_address_p (GET_MODE (constructor),
5947 XEXP (constructor, 0))
5948 || (flag_force_addr
5949 && GET_CODE (XEXP (constructor, 0)) != REG)))
bbf6f052
RK
5950 constructor = change_address (constructor, VOIDmode,
5951 XEXP (constructor, 0));
5952 return constructor;
5953 }
5954
bbf6f052
RK
5955 else
5956 {
e9ac02a6
JW
5957 /* Handle calls that pass values in multiple non-contiguous
5958 locations. The Irix 6 ABI has examples of this. */
e5e809f4 5959 if (target == 0 || ! safe_from_p (target, exp, 1)
e9ac02a6 5960 || GET_CODE (target) == PARALLEL)
06089a8b
RK
5961 {
5962 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5963 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5964 else
5965 target = assign_temp (type, 0, 1, 1);
5966 }
07604beb
RK
5967
5968 if (TREE_READONLY (exp))
5969 {
9151b3bf 5970 if (GET_CODE (target) == MEM)
effbcc6a
RK
5971 target = copy_rtx (target);
5972
07604beb
RK
5973 RTX_UNCHANGING_P (target) = 1;
5974 }
5975
e1a43f73 5976 store_constructor (exp, target, 0);
bbf6f052
RK
5977 return target;
5978 }
5979
5980 case INDIRECT_REF:
5981 {
5982 tree exp1 = TREE_OPERAND (exp, 0);
5983 tree exp2;
7581a30f
JW
5984 tree index;
5985 tree string = string_constant (exp1, &index);
5986 int i;
5987
06eaa86f 5988 /* Try to optimize reads from const strings. */
7581a30f
JW
5989 if (string
5990 && TREE_CODE (string) == STRING_CST
5991 && TREE_CODE (index) == INTEGER_CST
5992 && !TREE_INT_CST_HIGH (index)
5993 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
5994 && GET_MODE_CLASS (mode) == MODE_INT
06eaa86f
JW
5995 && GET_MODE_SIZE (mode) == 1
5996 && modifier != EXPAND_MEMORY_USE_WO)
7581a30f 5997 return GEN_INT (TREE_STRING_POINTER (string)[i]);
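/* Added example: with this optimization a dereference such as
   *("abc" + 1) never touches memory; string_constant recovers the
   STRING_CST "abc" and the index 1, and the whole load folds to
   GEN_INT ('b').  */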
bbf6f052 5998
405f0da6
JW
5999 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6000 op0 = memory_address (mode, op0);
8c8a8e34 6001
921b3427
RK
6002 if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6003 {
6004 enum memory_use_mode memory_usage;
6005 memory_usage = get_memory_usage_from_modifier (modifier);
6006
6007 if (memory_usage != MEMORY_USE_DONT)
c85f7c16
JL
6008 {
6009 in_check_memory_usage = 1;
6010 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6011 op0, ptr_mode,
6012 GEN_INT (int_size_in_bytes (type)),
6013 TYPE_MODE (sizetype),
6014 GEN_INT (memory_usage),
6015 TYPE_MODE (integer_type_node));
6016 in_check_memory_usage = 0;
6017 }
921b3427
RK
6018 }
6019
38a448ca 6020 temp = gen_rtx_MEM (mode, op0);
8c8a8e34
JW
6021 /* If address was computed by addition,
6022 mark this as an element of an aggregate. */
9ec36da5
JL
6023 if (TREE_CODE (exp1) == PLUS_EXPR
6024 || (TREE_CODE (exp1) == SAVE_EXPR
6025 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
05e3bdb9 6026 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
8c8a8e34
JW
6027 || (TREE_CODE (exp1) == ADDR_EXPR
6028 && (exp2 = TREE_OPERAND (exp1, 0))
b5f88157 6029 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
8c8a8e34 6030 MEM_IN_STRUCT_P (temp) = 1;
b5f88157
JL
6031
6032 /* If the pointer is actually a REFERENCE_TYPE, this could be pointing
6033 into some aggregate too. In theory we could fold this into the
6034 previous check and use rtx_addr_varies_p there too.
6035
6036 However, this seems safer. */
6037 if (!MEM_IN_STRUCT_P (temp)
6038 && (TREE_CODE (TREE_TYPE (exp1)) == REFERENCE_TYPE
6039 /* This may have been an array reference to the first element
6040 that was optimized away from being an addition. */
6041 || (TREE_CODE (exp1) == NOP_EXPR
6042 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
6043 == REFERENCE_TYPE)
6044 || ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
6045 == POINTER_TYPE)
6046 && (AGGREGATE_TYPE_P
6047 (TREE_TYPE (TREE_TYPE
6048 (TREE_OPERAND (exp1, 0))))))))))
6049 MEM_IN_STRUCT_P (temp) = ! rtx_addr_varies_p (temp);
6050
2c4c436a 6051 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
41472af8 6052 MEM_ALIAS_SET (temp) = get_alias_set (exp);
1125706f
RK
6053
6054 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6055 here, because, in C and C++, the fact that a location is accessed
6056 through a pointer to const does not mean that the value there can
6057 never change. Languages where it can never change should
6058 also set TREE_STATIC. */
5cb7a25a 6059 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
8c8a8e34
JW
6060 return temp;
6061 }
bbf6f052
RK
6062
6063 case ARRAY_REF:
742920c7
RK
6064 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6065 abort ();
bbf6f052 6066
bbf6f052 6067 {
742920c7
RK
6068 tree array = TREE_OPERAND (exp, 0);
6069 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6070 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6071 tree index = TREE_OPERAND (exp, 1);
6072 tree index_type = TREE_TYPE (index);
08293add 6073 HOST_WIDE_INT i;
b50d17a1 6074
d4c89139
PB
6075 /* Optimize the special-case of a zero lower bound.
6076
6077 We convert the low_bound to sizetype to avoid some problems
6078 with constant folding. (E.g. suppose the lower bound is 1,
6079 and its mode is QI. Without the conversion, (ARRAY
6080 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6081 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6082
6083 But sizetype isn't quite right either (especially if
6084 the lowbound is negative). FIXME */
6085
742920c7 6086 if (! integer_zerop (low_bound))
d4c89139
PB
6087 index = fold (build (MINUS_EXPR, index_type, index,
6088 convert (sizetype, low_bound)));
742920c7 6089
742920c7 6090 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
6091 This is not done in fold so it won't happen inside &.
6092 Don't fold if this is for wide characters since it's too
6093 difficult to do correctly and this is a very rare case. */
742920c7
RK
6094
6095 if (TREE_CODE (array) == STRING_CST
6096 && TREE_CODE (index) == INTEGER_CST
6097 && !TREE_INT_CST_HIGH (index)
307b821c 6098 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
ad2e7dd0
RK
6099 && GET_MODE_CLASS (mode) == MODE_INT
6100 && GET_MODE_SIZE (mode) == 1)
307b821c 6101 return GEN_INT (TREE_STRING_POINTER (array)[i]);
bbf6f052 6102
742920c7
RK
6103 /* If this is a constant index into a constant array,
6104 just get the value from the array. Handle both the cases when
6105 we have an explicit constructor and when our operand is a variable
6106 that was declared const. */
4af3895e 6107
742920c7
RK
6108 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6109 {
6110 if (TREE_CODE (index) == INTEGER_CST
6111 && TREE_INT_CST_HIGH (index) == 0)
6112 {
6113 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6114
6115 i = TREE_INT_CST_LOW (index);
6116 while (elem && i--)
6117 elem = TREE_CHAIN (elem);
6118 if (elem)
6119 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 6120 tmode, ro_modifier);
742920c7
RK
6121 }
6122 }
4af3895e 6123
742920c7
RK
6124 else if (optimize >= 1
6125 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6126 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6127 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6128 {
08293add 6129 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
6130 {
6131 tree init = DECL_INITIAL (array);
6132
6133 i = TREE_INT_CST_LOW (index);
6134 if (TREE_CODE (init) == CONSTRUCTOR)
6135 {
6136 tree elem = CONSTRUCTOR_ELTS (init);
6137
03dc44a6
RS
6138 while (elem
6139 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
742920c7
RK
6140 elem = TREE_CHAIN (elem);
6141 if (elem)
6142 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 6143 tmode, ro_modifier);
742920c7
RK
6144 }
6145 else if (TREE_CODE (init) == STRING_CST
08293add
RK
6146 && TREE_INT_CST_HIGH (index) == 0
6147 && (TREE_INT_CST_LOW (index)
6148 < TREE_STRING_LENGTH (init)))
6149 return (GEN_INT
6150 (TREE_STRING_POINTER
6151 (init)[TREE_INT_CST_LOW (index)]));
742920c7
RK
6152 }
6153 }
6154 }
8c8a8e34 6155
08293add 6156 /* ... fall through ... */
bbf6f052
RK
6157
6158 case COMPONENT_REF:
6159 case BIT_FIELD_REF:
4af3895e 6160 /* If the operand is a CONSTRUCTOR, we can just extract the
6161 appropriate field if it is present. Don't do this if we have
6162 already written the data since we want to refer to that copy
6163 and varasm.c assumes that's what we'll do. */
4af3895e 6164 if (code != ARRAY_REF
7a0b7b9a
RK
6165 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6166 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4af3895e
JVA
6167 {
6168 tree elt;
6169
6170 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6171 elt = TREE_CHAIN (elt))
86b5812c
RK
6172 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6173 /* We can normally use the value of the field in the
6174 CONSTRUCTOR. However, if this is a bitfield in
6175 an integral mode that we can fit in a HOST_WIDE_INT,
6176 we must mask only the number of bits in the bitfield,
6177 since this is done implicitly by the constructor. If
6178 the bitfield does not meet either of those conditions,
6179 we can't do this optimization. */
6180 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6181 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6182 == MODE_INT)
6183 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6184 <= HOST_BITS_PER_WIDE_INT))))
6185 {
6186 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6187 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6188 {
6189 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
86b5812c
RK
6190
6191 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6192 {
6193 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6194 op0 = expand_and (op0, op1, target);
6195 }
6196 else
6197 {
e5e809f4
JL
6198 enum machine_mode imode
6199 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c 6200 tree count
e5e809f4
JL
6201 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6202 0);
86b5812c
RK
6203
6204 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6205 target, 0);
6206 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6207 target, 0);
6208 }
6209 }
6210
6211 return op0;
6212 }
4af3895e
JVA
6213 }
6214
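/* Worked example for the bitfield handling above (added commentary):
   if OP0 holds the constructor value of a 3-bit field, an unsigned
   field is finished with `op0 & 7', while a signed field in SImode is
   shifted left by 32 - 3 = 29 and arithmetically back right by 29,
   recreating the sign extension the constructor left implicit.  */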
bbf6f052
RK
6215 {
6216 enum machine_mode mode1;
6217 int bitsize;
6218 int bitpos;
7bb0943f 6219 tree offset;
bbf6f052 6220 int volatilep = 0;
034f9101 6221 int alignment;
839c4796
RK
6222 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6223 &mode1, &unsignedp, &volatilep,
6224 &alignment);
bbf6f052 6225
e7f3c83f
RK
6226 /* If we got back the original object, something is wrong. Perhaps
6227 we are evaluating an expression too early. In any event, don't
6228 infinitely recurse. */
6229 if (tem == exp)
6230 abort ();
6231
3d27140a 6232 /* If TEM's type is a union of variable size, pass TARGET to the inner
6233 computation, since it will need a temporary and TARGET is known
6234 to be usable for that. This occurs in unchecked conversion in Ada. */
6235
6236 op0 = expand_expr (tem,
6237 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6238 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6239 != INTEGER_CST)
6240 ? target : NULL_RTX),
4ed67205 6241 VOIDmode,
e5e809f4
JL
6242 modifier == EXPAND_INITIALIZER
6243 ? modifier : EXPAND_NORMAL);
bbf6f052 6244
8c8a8e34 6245 /* If this is a constant, put it into a register if it is a
8008b228 6246 legitimate constant and memory if it isn't. */
8c8a8e34
JW
6247 if (CONSTANT_P (op0))
6248 {
6249 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
f2878c6b 6250 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
8c8a8e34
JW
6251 op0 = force_reg (mode, op0);
6252 else
6253 op0 = validize_mem (force_const_mem (mode, op0));
6254 }
6255
7bb0943f
RS
6256 if (offset != 0)
6257 {
906c4e36 6258 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
6259
6260 if (GET_CODE (op0) != MEM)
6261 abort ();
2d48c13d
JL
6262
6263 if (GET_MODE (offset_rtx) != ptr_mode)
bd070e1a 6264 {
2d48c13d 6265#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 6266 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
2d48c13d 6267#else
bd070e1a 6268 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d 6269#endif
bd070e1a 6270 }
2d48c13d 6271
89752202
HB
6272 if (GET_CODE (op0) == MEM
6273 && GET_MODE (op0) == BLKmode
6274 && bitsize
6275 && (bitpos % bitsize) == 0
6276 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6277 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6278 {
6279 rtx temp = change_address (op0, mode1,
6280 plus_constant (XEXP (op0, 0),
6281 (bitpos /
6282 BITS_PER_UNIT)));
6283 if (GET_CODE (XEXP (temp, 0)) == REG)
6284 op0 = temp;
6285 else
6286 op0 = change_address (op0, mode1,
6287 force_reg (GET_MODE (XEXP (temp, 0)),
6288 XEXP (temp, 0)));
6289 bitpos = 0;
6290 }
6291
6292
7bb0943f 6293 op0 = change_address (op0, VOIDmode,
38a448ca
RH
6294 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6295 force_reg (ptr_mode, offset_rtx)));
7bb0943f
RS
6296 }
6297
bbf6f052
RK
6298 /* Don't forget about volatility even if this is a bitfield. */
6299 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6300 {
6301 op0 = copy_rtx (op0);
6302 MEM_VOLATILE_P (op0) = 1;
6303 }
6304
921b3427
RK
6305 /* Check the access. */
6306 if (flag_check_memory_usage && GET_CODE (op0) == MEM)
6307 {
6308 enum memory_use_mode memory_usage;
6309 memory_usage = get_memory_usage_from_modifier (modifier);
6310
6311 if (memory_usage != MEMORY_USE_DONT)
6312 {
6313 rtx to;
6314 int size;
6315
6316 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6317 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6318
6319 /* Check the access right of the pointer. */
e9a25f70
JL
6320 if (size > BITS_PER_UNIT)
6321 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6322 to, ptr_mode,
6323 GEN_INT (size / BITS_PER_UNIT),
6324 TYPE_MODE (sizetype),
956d6950
JL
6325 GEN_INT (memory_usage),
6326 TYPE_MODE (integer_type_node));
921b3427
RK
6327 }
6328 }
6329
ccc98036
RS
6330 /* In cases where an aligned union has an unaligned object
6331 as a field, we might be extracting a BLKmode value from
6332 an integer-mode (e.g., SImode) object. Handle this case
6333 by doing the extract into an object as wide as the field
6334 (which we know to be the width of a basic mode), then
6335 storing into memory, and changing the mode to BLKmode.
6336 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6337 EXPAND_INITIALIZER), then we must not copy to a temporary. */
bbf6f052 6338 if (mode1 == VOIDmode
ccc98036 6339 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
f9409c3a 6340 || (modifier != EXPAND_CONST_ADDRESS
f9409c3a 6341 && modifier != EXPAND_INITIALIZER
c2722ef6
RK
6342 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6343 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6344 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
f9409c3a
JW
6345 /* If the field isn't aligned enough to fetch as a memref,
6346 fetch it as a bit field. */
6347 || (SLOW_UNALIGNED_ACCESS
6348 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
6349 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
bbf6f052 6350 {
bbf6f052
RK
6351 enum machine_mode ext_mode = mode;
6352
6353 if (ext_mode == BLKmode)
6354 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6355
6356 if (ext_mode == BLKmode)
a281e72d
RK
6357 {
6358 /* In this case, BITPOS must start at a byte boundary and
6359 TARGET, if specified, must be a MEM. */
6360 if (GET_CODE (op0) != MEM
6361 || (target != 0 && GET_CODE (target) != MEM)
6362 || bitpos % BITS_PER_UNIT != 0)
6363 abort ();
6364
6365 op0 = change_address (op0, VOIDmode,
6366 plus_constant (XEXP (op0, 0),
6367 bitpos / BITS_PER_UNIT));
6368 if (target == 0)
6369 target = assign_temp (type, 0, 1, 1);
6370
6371 emit_block_move (target, op0,
6372 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6373 / BITS_PER_UNIT),
6374 1);
6375
6376 return target;
6377 }
bbf6f052 6378
dc6d66b3
RK
6379 op0 = validize_mem (op0);
6380
6381 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6382 mark_reg_pointer (XEXP (op0, 0), alignment);
6383
6384 op0 = extract_bit_field (op0, bitsize, bitpos,
bbf6f052 6385 unsignedp, target, ext_mode, ext_mode,
034f9101 6386 alignment,
bbf6f052 6387 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
6388
6389 /* If the result is a record type and BITSIZE is narrower than
6390 the mode of OP0, an integral mode, and this is a big endian
6391 machine, we must put the field into the high-order bits. */
6392 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6393 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6394 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6395 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6396 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6397 - bitsize),
6398 op0, 1);
6399
bbf6f052
RK
6400 if (mode == BLKmode)
6401 {
6402 rtx new = assign_stack_temp (ext_mode,
6403 bitsize / BITS_PER_UNIT, 0);
6404
6405 emit_move_insn (new, op0);
6406 op0 = copy_rtx (new);
6407 PUT_MODE (op0, BLKmode);
092dded9 6408 MEM_IN_STRUCT_P (op0) = 1;
bbf6f052
RK
6409 }
6410
6411 return op0;
6412 }
6413
05019f83
RK
6414 /* If the result is BLKmode, use that to access the object
6415 now as well. */
6416 if (mode == BLKmode)
6417 mode1 = BLKmode;
6418
bbf6f052
RK
6419 /* Get a reference to just this component. */
6420 if (modifier == EXPAND_CONST_ADDRESS
6421 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
38a448ca
RH
6422 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6423 (bitpos / BITS_PER_UNIT)));
bbf6f052
RK
6424 else
6425 op0 = change_address (op0, mode1,
6426 plus_constant (XEXP (op0, 0),
6427 (bitpos / BITS_PER_UNIT)));
41472af8
MM
6428
6429 if (GET_CODE (op0) == MEM)
6430 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6431
dc6d66b3
RK
6432 if (GET_CODE (XEXP (op0, 0)) == REG)
6433 mark_reg_pointer (XEXP (op0, 0), alignment);
6434
bbf6f052
RK
6435 MEM_IN_STRUCT_P (op0) = 1;
6436 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 6437 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 6438 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 6439 || modifier == EXPAND_INITIALIZER)
bbf6f052 6440 return op0;
0d15e60c 6441 else if (target == 0)
bbf6f052 6442 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 6443
bbf6f052
RK
6444 convert_move (target, op0, unsignedp);
6445 return target;
6446 }
6447
bbf6f052
RK
6448 /* Intended for a reference to a buffer of a file-object in Pascal.
6449 But it's not certain that a special tree code will really be
6450 necessary for these. INDIRECT_REF might work for them. */
6451 case BUFFER_REF:
6452 abort ();
6453
7308a047 6454 case IN_EXPR:
7308a047 6455 {
d6a5ac33
RK
6456 /* Pascal set IN expression.
6457
6458 Algorithm:
6459 rlo = set_low - (set_low%bits_per_word);
6460 the_word = set [ (index - rlo)/bits_per_word ];
6461 bit_index = index % bits_per_word;
6462 bitmask = 1 << bit_index;
6463 return !!(the_word & bitmask); */
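/* Worked example (added commentary; the code below actually divides by
   BITS_PER_UNIT, so take bits_per_word == 8): for set_low = 0 and
   index = 11, rlo = 0, the_word = set[11 / 8] = set[1],
   bit_index = 11 % 8 = 3, bitmask = 1 << 3, and the test reads bit 3
   of the second byte of the set.  */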
6464
7308a047
RS
6465 tree set = TREE_OPERAND (exp, 0);
6466 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 6467 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 6468 tree set_type = TREE_TYPE (set);
7308a047
RS
6469 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6470 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
6471 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6472 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6473 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6474 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6475 rtx setaddr = XEXP (setval, 0);
6476 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
6477 rtx rlow;
6478 rtx diff, quo, rem, addr, bit, result;
7308a047 6479
d6a5ac33
RK
6480 preexpand_calls (exp);
6481
6482 /* If domain is empty, answer is no. Likewise if index is constant
6483 and out of bounds. */
51723711 6484 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 6485 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 6486 && tree_int_cst_lt (set_high_bound, set_low_bound))
d6a5ac33
RK
6487 || (TREE_CODE (index) == INTEGER_CST
6488 && TREE_CODE (set_low_bound) == INTEGER_CST
6489 && tree_int_cst_lt (index, set_low_bound))
6490 || (TREE_CODE (set_high_bound) == INTEGER_CST
6491 && TREE_CODE (index) == INTEGER_CST
6492 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
6493 return const0_rtx;
6494
d6a5ac33
RK
6495 if (target == 0)
6496 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
6497
6498 /* If we get here, we have to generate the code for both cases
6499 (in range and out of range). */
6500
6501 op0 = gen_label_rtx ();
6502 op1 = gen_label_rtx ();
6503
6504 if (! (GET_CODE (index_val) == CONST_INT
6505 && GET_CODE (lo_r) == CONST_INT))
6506 {
17938e57 6507 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
d6a5ac33 6508 GET_MODE (index_val), iunsignedp, 0);
7308a047
RS
6509 emit_jump_insn (gen_blt (op1));
6510 }
6511
6512 if (! (GET_CODE (index_val) == CONST_INT
6513 && GET_CODE (hi_r) == CONST_INT))
6514 {
17938e57 6515 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
d6a5ac33 6516 GET_MODE (index_val), iunsignedp, 0);
7308a047
RS
6517 emit_jump_insn (gen_bgt (op1));
6518 }
6519
6520 /* Calculate the element number of bit zero in the first word
6521 of the set. */
6522 if (GET_CODE (lo_r) == CONST_INT)
17938e57
RK
6523 rlow = GEN_INT (INTVAL (lo_r)
6524 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 6525 else
17938e57
RK
6526 rlow = expand_binop (index_mode, and_optab, lo_r,
6527 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 6528 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 6529
d6a5ac33
RK
6530 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6531 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
6532
6533 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 6534 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 6535 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
6536 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6537
7308a047 6538 addr = memory_address (byte_mode,
d6a5ac33
RK
6539 expand_binop (index_mode, add_optab, diff,
6540 setaddr, NULL_RTX, iunsignedp,
17938e57 6541 OPTAB_LIB_WIDEN));
d6a5ac33 6542
7308a047
RS
 6543 /* Extract the bit we want to examine. */
6544 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 6545 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
6546 make_tree (TREE_TYPE (index), rem),
6547 NULL_RTX, 1);
6548 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6549 GET_MODE (target) == byte_mode ? target : 0,
7308a047 6550 1, OPTAB_LIB_WIDEN);
17938e57
RK
6551
6552 if (result != target)
6553 convert_move (target, result, 1);
7308a047
RS
6554
6555 /* Output the code to handle the out-of-range case. */
6556 emit_jump (op0);
6557 emit_label (op1);
6558 emit_move_insn (target, const0_rtx);
6559 emit_label (op0);
6560 return target;
6561 }
6562
bbf6f052
RK
6563 case WITH_CLEANUP_EXPR:
6564 if (RTL_EXPR_RTL (exp) == 0)
6565 {
6566 RTL_EXPR_RTL (exp)
921b3427 6567 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
e976b8b2
MS
6568 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6569
bbf6f052
RK
6570 /* That's it for this cleanup. */
6571 TREE_OPERAND (exp, 2) = 0;
6572 }
6573 return RTL_EXPR_RTL (exp);
6574
5dab5552
MS
6575 case CLEANUP_POINT_EXPR:
6576 {
d93d4205 6577 extern int temp_slot_level;
e976b8b2
MS
6578 /* Start a new binding layer that will keep track of all cleanup
6579 actions to be performed. */
6580 expand_start_bindings (0);
6581
d93d4205 6582 target_temp_slot_level = temp_slot_level;
e976b8b2 6583
921b3427 6584 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
f283f66b
JM
6585 /* If we're going to use this value, load it up now. */
6586 if (! ignore)
6587 op0 = force_not_mem (op0);
d93d4205 6588 preserve_temp_slots (op0);
e976b8b2 6589 expand_end_bindings (NULL_TREE, 0, 0);
5dab5552
MS
6590 }
6591 return op0;
6592
bbf6f052
RK
6593 case CALL_EXPR:
6594 /* Check for a built-in function. */
6595 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
6596 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6597 == FUNCTION_DECL)
bbf6f052
RK
6598 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6599 return expand_builtin (exp, target, subtarget, tmode, ignore);
d6a5ac33 6600
bbf6f052
RK
6601 /* If this call was expanded already by preexpand_calls,
6602 just return the result we got. */
6603 if (CALL_EXPR_RTL (exp) != 0)
6604 return CALL_EXPR_RTL (exp);
d6a5ac33 6605
8129842c 6606 return expand_call (exp, target, ignore);
bbf6f052
RK
6607
6608 case NON_LVALUE_EXPR:
6609 case NOP_EXPR:
6610 case CONVERT_EXPR:
6611 case REFERENCE_EXPR:
bbf6f052
RK
6612 if (TREE_CODE (type) == UNION_TYPE)
6613 {
6614 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6615 if (target == 0)
06089a8b
RK
6616 {
6617 if (mode != BLKmode)
6618 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6619 else
6620 target = assign_temp (type, 0, 1, 1);
6621 }
d6a5ac33 6622
bbf6f052
RK
6623 if (GET_CODE (target) == MEM)
6624 /* Store data into beginning of memory target. */
6625 store_expr (TREE_OPERAND (exp, 0),
1499e0a8
RK
6626 change_address (target, TYPE_MODE (valtype), 0), 0);
6627
bbf6f052
RK
6628 else if (GET_CODE (target) == REG)
6629 /* Store this field into a union of the proper type. */
6630 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6631 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6632 VOIDmode, 0, 1,
ece32014
MM
6633 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6634 0);
bbf6f052
RK
6635 else
6636 abort ();
6637
6638 /* Return the entire union. */
6639 return target;
6640 }
d6a5ac33 6641
7f62854a
RK
6642 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6643 {
6644 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
921b3427 6645 ro_modifier);
7f62854a
RK
6646
6647 /* If the signedness of the conversion differs and OP0 is
6648 a promoted SUBREG, clear that indication since we now
6649 have to do the proper extension. */
6650 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6651 && GET_CODE (op0) == SUBREG)
6652 SUBREG_PROMOTED_VAR_P (op0) = 0;
6653
6654 return op0;
6655 }
6656
1499e0a8 6657 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
6658 if (GET_MODE (op0) == mode)
6659 return op0;
12342f90 6660
d6a5ac33
RK
6661 /* If OP0 is a constant, just convert it into the proper mode. */
6662 if (CONSTANT_P (op0))
6663 return
6664 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6665 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
12342f90 6666
26fcb35a 6667 if (modifier == EXPAND_INITIALIZER)
38a448ca 6668 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 6669
bbf6f052 6670 if (target == 0)
d6a5ac33
RK
6671 return
6672 convert_to_mode (mode, op0,
6673 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 6674 else
d6a5ac33
RK
6675 convert_move (target, op0,
6676 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
6677 return target;
6678
6679 case PLUS_EXPR:
0f41302f
MS
6680 /* We come here from MINUS_EXPR when the second operand is a
6681 constant. */
bbf6f052
RK
6682 plus_expr:
6683 this_optab = add_optab;
6684
6685 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6686 something else, make sure we add the register to the constant and
6687 then to the other thing. This case can occur during strength
6688 reduction and doing it this way will produce better code if the
6689 frame pointer or argument pointer is eliminated.
6690
6691 fold-const.c will ensure that the constant is always in the inner
6692 PLUS_EXPR, so the only case we need to do anything about is if
6693 sp, ap, or fp is our second argument, in which case we must swap
6694 the innermost first argument and our second argument. */
6695
6696 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6697 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6698 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6699 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6700 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6701 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6702 {
6703 tree t = TREE_OPERAND (exp, 1);
6704
6705 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6706 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6707 }
6708
88f63c77 6709 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
6710 something, we might be forming a constant. So try to use
6711 plus_constant. If it produces a sum and we can't accept it,
6712 use force_operand. This allows P = &ARR[const] to generate
6713 efficient code on machines where a SYMBOL_REF is not a valid
6714 address.
6715
6716 If this is an EXPAND_SUM call, always return the sum. */
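 /* Illustrative example (not from the original source): on a machine
    with 4-byte ints,

      static int arr[10];
      int *p = &arr[3];

    reaches this point as a SYMBOL_REF plus the constant 12, and
    plus_constant folds the initializer into a single address instead
    of a run-time addition.  */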
c980ac49 6717 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
88f63c77 6718 || mode == ptr_mode)
bbf6f052 6719 {
c980ac49
RS
6720 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6721 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6722 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6723 {
6724 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6725 EXPAND_SUM);
6726 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6727 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6728 op1 = force_operand (op1, target);
6729 return op1;
6730 }
bbf6f052 6731
c980ac49
RS
6732 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6733 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6734 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6735 {
6736 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6737 EXPAND_SUM);
6738 if (! CONSTANT_P (op0))
6739 {
6740 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6741 VOIDmode, modifier);
709f5be1
RS
6742 /* Don't go to both_summands if modifier
6743 says it's not right to return a PLUS. */
6744 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6745 goto binop2;
c980ac49
RS
6746 goto both_summands;
6747 }
6748 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6749 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6750 op0 = force_operand (op0, target);
6751 return op0;
6752 }
bbf6f052
RK
6753 }
6754
6755 /* No sense saving up arithmetic to be done
6756 if it's all in the wrong mode to form part of an address.
6757 And force_operand won't know whether to sign-extend or
6758 zero-extend. */
6759 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 6760 || mode != ptr_mode)
c980ac49 6761 goto binop;
bbf6f052
RK
6762
6763 preexpand_calls (exp);
e5e809f4 6764 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6765 subtarget = 0;
6766
921b3427
RK
6767 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6768 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
bbf6f052 6769
c980ac49 6770 both_summands:
bbf6f052
RK
6771 /* Make sure any term that's a sum with a constant comes last. */
6772 if (GET_CODE (op0) == PLUS
6773 && CONSTANT_P (XEXP (op0, 1)))
6774 {
6775 temp = op0;
6776 op0 = op1;
6777 op1 = temp;
6778 }
6779 /* If adding to a sum including a constant,
6780 associate it to put the constant outside. */
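 /* For instance (illustrative only), a sum like (x + 4) + (y + 8) is
    reassociated below into ((x + y) + 12), keeping the constant where
    cse and combine can still see it.  */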
6781 if (GET_CODE (op1) == PLUS
6782 && CONSTANT_P (XEXP (op1, 1)))
6783 {
6784 rtx constant_term = const0_rtx;
6785
6786 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6787 if (temp != 0)
6788 op0 = temp;
6f90e075
JW
6789 /* Ensure that MULT comes first if there is one. */
6790 else if (GET_CODE (op0) == MULT)
38a448ca 6791 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
bbf6f052 6792 else
38a448ca 6793 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
bbf6f052
RK
6794
6795 /* Let's also eliminate constants from op0 if possible. */
6796 op0 = eliminate_constant_term (op0, &constant_term);
6797
6798 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6799 their sum should be a constant. Form it into OP1, since the
6800 result we want will then be OP0 + OP1. */
6801
6802 temp = simplify_binary_operation (PLUS, mode, constant_term,
6803 XEXP (op1, 1));
6804 if (temp != 0)
6805 op1 = temp;
6806 else
38a448ca 6807 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
bbf6f052
RK
6808 }
6809
6810 /* Put a constant term last and put a multiplication first. */
6811 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6812 temp = op1, op1 = op0, op0 = temp;
6813
6814 temp = simplify_binary_operation (PLUS, mode, op0, op1);
38a448ca 6815 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
bbf6f052
RK
6816
6817 case MINUS_EXPR:
ea87523e
RK
6818 /* For initializers, we are allowed to return a MINUS of two
6819 symbolic constants. Here we handle all cases when both operands
6820 are constant. */
bbf6f052
RK
6821 /* Handle difference of two symbolic constants,
6822 for the sake of an initializer. */
6823 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6824 && really_constant_p (TREE_OPERAND (exp, 0))
6825 && really_constant_p (TREE_OPERAND (exp, 1)))
6826 {
906c4e36 6827 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
921b3427 6828 VOIDmode, ro_modifier);
906c4e36 6829 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
921b3427 6830 VOIDmode, ro_modifier);
ea87523e 6831
ea87523e
RK
6832 /* If the last operand is a CONST_INT, use plus_constant of
6833 the negated constant. Else make the MINUS. */
6834 if (GET_CODE (op1) == CONST_INT)
6835 return plus_constant (op0, - INTVAL (op1));
6836 else
38a448ca 6837 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052
RK
6838 }
6839 /* Convert A - const to A + (-const). */
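 /* Worked example (illustrative only): `x - 5' is rewritten as
    `x + (-5)' and handled by the PLUS_EXPR code above.  When TYPE is
    unsigned, the negation is redone in the corresponding signed type
    and the result converted back, i.e. (unsigned) ((int) x + (-5)),
    as arranged below.  */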
6840 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6841 {
ae431183
RK
6842 tree negated = fold (build1 (NEGATE_EXPR, type,
6843 TREE_OPERAND (exp, 1)));
6844
6845 /* Deal with the case where we can't negate the constant
6846 in TYPE. */
6847 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6848 {
6849 tree newtype = signed_type (type);
6850 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6851 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6852 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6853
6854 if (! TREE_OVERFLOW (newneg))
6855 return expand_expr (convert (type,
6856 build (PLUS_EXPR, newtype,
6857 newop0, newneg)),
921b3427 6858 target, tmode, ro_modifier);
ae431183
RK
6859 }
6860 else
6861 {
6862 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6863 goto plus_expr;
6864 }
bbf6f052
RK
6865 }
6866 this_optab = sub_optab;
6867 goto binop;
6868
6869 case MULT_EXPR:
6870 preexpand_calls (exp);
6871 /* If first operand is constant, swap them.
6872 Thus the following special case checks need only
6873 check the second operand. */
6874 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6875 {
6876 register tree t1 = TREE_OPERAND (exp, 0);
6877 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6878 TREE_OPERAND (exp, 1) = t1;
6879 }
6880
6881 /* Attempt to return something suitable for generating an
6882 indexed address, for machines that support that. */
6883
88f63c77 6884 if (modifier == EXPAND_SUM && mode == ptr_mode
bbf6f052 6885 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 6886 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 6887 {
921b3427
RK
6888 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6889 EXPAND_SUM);
bbf6f052
RK
6890
6891 /* Apply distributive law if OP0 is x+c. */
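 /* Illustrative example: if OP0 expanded to (plus x 16) and the
    multiplier is 3, this returns (plus (mult x 3) 48), i.e.
    (x + 16) * 3 distributed as x*3 + 16*3.  */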
6892 if (GET_CODE (op0) == PLUS
6893 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
38a448ca
RH
6894 return gen_rtx_PLUS (mode,
6895 gen_rtx_MULT (mode, XEXP (op0, 0),
6896 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
906c4e36
RK
6897 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6898 * INTVAL (XEXP (op0, 1))));
bbf6f052
RK
6899
6900 if (GET_CODE (op0) != REG)
906c4e36 6901 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
6902 if (GET_CODE (op0) != REG)
6903 op0 = copy_to_mode_reg (mode, op0);
6904
38a448ca
RH
6905 return gen_rtx_MULT (mode, op0,
6906 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
6907 }
6908
e5e809f4 6909 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6910 subtarget = 0;
6911
6912 /* Check for multiplying things that have been extended
6913 from a narrower type. If this machine supports multiplying
6914 in that narrower type with a result in the desired type,
6915 do it that way, and avoid the explicit type-conversion. */
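 /* A typical source pattern (illustrative only), assuming 32-bit int
    and 64-bit long long:

      int a, b;
      long long p = (long long) a * (long long) b;

    Both operands are conversions from int, so a single widening
    multiply (e.g. a mulsidi3 pattern) can be used instead of
    extending each operand to 64 bits and doing a full multiply.  */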
6916 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6917 && TREE_CODE (type) == INTEGER_TYPE
6918 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6919 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6920 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6921 && int_fits_type_p (TREE_OPERAND (exp, 1),
6922 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6923 /* Don't use a widening multiply if a shift will do. */
6924 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 6925 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
6926 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6927 ||
6928 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6929 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6930 ==
6931 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6932 /* If both operands are extended, they must either both
6933 be zero-extended or both be sign-extended. */
6934 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6935 ==
6936 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6937 {
6938 enum machine_mode innermode
6939 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
6940 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6941 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
6942 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6943 ? umul_widen_optab : smul_widen_optab);
b10af0c8 6944 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 6945 {
b10af0c8
TG
6946 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6947 {
6948 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6949 NULL_RTX, VOIDmode, 0);
6950 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6951 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6952 VOIDmode, 0);
6953 else
6954 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6955 NULL_RTX, VOIDmode, 0);
6956 goto binop2;
6957 }
6958 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6959 && innermode == word_mode)
6960 {
6961 rtx htem;
6962 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6963 NULL_RTX, VOIDmode, 0);
6964 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6965 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6966 VOIDmode, 0);
6967 else
6968 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6969 NULL_RTX, VOIDmode, 0);
6970 temp = expand_binop (mode, other_optab, op0, op1, target,
6971 unsignedp, OPTAB_LIB_WIDEN);
6972 htem = expand_mult_highpart_adjust (innermode,
6973 gen_highpart (innermode, temp),
6974 op0, op1,
6975 gen_highpart (innermode, temp),
6976 unsignedp);
6977 emit_move_insn (gen_highpart (innermode, temp), htem);
6978 return temp;
6979 }
bbf6f052
RK
6980 }
6981 }
6982 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6983 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6984 return expand_mult (mode, op0, op1, target, unsignedp);
6985
6986 case TRUNC_DIV_EXPR:
6987 case FLOOR_DIV_EXPR:
6988 case CEIL_DIV_EXPR:
6989 case ROUND_DIV_EXPR:
6990 case EXACT_DIV_EXPR:
6991 preexpand_calls (exp);
e5e809f4 6992 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6993 subtarget = 0;
 6994 /* Possible optimization: compute the dividend with EXPAND_SUM;
 6995 then, if the divisor is constant, we could optimize the case where some
 6996 terms of the dividend have coefficients divisible by it (e.g. (4*x + 8) / 4 => x + 2). */
6997 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6998 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6999 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7000
7001 case RDIV_EXPR:
7002 this_optab = flodiv_optab;
7003 goto binop;
7004
7005 case TRUNC_MOD_EXPR:
7006 case FLOOR_MOD_EXPR:
7007 case CEIL_MOD_EXPR:
7008 case ROUND_MOD_EXPR:
7009 preexpand_calls (exp);
e5e809f4 7010 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7011 subtarget = 0;
7012 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7013 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7014 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7015
7016 case FIX_ROUND_EXPR:
7017 case FIX_FLOOR_EXPR:
7018 case FIX_CEIL_EXPR:
7019 abort (); /* Not used for C. */
7020
7021 case FIX_TRUNC_EXPR:
906c4e36 7022 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7023 if (target == 0)
7024 target = gen_reg_rtx (mode);
7025 expand_fix (target, op0, unsignedp);
7026 return target;
7027
7028 case FLOAT_EXPR:
906c4e36 7029 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7030 if (target == 0)
7031 target = gen_reg_rtx (mode);
7032 /* expand_float can't figure out what to do if FROM has VOIDmode.
7033 So give it the correct mode. With -O, cse will optimize this. */
7034 if (GET_MODE (op0) == VOIDmode)
7035 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7036 op0);
7037 expand_float (target, op0,
7038 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7039 return target;
7040
7041 case NEGATE_EXPR:
5b22bee8 7042 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
bbf6f052
RK
7043 temp = expand_unop (mode, neg_optab, op0, target, 0);
7044 if (temp == 0)
7045 abort ();
7046 return temp;
7047
7048 case ABS_EXPR:
7049 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7050
2d7050fd 7051 /* Handle complex values specially. */
d6a5ac33
RK
7052 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7053 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7054 return expand_complex_abs (mode, op0, target, unsignedp);
2d7050fd 7055
bbf6f052
RK
7056 /* Unsigned abs is simply the operand. Testing here means we don't
7057 risk generating incorrect code below. */
7058 if (TREE_UNSIGNED (type))
7059 return op0;
7060
2e5ec6cf 7061 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 7062 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
7063
7064 case MAX_EXPR:
7065 case MIN_EXPR:
7066 target = original_target;
e5e809f4 7067 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
fc155707 7068 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 7069 || GET_MODE (target) != mode
bbf6f052
RK
7070 || (GET_CODE (target) == REG
7071 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7072 target = gen_reg_rtx (mode);
906c4e36 7073 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7074 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7075
7076 /* First try to do it with a special MIN or MAX instruction.
7077 If that does not win, use a conditional jump to select the proper
7078 value. */
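 /* If the optab does not win, the fallback below amounts to this
    sketch for MAX_EXPR (the comparison may be unsigned, or done word
    by word for integer modes too wide to compare directly):

      target = op0;
      if (! (target >= op1))
        target = op1;  */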
7079 this_optab = (TREE_UNSIGNED (type)
7080 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7081 : (code == MIN_EXPR ? smin_optab : smax_optab));
7082
7083 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7084 OPTAB_WIDEN);
7085 if (temp != 0)
7086 return temp;
7087
fa2981d8
JW
7088 /* At this point, a MEM target is no longer useful; we will get better
7089 code without it. */
7090
7091 if (GET_CODE (target) == MEM)
7092 target = gen_reg_rtx (mode);
7093
ee456b1c
RK
7094 if (target != op0)
7095 emit_move_insn (target, op0);
d6a5ac33 7096
bbf6f052 7097 op0 = gen_label_rtx ();
d6a5ac33 7098
f81497d9
RS
7099 /* If this mode is an integer too wide to compare properly,
7100 compare word by word. Rely on cse to optimize constant cases. */
d6a5ac33 7101 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
bbf6f052 7102 {
f81497d9 7103 if (code == MAX_EXPR)
d6a5ac33
RK
7104 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7105 target, op1, NULL_RTX, op0);
bbf6f052 7106 else
d6a5ac33
RK
7107 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7108 op1, target, NULL_RTX, op0);
ee456b1c 7109 emit_move_insn (target, op1);
bbf6f052 7110 }
f81497d9
RS
7111 else
7112 {
7113 if (code == MAX_EXPR)
7114 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
7115 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7116 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
f81497d9
RS
7117 else
7118 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
7119 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7120 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
f81497d9 7121 if (temp == const0_rtx)
ee456b1c 7122 emit_move_insn (target, op1);
f81497d9
RS
7123 else if (temp != const_true_rtx)
7124 {
7125 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7126 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7127 else
7128 abort ();
ee456b1c 7129 emit_move_insn (target, op1);
f81497d9
RS
7130 }
7131 }
bbf6f052
RK
7132 emit_label (op0);
7133 return target;
7134
bbf6f052
RK
7135 case BIT_NOT_EXPR:
7136 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7137 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7138 if (temp == 0)
7139 abort ();
7140 return temp;
7141
7142 case FFS_EXPR:
7143 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7144 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7145 if (temp == 0)
7146 abort ();
7147 return temp;
7148
d6a5ac33
RK
7149 /* ??? Can optimize bitwise operations with one arg constant.
7150 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7151 and (a bitwise1 b) bitwise2 b (etc)
 7152 but that is probably not worthwhile. */
7153
7154 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7155 boolean values when we want in all cases to compute both of them. In
7156 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7157 as actual zero-or-1 values and then bitwise anding. In cases where
7158 there cannot be any side effects, better code would be made by
7159 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7160 how to recognize those cases. */
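 /* Concretely (illustrative only): for `a && b' the front end emits
    TRUTH_ANDIF_EXPR, which short-circuits and may never evaluate `b';
    TRUTH_AND_EXPR always evaluates both operands as 0-or-1 values and
    ands them, like `(a != 0) & (b != 0)'.  */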
7161
bbf6f052
RK
7162 case TRUTH_AND_EXPR:
7163 case BIT_AND_EXPR:
7164 this_optab = and_optab;
7165 goto binop;
7166
bbf6f052
RK
7167 case TRUTH_OR_EXPR:
7168 case BIT_IOR_EXPR:
7169 this_optab = ior_optab;
7170 goto binop;
7171
874726a8 7172 case TRUTH_XOR_EXPR:
bbf6f052
RK
7173 case BIT_XOR_EXPR:
7174 this_optab = xor_optab;
7175 goto binop;
7176
7177 case LSHIFT_EXPR:
7178 case RSHIFT_EXPR:
7179 case LROTATE_EXPR:
7180 case RROTATE_EXPR:
7181 preexpand_calls (exp);
e5e809f4 7182 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7183 subtarget = 0;
7184 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7185 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7186 unsignedp);
7187
d6a5ac33
RK
7188 /* Could determine the answer when only additive constants differ. Also,
7189 the addition of one can be handled by changing the condition. */
bbf6f052
RK
7190 case LT_EXPR:
7191 case LE_EXPR:
7192 case GT_EXPR:
7193 case GE_EXPR:
7194 case EQ_EXPR:
7195 case NE_EXPR:
7196 preexpand_calls (exp);
7197 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7198 if (temp != 0)
7199 return temp;
d6a5ac33 7200
0f41302f 7201 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
7202 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7203 && original_target
7204 && GET_CODE (original_target) == REG
7205 && (GET_MODE (original_target)
7206 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7207 {
d6a5ac33
RK
7208 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7209 VOIDmode, 0);
7210
bbf6f052
RK
7211 if (temp != original_target)
7212 temp = copy_to_reg (temp);
d6a5ac33 7213
bbf6f052 7214 op1 = gen_label_rtx ();
906c4e36 7215 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
bbf6f052
RK
7216 GET_MODE (temp), unsignedp, 0);
7217 emit_jump_insn (gen_beq (op1));
7218 emit_move_insn (temp, const1_rtx);
7219 emit_label (op1);
7220 return temp;
7221 }
d6a5ac33 7222
bbf6f052
RK
7223 /* If no set-flag instruction, must generate a conditional
7224 store into a temporary variable. Drop through
7225 and handle this like && and ||. */
7226
7227 case TRUTH_ANDIF_EXPR:
7228 case TRUTH_ORIF_EXPR:
e44842fe 7229 if (! ignore
e5e809f4 7230 && (target == 0 || ! safe_from_p (target, exp, 1)
e44842fe
RK
7231 /* Make sure we don't have a hard reg (such as function's return
7232 value) live across basic blocks, if not optimizing. */
7233 || (!optimize && GET_CODE (target) == REG
7234 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 7235 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
7236
7237 if (target)
7238 emit_clr_insn (target);
7239
bbf6f052
RK
7240 op1 = gen_label_rtx ();
7241 jumpifnot (exp, op1);
e44842fe
RK
7242
7243 if (target)
7244 emit_0_to_1_insn (target);
7245
bbf6f052 7246 emit_label (op1);
e44842fe 7247 return ignore ? const0_rtx : target;
bbf6f052
RK
7248
7249 case TRUTH_NOT_EXPR:
7250 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7251 /* The parser is careful to generate TRUTH_NOT_EXPR
7252 only with operands that are always zero or one. */
906c4e36 7253 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
7254 target, 1, OPTAB_LIB_WIDEN);
7255 if (temp == 0)
7256 abort ();
7257 return temp;
7258
7259 case COMPOUND_EXPR:
7260 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7261 emit_queue ();
7262 return expand_expr (TREE_OPERAND (exp, 1),
7263 (ignore ? const0_rtx : target),
7264 VOIDmode, 0);
7265
7266 case COND_EXPR:
ac01eace
RK
7267 /* If we would have a "singleton" (see below) were it not for a
7268 conversion in each arm, bring that conversion back out. */
7269 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7270 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7271 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7272 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7273 {
7274 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7275 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7276
7277 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7278 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7279 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7280 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7281 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7282 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7283 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7284 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7285 return expand_expr (build1 (NOP_EXPR, type,
7286 build (COND_EXPR, TREE_TYPE (true),
7287 TREE_OPERAND (exp, 0),
7288 true, false)),
7289 target, tmode, modifier);
7290 }
7291
bbf6f052
RK
7292 {
7293 /* Note that COND_EXPRs whose type is a structure or union
7294 are required to be constructed to contain assignments of
7295 a temporary variable, so that we can evaluate them here
7296 for side effect only. If type is void, we must do likewise. */
7297
7298 /* If an arm of the branch requires a cleanup,
7299 only that cleanup is performed. */
7300
7301 tree singleton = 0;
7302 tree binary_op = 0, unary_op = 0;
bbf6f052
RK
7303
7304 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7305 convert it to our mode, if necessary. */
7306 if (integer_onep (TREE_OPERAND (exp, 1))
7307 && integer_zerop (TREE_OPERAND (exp, 2))
7308 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7309 {
dd27116b
RK
7310 if (ignore)
7311 {
7312 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
921b3427 7313 ro_modifier);
dd27116b
RK
7314 return const0_rtx;
7315 }
7316
921b3427 7317 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
bbf6f052
RK
7318 if (GET_MODE (op0) == mode)
7319 return op0;
d6a5ac33 7320
bbf6f052
RK
7321 if (target == 0)
7322 target = gen_reg_rtx (mode);
7323 convert_move (target, op0, unsignedp);
7324 return target;
7325 }
7326
ac01eace
RK
7327 /* Check for X ? A + B : A. If we have this, we can copy A to the
7328 output and conditionally add B. Similarly for unary operations.
7329 Don't do this if X has side-effects because those side effects
7330 might affect A or B and the "?" operation is a sequence point in
7331 ANSI. (operand_equal_p tests for side effects.) */
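 /* Worked example (illustrative only): `x ? a + b : a' is emitted
    roughly as

      temp = a;
      if (x)
        temp = temp + b;

    so A is evaluated once and B only on the taken arm.  */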
bbf6f052
RK
7332
7333 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7334 && operand_equal_p (TREE_OPERAND (exp, 2),
7335 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7336 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7337 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7338 && operand_equal_p (TREE_OPERAND (exp, 1),
7339 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7340 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7341 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7342 && operand_equal_p (TREE_OPERAND (exp, 2),
7343 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7344 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7345 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7346 && operand_equal_p (TREE_OPERAND (exp, 1),
7347 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7348 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7349
01c8a7c8
RK
7350 /* If we are not to produce a result, we have no target. Otherwise,
7351 if a target was specified use it; it will not be used as an
7352 intermediate target unless it is safe. If no target, use a
7353 temporary. */
7354
7355 if (ignore)
7356 temp = 0;
7357 else if (original_target
e5e809f4 7358 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
01c8a7c8
RK
7359 || (singleton && GET_CODE (original_target) == REG
7360 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7361 && original_target == var_rtx (singleton)))
7362 && GET_MODE (original_target) == mode
7c00d1fe
RK
7363#ifdef HAVE_conditional_move
7364 && (! can_conditionally_move_p (mode)
7365 || GET_CODE (original_target) == REG
7366 || TREE_ADDRESSABLE (type))
7367#endif
01c8a7c8
RK
7368 && ! (GET_CODE (original_target) == MEM
7369 && MEM_VOLATILE_P (original_target)))
7370 temp = original_target;
7371 else if (TREE_ADDRESSABLE (type))
7372 abort ();
7373 else
7374 temp = assign_temp (type, 0, 0, 1);
7375
ac01eace
RK
7376 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7377 do the test of X as a store-flag operation, do this as
7378 A + ((X != 0) << log C). Similarly for other simple binary
7379 operators. Only do for C == 1 if BRANCH_COST is low. */
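 /* Worked example (illustrative only): `x ? a + 4 : a' becomes

      a + ((x != 0) << 2)

    since 4 == 1 << 2, avoiding a branch on targets with a store-flag
    instruction.  */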
dd27116b 7380 if (temp && singleton && binary_op
bbf6f052
RK
7381 && (TREE_CODE (binary_op) == PLUS_EXPR
7382 || TREE_CODE (binary_op) == MINUS_EXPR
7383 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 7384 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
ac01eace
RK
7385 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7386 : integer_onep (TREE_OPERAND (binary_op, 1)))
bbf6f052
RK
7387 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7388 {
7389 rtx result;
7390 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7391 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7392 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
2d444001 7393 : xor_optab);
bbf6f052
RK
7394
7395 /* If we had X ? A : A + 1, do this as A + (X == 0).
7396
7397 We have to invert the truth value here and then put it
7398 back later if do_store_flag fails. We cannot simply copy
7399 TREE_OPERAND (exp, 0) to another variable and modify that
7400 because invert_truthvalue can modify the tree pointed to
7401 by its argument. */
7402 if (singleton == TREE_OPERAND (exp, 1))
7403 TREE_OPERAND (exp, 0)
7404 = invert_truthvalue (TREE_OPERAND (exp, 0));
7405
7406 result = do_store_flag (TREE_OPERAND (exp, 0),
e5e809f4 7407 (safe_from_p (temp, singleton, 1)
906c4e36 7408 ? temp : NULL_RTX),
bbf6f052
RK
7409 mode, BRANCH_COST <= 1);
7410
ac01eace
RK
7411 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7412 result = expand_shift (LSHIFT_EXPR, mode, result,
7413 build_int_2 (tree_log2
7414 (TREE_OPERAND
7415 (binary_op, 1)),
7416 0),
e5e809f4 7417 (safe_from_p (temp, singleton, 1)
ac01eace
RK
7418 ? temp : NULL_RTX), 0);
7419
bbf6f052
RK
7420 if (result)
7421 {
906c4e36 7422 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7423 return expand_binop (mode, boptab, op1, result, temp,
7424 unsignedp, OPTAB_LIB_WIDEN);
7425 }
7426 else if (singleton == TREE_OPERAND (exp, 1))
7427 TREE_OPERAND (exp, 0)
7428 = invert_truthvalue (TREE_OPERAND (exp, 0));
7429 }
7430
dabf8373 7431 do_pending_stack_adjust ();
bbf6f052
RK
7432 NO_DEFER_POP;
7433 op0 = gen_label_rtx ();
7434
7435 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7436 {
7437 if (temp != 0)
7438 {
7439 /* If the target conflicts with the other operand of the
7440 binary op, we can't use it. Also, we can't use the target
7441 if it is a hard register, because evaluating the condition
7442 might clobber it. */
7443 if ((binary_op
e5e809f4 7444 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
bbf6f052
RK
7445 || (GET_CODE (temp) == REG
7446 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7447 temp = gen_reg_rtx (mode);
7448 store_expr (singleton, temp, 0);
7449 }
7450 else
906c4e36 7451 expand_expr (singleton,
2937cf87 7452 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7453 if (singleton == TREE_OPERAND (exp, 1))
7454 jumpif (TREE_OPERAND (exp, 0), op0);
7455 else
7456 jumpifnot (TREE_OPERAND (exp, 0), op0);
7457
956d6950 7458 start_cleanup_deferral ();
bbf6f052
RK
7459 if (binary_op && temp == 0)
7460 /* Just touch the other operand. */
7461 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 7462 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7463 else if (binary_op)
7464 store_expr (build (TREE_CODE (binary_op), type,
7465 make_tree (type, temp),
7466 TREE_OPERAND (binary_op, 1)),
7467 temp, 0);
7468 else
7469 store_expr (build1 (TREE_CODE (unary_op), type,
7470 make_tree (type, temp)),
7471 temp, 0);
7472 op1 = op0;
bbf6f052 7473 }
bbf6f052
RK
7474 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7475 comparison operator. If we have one of these cases, set the
7476 output to A, branch on A (cse will merge these two references),
7477 then set the output to FOO. */
7478 else if (temp
7479 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7480 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7481 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7482 TREE_OPERAND (exp, 1), 0)
e9a25f70
JL
7483 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7484 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 7485 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
bbf6f052
RK
7486 {
7487 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7488 temp = gen_reg_rtx (mode);
7489 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7490 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 7491
956d6950 7492 start_cleanup_deferral ();
bbf6f052
RK
7493 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7494 op1 = op0;
7495 }
7496 else if (temp
7497 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7498 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7499 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7500 TREE_OPERAND (exp, 2), 0)
e9a25f70
JL
7501 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7502 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 7503 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7504 {
7505 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7506 temp = gen_reg_rtx (mode);
7507 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7508 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 7509
956d6950 7510 start_cleanup_deferral ();
bbf6f052
RK
7511 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7512 op1 = op0;
7513 }
7514 else
7515 {
7516 op1 = gen_label_rtx ();
7517 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 7518
956d6950 7519 start_cleanup_deferral ();
bbf6f052
RK
7520 if (temp != 0)
7521 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7522 else
906c4e36
RK
7523 expand_expr (TREE_OPERAND (exp, 1),
7524 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 7525 end_cleanup_deferral ();
bbf6f052
RK
7526 emit_queue ();
7527 emit_jump_insn (gen_jump (op1));
7528 emit_barrier ();
7529 emit_label (op0);
956d6950 7530 start_cleanup_deferral ();
bbf6f052
RK
7531 if (temp != 0)
7532 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7533 else
906c4e36
RK
7534 expand_expr (TREE_OPERAND (exp, 2),
7535 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7536 }
7537
956d6950 7538 end_cleanup_deferral ();
bbf6f052
RK
7539
7540 emit_queue ();
7541 emit_label (op1);
7542 OK_DEFER_POP;
5dab5552 7543
bbf6f052
RK
7544 return temp;
7545 }
7546
7547 case TARGET_EXPR:
7548 {
7549 /* Something needs to be initialized, but we didn't know
7550 where that thing was when building the tree. For example,
7551 it could be the return value of a function, or a parameter
 7552 to a function which is laid out on the stack, or a temporary
7553 variable which must be passed by reference.
7554
7555 We guarantee that the expression will either be constructed
7556 or copied into our original target. */
7557
7558 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 7559 tree cleanups = NULL_TREE;
5c062816 7560 tree exp1;
bbf6f052
RK
7561
7562 if (TREE_CODE (slot) != VAR_DECL)
7563 abort ();
7564
9c51f375
RK
7565 if (! ignore)
7566 target = original_target;
7567
bbf6f052
RK
7568 if (target == 0)
7569 {
7570 if (DECL_RTL (slot) != 0)
ac993f4f
MS
7571 {
7572 target = DECL_RTL (slot);
5c062816 7573 /* If we have already expanded the slot, don't do
ac993f4f 7574 it again. (mrs) */
5c062816
MS
7575 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7576 return target;
ac993f4f 7577 }
bbf6f052
RK
7578 else
7579 {
e9a25f70 7580 target = assign_temp (type, 2, 0, 1);
bbf6f052
RK
7581 /* All temp slots at this level must not conflict. */
7582 preserve_temp_slots (target);
7583 DECL_RTL (slot) = target;
e9a25f70
JL
7584 if (TREE_ADDRESSABLE (slot))
7585 {
7586 TREE_ADDRESSABLE (slot) = 0;
7587 mark_addressable (slot);
7588 }
bbf6f052 7589
e287fd6e
RK
7590 /* Since SLOT is not known to the called function
7591 to belong to its stack frame, we must build an explicit
7592 cleanup. This case occurs when we must build up a reference
7593 to pass the reference as an argument. In this case,
7594 it is very likely that such a reference need not be
7595 built here. */
7596
7597 if (TREE_OPERAND (exp, 2) == 0)
7598 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
2a888d4c 7599 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 7600 }
bbf6f052
RK
7601 }
7602 else
7603 {
7604 /* This case does occur, when expanding a parameter which
7605 needs to be constructed on the stack. The target
7606 is the actual stack address that we want to initialize.
7607 The function we call will perform the cleanup in this case. */
7608
8c042b47
RS
7609 /* If we have already assigned it space, use that space,
 7610 not the target that we were passed in, as our target
7611 parameter is only a hint. */
7612 if (DECL_RTL (slot) != 0)
7613 {
7614 target = DECL_RTL (slot);
 7615 /* If we have already expanded the slot, don't do
7616 it again. (mrs) */
7617 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7618 return target;
7619 }
21002281
JW
7620 else
7621 {
7622 DECL_RTL (slot) = target;
7623 /* If we must have an addressable slot, then make sure that
7624 the RTL that we just stored in slot is OK. */
7625 if (TREE_ADDRESSABLE (slot))
7626 {
7627 TREE_ADDRESSABLE (slot) = 0;
7628 mark_addressable (slot);
7629 }
7630 }
bbf6f052
RK
7631 }
7632
4847c938 7633 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
7634 /* Mark it as expanded. */
7635 TREE_OPERAND (exp, 1) = NULL_TREE;
7636
e5e809f4 7637 TREE_USED (slot) = 1;
41531e5b 7638 store_expr (exp1, target, 0);
61d6b1cc 7639
e976b8b2 7640 expand_decl_cleanup (NULL_TREE, cleanups);
61d6b1cc 7641
41531e5b 7642 return target;
bbf6f052
RK
7643 }
7644
7645 case INIT_EXPR:
7646 {
7647 tree lhs = TREE_OPERAND (exp, 0);
7648 tree rhs = TREE_OPERAND (exp, 1);
7649 tree noncopied_parts = 0;
7650 tree lhs_type = TREE_TYPE (lhs);
7651
7652 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7653 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7654 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7655 TYPE_NONCOPIED_PARTS (lhs_type));
7656 while (noncopied_parts != 0)
7657 {
7658 expand_assignment (TREE_VALUE (noncopied_parts),
7659 TREE_PURPOSE (noncopied_parts), 0, 0);
7660 noncopied_parts = TREE_CHAIN (noncopied_parts);
7661 }
7662 return temp;
7663 }
7664
7665 case MODIFY_EXPR:
7666 {
7667 /* If lhs is complex, expand calls in rhs before computing it.
7668 That's so we don't compute a pointer and save it over a call.
7669 If lhs is simple, compute it first so we can give it as a
7670 target if the rhs is just a call. This avoids an extra temp and copy
 7671 and prevents a partial subsumption which makes bad code.
7672 Actually we could treat component_ref's of vars like vars. */
7673
7674 tree lhs = TREE_OPERAND (exp, 0);
7675 tree rhs = TREE_OPERAND (exp, 1);
7676 tree noncopied_parts = 0;
7677 tree lhs_type = TREE_TYPE (lhs);
7678
7679 temp = 0;
7680
7681 if (TREE_CODE (lhs) != VAR_DECL
7682 && TREE_CODE (lhs) != RESULT_DECL
b60334e8
RK
7683 && TREE_CODE (lhs) != PARM_DECL
7684 && ! (TREE_CODE (lhs) == INDIRECT_REF
7685 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
bbf6f052
RK
7686 preexpand_calls (exp);
7687
7688 /* Check for |= or &= of a bitfield of size one into another bitfield
7689 of size 1. In this case, (unless we need the result of the
7690 assignment) we can do this more efficiently with a
7691 test followed by an assignment, if necessary.
7692
7693 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7694 things change so we do, this code should be enhanced to
7695 support it. */
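 /* Sketch of the transformation (illustrative only):

      struct { unsigned a : 1, b : 1; } x;

      x.a |= x.b;   =>   if (x.b) x.a = 1;
      x.a &= x.b;   =>   if (! x.b) x.a = 0;  */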
7696 if (ignore
7697 && TREE_CODE (lhs) == COMPONENT_REF
7698 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7699 || TREE_CODE (rhs) == BIT_AND_EXPR)
7700 && TREE_OPERAND (rhs, 0) == lhs
7701 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7702 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7703 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7704 {
7705 rtx label = gen_label_rtx ();
7706
7707 do_jump (TREE_OPERAND (rhs, 1),
7708 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7709 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7710 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7711 (TREE_CODE (rhs) == BIT_IOR_EXPR
7712 ? integer_one_node
7713 : integer_zero_node)),
7714 0, 0);
e7c33f54 7715 do_pending_stack_adjust ();
bbf6f052
RK
7716 emit_label (label);
7717 return const0_rtx;
7718 }
7719
7720 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7721 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7722 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7723 TYPE_NONCOPIED_PARTS (lhs_type));
7724
7725 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7726 while (noncopied_parts != 0)
7727 {
7728 expand_assignment (TREE_PURPOSE (noncopied_parts),
7729 TREE_VALUE (noncopied_parts), 0, 0);
7730 noncopied_parts = TREE_CHAIN (noncopied_parts);
7731 }
7732 return temp;
7733 }
7734
6e7f84a7
APB
7735 case RETURN_EXPR:
7736 if (!TREE_OPERAND (exp, 0))
7737 expand_null_return ();
7738 else
7739 expand_return (TREE_OPERAND (exp, 0));
7740 return const0_rtx;
7741
bbf6f052
RK
7742 case PREINCREMENT_EXPR:
7743 case PREDECREMENT_EXPR:
7b8b9722 7744 return expand_increment (exp, 0, ignore);
bbf6f052
RK
7745
7746 case POSTINCREMENT_EXPR:
7747 case POSTDECREMENT_EXPR:
7748 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 7749 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
7750
7751 case ADDR_EXPR:
987c71d9 7752 /* If nonzero, TEMP will be set to the address of something that might
0f41302f 7753 be a MEM corresponding to a stack slot. */
987c71d9
RK
7754 temp = 0;
7755
bbf6f052
RK
7756 /* Are we taking the address of a nested function? */
7757 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 7758 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
7759 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7760 && ! TREE_STATIC (exp))
bbf6f052
RK
7761 {
7762 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7763 op0 = force_operand (op0, target);
7764 }
682ba3a6
RK
7765 /* If we are taking the address of something erroneous, just
7766 return a zero. */
7767 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7768 return const0_rtx;
bbf6f052
RK
7769 else
7770 {
e287fd6e
RK
7771 /* We make sure to pass const0_rtx down if we came in with
 7772 ignore set, to avoid doing the cleanups twice. */
7773 op0 = expand_expr (TREE_OPERAND (exp, 0),
7774 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
7775 (modifier == EXPAND_INITIALIZER
7776 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 7777
119af78a
RK
7778 /* If we are going to ignore the result, OP0 will have been set
7779 to const0_rtx, so just return it. Don't get confused and
7780 think we are taking the address of the constant. */
7781 if (ignore)
7782 return op0;
7783
3539e816
MS
7784 op0 = protect_from_queue (op0, 0);
7785
896102d0
RK
7786 /* We would like the object in memory. If it is a constant,
7787 we can have it be statically allocated into memory. For
682ba3a6 7788 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
896102d0
RK
7789 memory and store the value into it. */
7790
7791 if (CONSTANT_P (op0))
7792 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7793 op0);
987c71d9 7794 else if (GET_CODE (op0) == MEM)
af5b53ed
RK
7795 {
7796 mark_temp_addr_taken (op0);
7797 temp = XEXP (op0, 0);
7798 }
896102d0 7799
682ba3a6 7800 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6c8538cc 7801 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
896102d0
RK
7802 {
 7803 /* If this object is in a register, it must not
0f41302f 7804 be BLKmode. */
896102d0 7805 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
06089a8b 7806 rtx memloc = assign_temp (inner_type, 1, 1, 1);
896102d0 7807
7a0b7b9a 7808 mark_temp_addr_taken (memloc);
896102d0
RK
7809 emit_move_insn (memloc, op0);
7810 op0 = memloc;
7811 }
7812
bbf6f052
RK
7813 if (GET_CODE (op0) != MEM)
7814 abort ();
7815
7816 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77
RK
7817 {
7818 temp = XEXP (op0, 0);
7819#ifdef POINTERS_EXTEND_UNSIGNED
7820 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7821 && mode == ptr_mode)
9fcfcce7 7822 temp = convert_memory_address (ptr_mode, temp);
88f63c77
RK
7823#endif
7824 return temp;
7825 }
987c71d9 7826
bbf6f052
RK
7827 op0 = force_operand (XEXP (op0, 0), target);
7828 }
987c71d9 7829
bbf6f052 7830 if (flag_force_addr && GET_CODE (op0) != REG)
987c71d9
RK
7831 op0 = force_reg (Pmode, op0);
7832
dc6d66b3
RK
7833 if (GET_CODE (op0) == REG
7834 && ! REG_USERVAR_P (op0))
7835 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
987c71d9
RK
7836
7837 /* If we might have had a temp slot, add an equivalent address
7838 for it. */
7839 if (temp != 0)
7840 update_temp_slot_address (temp, op0);
7841
88f63c77
RK
7842#ifdef POINTERS_EXTEND_UNSIGNED
7843 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7844 && mode == ptr_mode)
9fcfcce7 7845 op0 = convert_memory_address (ptr_mode, op0);
88f63c77
RK
7846#endif
7847
bbf6f052
RK
7848 return op0;
7849
7850 case ENTRY_VALUE_EXPR:
7851 abort ();
7852
7308a047
RS
 7853 /* COMPLEX type for Extended Pascal & Fortran. */
7854 case COMPLEX_EXPR:
7855 {
7856 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 7857 rtx insns;
7308a047
RS
7858
 7859 /* Get the rtx of the operands. */
7860 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7861 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7862
7863 if (! target)
7864 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7865
6551fa4d 7866 start_sequence ();
7308a047
RS
7867
7868 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
7869 emit_move_insn (gen_realpart (mode, target), op0);
7870 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 7871
6551fa4d
JW
7872 insns = get_insns ();
7873 end_sequence ();
7874
7308a047 7875 /* Complex construction should appear as a single unit. */
6551fa4d
JW
7876 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7877 each with a separate pseudo as destination.
7878 It's not correct for flow to treat them as a unit. */
6d6e61ce 7879 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
7880 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7881 else
7882 emit_insns (insns);
7308a047
RS
7883
7884 return target;
7885 }
7886
7887 case REALPART_EXPR:
2d7050fd
RS
7888 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7889 return gen_realpart (mode, op0);
7308a047
RS
7890
7891 case IMAGPART_EXPR:
2d7050fd
RS
7892 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7893 return gen_imagpart (mode, op0);
7308a047
RS
7894
7895 case CONJ_EXPR:
7896 {
62acb978 7897 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 7898 rtx imag_t;
6551fa4d 7899 rtx insns;
7308a047
RS
7900
7901 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7902
7903 if (! target)
d6a5ac33 7904 target = gen_reg_rtx (mode);
7308a047 7905
6551fa4d 7906 start_sequence ();
7308a047
RS
7907
7908 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
7909 emit_move_insn (gen_realpart (partmode, target),
7910 gen_realpart (partmode, op0));
7308a047 7911
62acb978
RK
7912 imag_t = gen_imagpart (partmode, target);
7913 temp = expand_unop (partmode, neg_optab,
7914 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
7915 if (temp != imag_t)
7916 emit_move_insn (imag_t, temp);
7917
6551fa4d
JW
7918 insns = get_insns ();
7919 end_sequence ();
7920
d6a5ac33
RK
 7921 /* Conjugate should appear as a single unit.
7922 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
7923 each with a separate pseudo as destination.
7924 It's not correct for flow to treat them as a unit. */
6d6e61ce 7925 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
7926 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7927 else
7928 emit_insns (insns);
7308a047
RS
7929
7930 return target;
7931 }
7932
e976b8b2
MS
7933 case TRY_CATCH_EXPR:
7934 {
7935 tree handler = TREE_OPERAND (exp, 1);
7936
7937 expand_eh_region_start ();
7938
7939 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7940
7941 expand_eh_region_end (handler);
7942
7943 return op0;
7944 }
7945
7946 case POPDCC_EXPR:
7947 {
7948 rtx dcc = get_dynamic_cleanup_chain ();
38a448ca 7949 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
e976b8b2
MS
7950 return const0_rtx;
7951 }
7952
7953 case POPDHC_EXPR:
7954 {
7955 rtx dhc = get_dynamic_handler_chain ();
38a448ca 7956 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
e976b8b2
MS
7957 return const0_rtx;
7958 }
7959
bbf6f052 7960 case ERROR_MARK:
66538193
RS
7961 op0 = CONST0_RTX (tmode);
7962 if (op0 != 0)
7963 return op0;
bbf6f052
RK
7964 return const0_rtx;
7965
7966 default:
90764a87 7967 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
7968 }
7969
7970 /* Here to do an ordinary binary operator, generating an instruction
7971 from the optab already placed in `this_optab'. */
7972 binop:
7973 preexpand_calls (exp);
e5e809f4 7974 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7975 subtarget = 0;
7976 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7977 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7978 binop2:
7979 temp = expand_binop (mode, this_optab, op0, op1, target,
7980 unsignedp, OPTAB_LIB_WIDEN);
7981 if (temp == 0)
7982 abort ();
7983 return temp;
7984}
bbf6f052 7985
bbf6f052 7986
b93a436e
JL
7987\f
7988/* Return the alignment in bits of EXP, a pointer valued expression.
7989 But don't return more than MAX_ALIGN no matter what.
7990 The alignment returned is, by default, the alignment of the thing that
7991 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7992
7993 Otherwise, look at the expression to see if we can do better, i.e., if the
7994 expression is actually pointing at an object whose alignment is tighter. */
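/* Usage sketch (illustrative only): for

     char buf[64] __attribute__ ((aligned (8)));

   asking for the alignment of `&buf[0]' with MAX_ALIGN of 32 reaches
   the ADDR_EXPR case, finds DECL_ALIGN of `buf' (64 bits), and returns
   32, rather than the 8-bit alignment implied by the pointer type.  */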
0f41302f 7995
b93a436e
JL
7996static int
7997get_pointer_alignment (exp, max_align)
7998 tree exp;
7999 unsigned max_align;
bbf6f052 8000{
b93a436e
JL
8001 unsigned align, inner;
8002
8003 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8004 return 0;
8005
8006 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8007 align = MIN (align, max_align);
8008
8009 while (1)
bbf6f052 8010 {
b93a436e 8011 switch (TREE_CODE (exp))
bbf6f052 8012 {
b93a436e
JL
8013 case NOP_EXPR:
8014 case CONVERT_EXPR:
8015 case NON_LVALUE_EXPR:
8016 exp = TREE_OPERAND (exp, 0);
8017 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8018 return align;
8019 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8020 align = MIN (inner, max_align);
8021 break;
8022
8023 case PLUS_EXPR:
8024 /* If sum of pointer + int, restrict our maximum alignment to that
8025 imposed by the integer. If not, we can't do any better than
8026 ALIGN. */
8027 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
8028 return align;
8029
8030 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
8031 & (max_align - 1))
8032 != 0)
8033 max_align >>= 1;
8034
8035 exp = TREE_OPERAND (exp, 0);
8036 break;
8037
8038 case ADDR_EXPR:
8039 /* See what we are pointing at and look at its alignment. */
8040 exp = TREE_OPERAND (exp, 0);
8041 if (TREE_CODE (exp) == FUNCTION_DECL)
8042 align = FUNCTION_BOUNDARY;
8043 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8044 align = DECL_ALIGN (exp);
8045#ifdef CONSTANT_ALIGNMENT
8046 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
8047 align = CONSTANT_ALIGNMENT (exp, align);
c02bd5d9 8048#endif
b93a436e 8049 return MIN (align, max_align);
c02bd5d9 8050
8051 default:
8052 return align;
8053 }
8054 }
8055}
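/* Editorial example (not part of the original source): the PLUS_EXPR
   rule above can be exercised in isolation.  A minimal sketch, using a
   hypothetical host-side helper with the same shift-down loop:

     static unsigned
     toy_offset_align (max_align, offset_bytes)
          unsigned max_align, offset_bytes;
     {
       while (((offset_bytes * 8) & (max_align - 1)) != 0)
         max_align >>= 1;
       return max_align;
     }

   For a pointer known to be 64-bit aligned, an offset of 2 bytes gives
   toy_offset_align (64, 2) == 16, i.e. only 16-bit alignment can still
   be guaranteed for the sum.  */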
8056\f
 8057/* If ARG corresponds to a string constant, return the STRING_CST node
 8058 and set *PTR_OFFSET to the byte offset into it.  Otherwise return 0. */
8059
8060static tree
8061string_constant (arg, ptr_offset)
8062 tree arg;
8063 tree *ptr_offset;
8064{
8065 STRIP_NOPS (arg);
8066
8067 if (TREE_CODE (arg) == ADDR_EXPR
8068 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8069 {
8070 *ptr_offset = integer_zero_node;
8071 return TREE_OPERAND (arg, 0);
8072 }
8073 else if (TREE_CODE (arg) == PLUS_EXPR)
8074 {
8075 tree arg0 = TREE_OPERAND (arg, 0);
8076 tree arg1 = TREE_OPERAND (arg, 1);
8077
8078 STRIP_NOPS (arg0);
8079 STRIP_NOPS (arg1);
8080
8081 if (TREE_CODE (arg0) == ADDR_EXPR
8082 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 8083 {
8084 *ptr_offset = arg1;
8085 return TREE_OPERAND (arg0, 0);
bbf6f052 8086 }
8087 else if (TREE_CODE (arg1) == ADDR_EXPR
8088 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 8089 {
8090 *ptr_offset = arg0;
8091 return TREE_OPERAND (arg1, 0);
bbf6f052 8092 }
b93a436e 8093 }
ca695ac9 8094
8095 return 0;
8096}
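/* Editorial example (not part of the original source): in source terms,
   the two shapes recognized above are

     string_constant ("hello", &off)      returns "hello", off = 0
     string_constant ("hello" + 3, &off)  returns "hello", off = 3

   (the literal decays to an ADDR_EXPR of the STRING_CST).  Anything
   else, such as a pointer variable, yields 0.  */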
ca695ac9 8097
8098/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
8099 way, because it could contain a zero byte in the middle.
8100 TREE_STRING_LENGTH is the size of the character array, not the string.
ca695ac9 8101
8102 Unfortunately, string_constant can't access the values of const char
 8103 arrays with initializers, so neither can we here. */
e87b4f3f 8104
8105static tree
8106c_strlen (src)
8107 tree src;
8108{
8109 tree offset_node;
8110 int offset, max;
8111 char *ptr;
e7c33f54 8112
8113 src = string_constant (src, &offset_node);
8114 if (src == 0)
8115 return 0;
8116 max = TREE_STRING_LENGTH (src);
8117 ptr = TREE_STRING_POINTER (src);
8118 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8119 {
8120 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8121 compute the offset to the following null if we don't know where to
8122 start searching for it. */
8123 int i;
8124 for (i = 0; i < max; i++)
8125 if (ptr[i] == 0)
8126 return 0;
8127 /* We don't know the starting offset, but we do know that the string
8128 has no internal zero bytes. We can assume that the offset falls
8129 within the bounds of the string; otherwise, the programmer deserves
8130 what he gets. Subtract the offset from the length of the string,
8131 and return that. */
8132 /* This would perhaps not be valid if we were dealing with named
8133 arrays in addition to literal string constants. */
8134 return size_binop (MINUS_EXPR, size_int (max), offset_node);
8135 }
e7c33f54 8136
8137 /* We have a known offset into the string. Start searching there for
8138 a null character. */
8139 if (offset_node == 0)
8140 offset = 0;
8141 else
8142 {
8143 /* Did we get a long long offset? If so, punt. */
8144 if (TREE_INT_CST_HIGH (offset_node) != 0)
8145 return 0;
8146 offset = TREE_INT_CST_LOW (offset_node);
8147 }
8148 /* If the offset is known to be out of bounds, warn, and call strlen at
8149 runtime. */
8150 if (offset < 0 || offset > max)
8151 {
8152 warning ("offset outside bounds of constant string");
8153 return 0;
8154 }
8155 /* Use strlen to search for the first zero byte. Since any strings
8156 constructed with build_string will have nulls appended, we win even
8157 if we get handed something like (char[4])"abcd".
e7c33f54 8158
8159 Since OFFSET is our starting index into the string, no further
8160 calculation is needed. */
8161 return size_int (strlen (ptr + offset));
8162}
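/* Editorial example (not part of the original source): what c_strlen
   folds, in source terms:

     strlen ("hello")         known offset 0, folds to 5
     strlen ("hello" + 2)     known offset, folds to 3
     strlen ("foo\0bar" + n)  n not constant and the array contains an
                              internal zero byte, so 0 is returned and
                              strlen is called at run time

   where n is a hypothetical non-constant index.  */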
1bbddf11 8163
8164rtx
8165expand_builtin_return_addr (fndecl_code, count, tem)
8166 enum built_in_function fndecl_code;
8167 int count;
8168 rtx tem;
8169{
8170 int i;
e7c33f54 8171
8172 /* Some machines need special handling before we can access
8173 arbitrary frames. For example, on the sparc, we must first flush
8174 all register windows to the stack. */
8175#ifdef SETUP_FRAME_ADDRESSES
8176 if (count > 0)
8177 SETUP_FRAME_ADDRESSES ();
8178#endif
e87b4f3f 8179
8180 /* On the sparc, the return address is not in the frame, it is in a
8181 register. There is no way to access it off of the current frame
8182 pointer, but it can be accessed off the previous frame pointer by
8183 reading the value from the register window save area. */
8184#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8185 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8186 count--;
8187#endif
60bac6ea 8188
8189 /* Scan back COUNT frames to the specified frame. */
8190 for (i = 0; i < count; i++)
8191 {
8192 /* Assume the dynamic chain pointer is in the word that the
8193 frame address points to, unless otherwise specified. */
8194#ifdef DYNAMIC_CHAIN_ADDRESS
8195 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8196#endif
8197 tem = memory_address (Pmode, tem);
8198 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8199 }
ca695ac9 8200
8201 /* For __builtin_frame_address, return what we've got. */
8202 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8203 return tem;
e9a25f70 8204
 8205 /* For __builtin_return_address, get the return address from that
8206 frame. */
8207#ifdef RETURN_ADDR_RTX
8208 tem = RETURN_ADDR_RTX (count, tem);
8209#else
8210 tem = memory_address (Pmode,
8211 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8212 tem = gen_rtx_MEM (Pmode, tem);
8213#endif
8214 return tem;
8215}
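/* Editorial example (not part of the original source): the source-level
   forms that reach the helper above are

     void *ra = __builtin_return_address (0);
     void *fp = __builtin_frame_address (1);

   A count of 0 names the current frame; each increment follows the
   dynamic chain one frame up, as in the loop above.  */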
e9a25f70 8216
8217/* __builtin_setjmp is passed a pointer to an array of five words (not
8218 all will be used on all machines). It operates similarly to the C
8219 library function of the same name, but is more efficient. Much of
8220 the code below (and for longjmp) is copied from the handling of
8221 non-local gotos.
ca695ac9 8222
8223 NOTE: This is intended for use by GNAT and the exception handling
8224 scheme in the compiler and will only work in the method used by
8225 them. */
e9a25f70 8226
b93a436e 8227rtx
6fd1c67b 8228expand_builtin_setjmp (buf_addr, target, first_label, next_label)
8229 rtx buf_addr;
8230 rtx target;
6fd1c67b 8231 rtx first_label, next_label;
b93a436e 8232{
6fd1c67b 8233 rtx lab1 = gen_label_rtx ();
8234 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8235 enum machine_mode value_mode;
b93a436e 8236 rtx stack_save;
e9a25f70 8237
b93a436e 8238 value_mode = TYPE_MODE (integer_type_node);
ca695ac9 8239
8240#ifdef POINTERS_EXTEND_UNSIGNED
8241 buf_addr = convert_memory_address (Pmode, buf_addr);
8242#endif
d7f21d63 8243
b93a436e 8244 buf_addr = force_reg (Pmode, buf_addr);
d7f21d63 8245
8246 if (target == 0 || GET_CODE (target) != REG
8247 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8248 target = gen_reg_rtx (value_mode);
d7f21d63 8249
b93a436e 8250 emit_queue ();
d7f21d63 8251
8252 /* We store the frame pointer and the address of lab1 in the buffer
8253 and use the rest of it for the stack save area, which is
8254 machine-dependent. */
8255
8256#ifndef BUILTIN_SETJMP_FRAME_VALUE
8257#define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
8258#endif
8259
b93a436e 8260 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
0bc02db4 8261 BUILTIN_SETJMP_FRAME_VALUE);
8262 emit_move_insn (validize_mem
8263 (gen_rtx_MEM (Pmode,
8264 plus_constant (buf_addr,
8265 GET_MODE_SIZE (Pmode)))),
6fd1c67b 8266 gen_rtx_LABEL_REF (Pmode, lab1));
d7f21d63 8267
8268 stack_save = gen_rtx_MEM (sa_mode,
8269 plus_constant (buf_addr,
8270 2 * GET_MODE_SIZE (Pmode)));
8271 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
e9a25f70 8272
8273 /* If there is further processing to do, do it. */
8274#ifdef HAVE_builtin_setjmp_setup
8275 if (HAVE_builtin_setjmp_setup)
8276 emit_insn (gen_builtin_setjmp_setup (buf_addr));
b93a436e 8277#endif
d7f21d63 8278
6fd1c67b 8279 /* Set TARGET to zero and branch to the first-time-through label. */
b93a436e 8280 emit_move_insn (target, const0_rtx);
6fd1c67b 8281 emit_jump_insn (gen_jump (first_label));
8282 emit_barrier ();
8283 emit_label (lab1);
d7f21d63 8284
8285 /* Tell flow about the strange goings on. */
8286 current_function_has_nonlocal_label = 1;
8287
8288 /* Clobber the FP when we get here, so we have to make sure it's
8289 marked as used by this function. */
b93a436e 8290 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
e9a25f70 8291
8292 /* Mark the static chain as clobbered here so life information
8293 doesn't get messed up for it. */
8294 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
d7f21d63 8295
8296 /* Now put in the code to restore the frame pointer, and argument
8297 pointer, if needed. The code below is from expand_end_bindings
8298 in stmt.c; see detailed documentation there. */
8299#ifdef HAVE_nonlocal_goto
8300 if (! HAVE_nonlocal_goto)
8301#endif
8302 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
ca695ac9 8303
8304#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8305 if (fixed_regs[ARG_POINTER_REGNUM])
8306 {
8307#ifdef ELIMINABLE_REGS
081f5e7e 8308 int i;
b93a436e 8309 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
ca695ac9 8310
8311 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8312 if (elim_regs[i].from == ARG_POINTER_REGNUM
8313 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8314 break;
ca695ac9 8315
8316 if (i == sizeof elim_regs / sizeof elim_regs [0])
8317#endif
8318 {
8319 /* Now restore our arg pointer from the address at which it
8320 was saved in our stack frame.
 8321 If there hasn't been space allocated for it yet, make
8322 some now. */
8323 if (arg_pointer_save_area == 0)
8324 arg_pointer_save_area
8325 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8326 emit_move_insn (virtual_incoming_args_rtx,
8327 copy_to_reg (arg_pointer_save_area));
8328 }
8329 }
8330#endif
ca695ac9 8331
8332#ifdef HAVE_builtin_setjmp_receiver
8333 if (HAVE_builtin_setjmp_receiver)
8334 emit_insn (gen_builtin_setjmp_receiver (lab1));
8335 else
8336#endif
b93a436e 8337#ifdef HAVE_nonlocal_goto_receiver
8338 if (HAVE_nonlocal_goto_receiver)
8339 emit_insn (gen_nonlocal_goto_receiver ());
8340 else
b93a436e 8341#endif
8342 {
8343 ; /* Nothing */
8344 }
8345
8346 /* Set TARGET, and branch to the next-time-through label. */
3e2b9a3d 8347 emit_move_insn (target, const1_rtx);
8348 emit_jump_insn (gen_jump (next_label));
8349 emit_barrier ();
ca695ac9 8350
8351 return target;
8352}
ca695ac9 8353
8354void
8355expand_builtin_longjmp (buf_addr, value)
8356 rtx buf_addr, value;
8357{
8358 rtx fp, lab, stack;
a260abc9 8359 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
ca695ac9 8360
8361#ifdef POINTERS_EXTEND_UNSIGNED
8362 buf_addr = convert_memory_address (Pmode, buf_addr);
b93a436e 8363#endif
8364 buf_addr = force_reg (Pmode, buf_addr);
8365
8366 /* We used to store value in static_chain_rtx, but that fails if pointers
 8367 are smaller than integers. We instead require that the user pass
8368 a second argument of 1, because that is what builtin_setjmp will
8369 return. This also makes EH slightly more efficient, since we are no
8370 longer copying around a value that we don't care about. */
8371 if (value != const1_rtx)
8372 abort ();
8373
8374#ifdef HAVE_builtin_longjmp
8375 if (HAVE_builtin_longjmp)
3e2b9a3d 8376 emit_insn (gen_builtin_longjmp (buf_addr));
6fd1c67b 8377 else
b93a436e 8378#endif
8379 {
8380 fp = gen_rtx_MEM (Pmode, buf_addr);
8381 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
8382 GET_MODE_SIZE (Pmode)));
e9a25f70 8383
8384 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
8385 2 * GET_MODE_SIZE (Pmode)));
8386
8387 /* Pick up FP, label, and SP from the block and jump. This code is
8388 from expand_goto in stmt.c; see there for detailed comments. */
8389#if HAVE_nonlocal_goto
8390 if (HAVE_nonlocal_goto)
8391 /* We have to pass a value to the nonlocal_goto pattern that will
8392 get copied into the static_chain pointer, but it does not matter
8393 what that value is, because builtin_setjmp does not use it. */
8394 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
8395 else
b93a436e 8396#endif
8397 {
8398 lab = copy_to_reg (lab);
60bac6ea 8399
8400 emit_move_insn (hard_frame_pointer_rtx, fp);
8401 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8402
8403 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8404 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
8405 emit_indirect_jump (lab);
8406 }
8407 }
b93a436e 8408}
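/* Editorial example (not part of the original source): a minimal use of
   the pair above, with the five-word buffer the comments call for:

     void *buf[5];

     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);

   The second argument to __builtin_longjmp must be 1, as enforced
   above; __builtin_setjmp returns 0 on the initial pass and 1 when
   re-entered, matching the const0_rtx/const1_rtx moves generated by
   expand_builtin_setjmp.  */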
60bac6ea 8409
8410static rtx
8411get_memory_rtx (exp)
8412 tree exp;
8413{
8414 rtx mem;
8415 int is_aggregate;
8416
8417 mem = gen_rtx_MEM (BLKmode,
8418 memory_address (BLKmode,
8419 expand_expr (exp, NULL_RTX,
8420 ptr_mode, EXPAND_SUM)));
8421
8422 RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);
8423
8424 /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P
8425 if the value is the address of a structure or if the expression is
8426 cast to a pointer to structure type. */
8427 is_aggregate = 0;
8428
8429 while (TREE_CODE (exp) == NOP_EXPR)
8430 {
8431 tree cast_type = TREE_TYPE (exp);
8432 if (TREE_CODE (cast_type) == POINTER_TYPE
8433 && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
8434 {
8435 is_aggregate = 1;
8436 break;
8437 }
8438 exp = TREE_OPERAND (exp, 0);
8439 }
8440
8441 if (is_aggregate == 0)
8442 {
8443 tree type;
8444
8445 if (TREE_CODE (exp) == ADDR_EXPR)
8446 /* If this is the address of an object, check whether the
8447 object is an array. */
8448 type = TREE_TYPE (TREE_OPERAND (exp, 0));
8449 else
8450 type = TREE_TYPE (TREE_TYPE (exp));
8451 is_aggregate = AGGREGATE_TYPE_P (type);
8452 }
8453
8454 MEM_IN_STRUCT_P (mem) = is_aggregate;
8455 return mem;
8456}
8457
8458\f
8459/* Expand an expression EXP that calls a built-in function,
8460 with result going to TARGET if that's convenient
8461 (and in mode MODE if that's convenient).
8462 SUBTARGET may be used as the target for computing one of EXP's operands.
8463 IGNORE is nonzero if the value is to be ignored. */
60bac6ea 8464
8465#define CALLED_AS_BUILT_IN(NODE) \
8466 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
60bac6ea 8467
8468static rtx
8469expand_builtin (exp, target, subtarget, mode, ignore)
8470 tree exp;
8471 rtx target;
8472 rtx subtarget;
8473 enum machine_mode mode;
8474 int ignore;
8475{
8476 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8477 tree arglist = TREE_OPERAND (exp, 1);
8478 rtx op0;
8479 rtx lab1, insns;
8480 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8481 optab builtin_optab;
60bac6ea 8482
8483 switch (DECL_FUNCTION_CODE (fndecl))
8484 {
8485 case BUILT_IN_ABS:
8486 case BUILT_IN_LABS:
8487 case BUILT_IN_FABS:
8488 /* build_function_call changes these into ABS_EXPR. */
8489 abort ();
4ed67205 8490
8491 case BUILT_IN_SIN:
8492 case BUILT_IN_COS:
8493 /* Treat these like sqrt, but only if the user asks for them. */
8494 if (! flag_fast_math)
8495 break;
8496 case BUILT_IN_FSQRT:
8497 /* If not optimizing, call the library function. */
8498 if (! optimize)
8499 break;
4ed67205 8500
8501 if (arglist == 0
8502 /* Arg could be wrong type if user redeclared this fcn wrong. */
8503 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
8504 break;
8505
8506 /* Stabilize and compute the argument. */
8507 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8508 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8509 {
8510 exp = copy_node (exp);
8511 arglist = copy_node (arglist);
8512 TREE_OPERAND (exp, 1) = arglist;
8513 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8514 }
8515 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
b089937a 8516
8517 /* Make a suitable register to place result in. */
8518 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7565a035 8519
8520 emit_queue ();
8521 start_sequence ();
7565a035 8522
8523 switch (DECL_FUNCTION_CODE (fndecl))
8524 {
8525 case BUILT_IN_SIN:
8526 builtin_optab = sin_optab; break;
8527 case BUILT_IN_COS:
8528 builtin_optab = cos_optab; break;
8529 case BUILT_IN_FSQRT:
8530 builtin_optab = sqrt_optab; break;
8531 default:
8532 abort ();
8533 }
4ed67205 8534
8535 /* Compute into TARGET.
8536 Set TARGET to wherever the result comes back. */
8537 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8538 builtin_optab, op0, target, 0);
8539
8540 /* If we were unable to expand via the builtin, stop the
8541 sequence (without outputting the insns) and break, causing
38e01259 8542 a call to the library function. */
b93a436e 8543 if (target == 0)
4ed67205 8544 {
8545 end_sequence ();
8546 break;
8547 }
4ed67205 8548
8549 /* Check the results by default. But if flag_fast_math is turned on,
8550 then assume sqrt will always be called with valid arguments. */
4ed67205 8551
8552 if (! flag_fast_math)
8553 {
8554 /* Don't define the builtin FP instructions
8555 if your machine is not IEEE. */
8556 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8557 abort ();
4ed67205 8558
b93a436e 8559 lab1 = gen_label_rtx ();
ca55abae 8560
8561 /* Test the result; if it is NaN, set errno=EDOM because
8562 the argument was not in the domain. */
8563 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8564 emit_jump_insn (gen_beq (lab1));
8565
8566#ifdef TARGET_EDOM
8567 {
8568#ifdef GEN_ERRNO_RTX
8569 rtx errno_rtx = GEN_ERRNO_RTX;
8570#else
8571 rtx errno_rtx
8572 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8573#endif
e87b4f3f 8574
8575 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8576 }
8577#else
8578 /* We can't set errno=EDOM directly; let the library call do it.
8579 Pop the arguments right away in case the call gets deleted. */
8580 NO_DEFER_POP;
8581 expand_call (exp, target, 0);
8582 OK_DEFER_POP;
8583#endif
e7c33f54 8584
8585 emit_label (lab1);
8586 }
0006469d 8587
8588 /* Output the entire sequence. */
8589 insns = get_insns ();
8590 end_sequence ();
8591 emit_insns (insns);
8592
8593 return target;
0006469d 8594
8595 case BUILT_IN_FMOD:
8596 break;
0006469d 8597
8598 /* __builtin_apply_args returns block of memory allocated on
8599 the stack into which is stored the arg pointer, structure
8600 value address, static chain, and all the registers that might
8601 possibly be used in performing a function call. The code is
8602 moved to the start of the function so the incoming values are
8603 saved. */
8604 case BUILT_IN_APPLY_ARGS:
8605 /* Don't do __builtin_apply_args more than once in a function.
8606 Save the result of the first call and reuse it. */
8607 if (apply_args_value != 0)
8608 return apply_args_value;
8609 {
8610 /* When this function is called, it means that registers must be
8611 saved on entry to this function. So we migrate the
8612 call to the first insn of this function. */
8613 rtx temp;
8614 rtx seq;
0006469d 8615
8616 start_sequence ();
8617 temp = expand_builtin_apply_args ();
8618 seq = get_insns ();
8619 end_sequence ();
0006469d 8620
b93a436e 8621 apply_args_value = temp;
0006469d 8622
8623 /* Put the sequence after the NOTE that starts the function.
8624 If this is inside a SEQUENCE, make the outer-level insn
8625 chain current, so the code is placed at the start of the
8626 function. */
8627 push_topmost_sequence ();
8628 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8629 pop_topmost_sequence ();
8630 return temp;
8631 }
0006469d 8632
8633 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8634 FUNCTION with a copy of the parameters described by
8635 ARGUMENTS, and ARGSIZE. It returns a block of memory
8636 allocated on the stack into which is stored all the registers
8637 that might possibly be used for returning the result of a
8638 function. ARGUMENTS is the value returned by
8639 __builtin_apply_args. ARGSIZE is the number of bytes of
8640 arguments that must be copied. ??? How should this value be
8641 computed? We'll also need a safe worst case value for varargs
8642 functions. */
8643 case BUILT_IN_APPLY:
8644 if (arglist == 0
8645 /* Arg could be non-pointer if user redeclared this fcn wrong. */
e5e809f4 8646 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
8647 || TREE_CHAIN (arglist) == 0
8648 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8649 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8650 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8651 return const0_rtx;
8652 else
8653 {
8654 int i;
8655 tree t;
8656 rtx ops[3];
0006469d 8657
8658 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8659 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
bbf6f052 8660
8661 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8662 }
bbf6f052 8663
8664 /* __builtin_return (RESULT) causes the function to return the
8665 value described by RESULT. RESULT is address of the block of
8666 memory returned by __builtin_apply. */
8667 case BUILT_IN_RETURN:
8668 if (arglist
8669 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8670 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8671 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8672 NULL_RTX, VOIDmode, 0));
8673 return const0_rtx;
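/* Editorial example (not part of the original source): the three
   builtins above combine to forward a call with unknown arguments:

     void *args = __builtin_apply_args ();
     void *ret = __builtin_apply ((void (*) ()) other_fn, args, 64);
     __builtin_return (ret);

   Here other_fn and the 64-byte argument-copy size are hypothetical;
   as the ??? comment notes, choosing a safe worst-case size is left
   to the caller.  */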
ca695ac9 8674
8675 case BUILT_IN_SAVEREGS:
8676 /* Don't do __builtin_saveregs more than once in a function.
8677 Save the result of the first call and reuse it. */
8678 if (saveregs_value != 0)
8679 return saveregs_value;
8680 {
8681 /* When this function is called, it means that registers must be
8682 saved on entry to this function. So we migrate the
8683 call to the first insn of this function. */
8684 rtx temp;
8685 rtx seq;
ca695ac9 8686
8687 /* Now really call the function. `expand_call' does not call
8688 expand_builtin, so there is no danger of infinite recursion here. */
8689 start_sequence ();
ca695ac9 8690
8691#ifdef EXPAND_BUILTIN_SAVEREGS
8692 /* Do whatever the machine needs done in this case. */
8693 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8694#else
8695 /* The register where the function returns its value
8696 is likely to have something else in it, such as an argument.
8697 So preserve that register around the call. */
ca695ac9 8698
8699 if (value_mode != VOIDmode)
8700 {
8701 rtx valreg = hard_libcall_value (value_mode);
8702 rtx saved_valreg = gen_reg_rtx (value_mode);
ca695ac9 8703
8704 emit_move_insn (saved_valreg, valreg);
8705 temp = expand_call (exp, target, ignore);
8706 emit_move_insn (valreg, saved_valreg);
8707 }
8708 else
8709 /* Generate the call, putting the value in a pseudo. */
8710 temp = expand_call (exp, target, ignore);
8711#endif
bbf6f052 8712
8713 seq = get_insns ();
8714 end_sequence ();
bbf6f052 8715
b93a436e 8716 saveregs_value = temp;
bbf6f052 8717
8718 /* Put the sequence after the NOTE that starts the function.
8719 If this is inside a SEQUENCE, make the outer-level insn
8720 chain current, so the code is placed at the start of the
8721 function. */
8722 push_topmost_sequence ();
8723 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8724 pop_topmost_sequence ();
8725 return temp;
8726 }
bbf6f052 8727
8728 /* __builtin_args_info (N) returns word N of the arg space info
8729 for the current function. The number and meanings of words
 8730 are controlled by the definition of CUMULATIVE_ARGS. */
8731 case BUILT_IN_ARGS_INFO:
8732 {
8733 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
b93a436e 8734 int *word_ptr = (int *) &current_function_args_info;
8735#if 0
 8736 /* These are used by the code below that is #if 0'ed away. */
8737 int i;
b93a436e 8738 tree type, elts, result;
381127e8 8739#endif
bbf6f052 8740
8741 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8742 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8743 __FILE__, __LINE__);
bbf6f052 8744
8745 if (arglist != 0)
8746 {
8747 tree arg = TREE_VALUE (arglist);
8748 if (TREE_CODE (arg) != INTEGER_CST)
8749 error ("argument of `__builtin_args_info' must be constant");
8750 else
8751 {
8752 int wordnum = TREE_INT_CST_LOW (arg);
bbf6f052 8753
8754 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8755 error ("argument of `__builtin_args_info' out of range");
8756 else
8757 return GEN_INT (word_ptr[wordnum]);
8758 }
8759 }
8760 else
b93a436e 8761 error ("missing argument in `__builtin_args_info'");
bbf6f052 8762
b93a436e 8763 return const0_rtx;
bbf6f052 8764
8765#if 0
8766 for (i = 0; i < nwords; i++)
8767 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
bbf6f052 8768
8769 type = build_array_type (integer_type_node,
8770 build_index_type (build_int_2 (nwords, 0)));
8771 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8772 TREE_CONSTANT (result) = 1;
8773 TREE_STATIC (result) = 1;
8774 result = build (INDIRECT_REF, build_pointer_type (type), result);
8775 TREE_CONSTANT (result) = 1;
8776 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8777#endif
8778 }
8779
8780 /* Return the address of the first anonymous stack arg. */
8781 case BUILT_IN_NEXT_ARG:
ca695ac9 8782 {
8783 tree fntype = TREE_TYPE (current_function_decl);
8784
8785 if ((TYPE_ARG_TYPES (fntype) == 0
8786 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8787 == void_type_node))
8788 && ! current_function_varargs)
8789 {
8790 error ("`va_start' used in function with fixed args");
8791 return const0_rtx;
8792 }
8793
8794 if (arglist)
8795 {
8796 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8797 tree arg = TREE_VALUE (arglist);
8798
8799 /* Strip off all nops for the sake of the comparison. This
8800 is not quite the same as STRIP_NOPS. It does more.
8801 We must also strip off INDIRECT_EXPR for C++ reference
8802 parameters. */
8803 while (TREE_CODE (arg) == NOP_EXPR
8804 || TREE_CODE (arg) == CONVERT_EXPR
8805 || TREE_CODE (arg) == NON_LVALUE_EXPR
8806 || TREE_CODE (arg) == INDIRECT_REF)
8807 arg = TREE_OPERAND (arg, 0);
8808 if (arg != last_parm)
8809 warning ("second parameter of `va_start' not last named argument");
8810 }
8811 else if (! current_function_varargs)
8812 /* Evidently an out of date version of <stdarg.h>; can't validate
8813 va_start's second argument, but can still work as intended. */
8814 warning ("`__builtin_next_arg' called without an argument");
8815 }
8816
8817 return expand_binop (Pmode, add_optab,
8818 current_function_internal_arg_pointer,
8819 current_function_arg_offset_rtx,
8820 NULL_RTX, 0, OPTAB_LIB_WIDEN);
ca695ac9 8821
8822 case BUILT_IN_CLASSIFY_TYPE:
8823 if (arglist != 0)
8824 {
8825 tree type = TREE_TYPE (TREE_VALUE (arglist));
8826 enum tree_code code = TREE_CODE (type);
8827 if (code == VOID_TYPE)
8828 return GEN_INT (void_type_class);
8829 if (code == INTEGER_TYPE)
8830 return GEN_INT (integer_type_class);
8831 if (code == CHAR_TYPE)
8832 return GEN_INT (char_type_class);
8833 if (code == ENUMERAL_TYPE)
8834 return GEN_INT (enumeral_type_class);
8835 if (code == BOOLEAN_TYPE)
8836 return GEN_INT (boolean_type_class);
8837 if (code == POINTER_TYPE)
8838 return GEN_INT (pointer_type_class);
8839 if (code == REFERENCE_TYPE)
8840 return GEN_INT (reference_type_class);
8841 if (code == OFFSET_TYPE)
8842 return GEN_INT (offset_type_class);
8843 if (code == REAL_TYPE)
8844 return GEN_INT (real_type_class);
8845 if (code == COMPLEX_TYPE)
8846 return GEN_INT (complex_type_class);
8847 if (code == FUNCTION_TYPE)
8848 return GEN_INT (function_type_class);
8849 if (code == METHOD_TYPE)
8850 return GEN_INT (method_type_class);
8851 if (code == RECORD_TYPE)
8852 return GEN_INT (record_type_class);
8853 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8854 return GEN_INT (union_type_class);
8855 if (code == ARRAY_TYPE)
8856 {
8857 if (TYPE_STRING_FLAG (type))
8858 return GEN_INT (string_type_class);
8859 else
8860 return GEN_INT (array_type_class);
8861 }
8862 if (code == SET_TYPE)
8863 return GEN_INT (set_type_class);
8864 if (code == FILE_TYPE)
8865 return GEN_INT (file_type_class);
8866 if (code == LANG_TYPE)
8867 return GEN_INT (lang_type_class);
8868 }
8869 return GEN_INT (no_type_class);
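/* Editorial example (not part of the original source):

     __builtin_classify_type (0)    folds to integer_type_class
     __builtin_classify_type (0.0)  folds to real_type_class
     __builtin_classify_type ("s")  folds to pointer_type_class in C,
                                    the literal having decayed to a
                                    pointer before the builtin sees it.  */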
ca695ac9 8870
8871 case BUILT_IN_CONSTANT_P:
8872 if (arglist == 0)
8873 return const0_rtx;
8874 else
8875 {
8876 tree arg = TREE_VALUE (arglist);
ca695ac9 8877
b93a436e 8878 STRIP_NOPS (arg);
8879 if (really_constant_p (arg)
8880 || (TREE_CODE (arg) == ADDR_EXPR
8881 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
8882 return const1_rtx;
8883
8884 /* Only emit CONSTANT_P_RTX if CSE will be run.
8885 Moreover, we don't want to expand trees that have side effects,
8886 as the original __builtin_constant_p did not evaluate its
8887 argument at all, and we would break existing usage by changing
8888 this. This quirk was generally useful, eliminating a bit of hair
8889 in the writing of the macros that use this function. Now the
8890 same thing can be better accomplished in an inline function. */
8891
8892 if (! cse_not_expected && ! TREE_SIDE_EFFECTS (arg))
8893 {
8894 /* Lazy fixup of old code: issue a warning and fail the test. */
8895 if (! can_handle_constant_p)
8896 {
8897 warning ("Delayed evaluation of __builtin_constant_p not supported on this target.");
8898 warning ("Please report this as a bug to egcs-bugs@cygnus.com.");
8899 return const0_rtx;
8900 }
8901 return gen_rtx_CONSTANT_P_RTX (TYPE_MODE (integer_type_node),
8902 expand_expr (arg, NULL_RTX,
8903 VOIDmode, 0));
8904 }
8905
8906 return const0_rtx;
b93a436e 8907 }
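/* Editorial example (not part of the original source):

     __builtin_constant_p (3 * 7)  evaluates to 1
     __builtin_constant_p (x)      evaluates to 0 for an ordinary
                                   variable x

   Since the argument is never evaluated, it is safe inside macros,
   e.g. a hypothetical

     #define FOLDABLE(e) (__builtin_constant_p (e) ? (e) : fallback (e))

   which is the sort of hair-saving use the comment above alludes to.  */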
ca695ac9 8908
8909 case BUILT_IN_FRAME_ADDRESS:
8910 /* The argument must be a nonnegative integer constant.
8911 It counts the number of frames to scan up the stack.
8912 The value is the address of that frame. */
8913 case BUILT_IN_RETURN_ADDRESS:
8914 /* The argument must be a nonnegative integer constant.
8915 It counts the number of frames to scan up the stack.
8916 The value is the return address saved in that frame. */
8917 if (arglist == 0)
8918 /* Warning about missing arg was already issued. */
8919 return const0_rtx;
8920 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8921 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8922 {
8923 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8924 error ("invalid arg to `__builtin_frame_address'");
8925 else
8926 error ("invalid arg to `__builtin_return_address'");
8927 return const0_rtx;
8928 }
8929 else
8930 {
8931 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8932 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8933 hard_frame_pointer_rtx);
ee33823f 8934
8935 /* Some ports cannot access arbitrary stack frames. */
8936 if (tem == NULL)
8937 {
8938 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8939 warning ("unsupported arg to `__builtin_frame_address'");
8940 else
8941 warning ("unsupported arg to `__builtin_return_address'");
8942 return const0_rtx;
8943 }
ee33823f 8944
8945 /* For __builtin_frame_address, return what we've got. */
8946 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8947 return tem;
ee33823f 8948
8949 if (GET_CODE (tem) != REG)
8950 tem = copy_to_reg (tem);
8951 return tem;
8952 }
ee33823f 8953
8954 /* Returns the address of the area where the structure is returned.
8955 0 otherwise. */
8956 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8957 if (arglist != 0
8958 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8959 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8960 return const0_rtx;
8961 else
8962 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
ee33823f 8963
8964 case BUILT_IN_ALLOCA:
8965 if (arglist == 0
8966 /* Arg could be non-integer if user redeclared this fcn wrong. */
8967 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8968 break;
bbf6f052 8969
8970 /* Compute the argument. */
8971 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
bbf6f052 8972
8973 /* Allocate the desired space. */
8974 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
ca695ac9 8975
8976 case BUILT_IN_FFS:
8977 /* If not optimizing, call the library function. */
8978 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8979 break;
ca695ac9 8980
8981 if (arglist == 0
8982 /* Arg could be non-integer if user redeclared this fcn wrong. */
8983 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8984 break;
ca695ac9 8985
8986 /* Compute the argument. */
8987 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8988 /* Compute ffs, into TARGET if possible.
8989 Set TARGET to wherever the result comes back. */
8990 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8991 ffs_optab, op0, target, 1);
8992 if (target == 0)
8993 abort ();
8994 return target;
bbf6f052 8995
8996 case BUILT_IN_STRLEN:
8997 /* If not optimizing, call the library function. */
8998 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8999 break;
bbf6f052 9000
9001 if (arglist == 0
9002 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9003 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9004 break;
9005 else
9006 {
9007 tree src = TREE_VALUE (arglist);
9008 tree len = c_strlen (src);
bbf6f052 9009
9010 int align
9011 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
46b68a37 9012
9013 rtx result, src_rtx, char_rtx;
9014 enum machine_mode insn_mode = value_mode, char_mode;
9015 enum insn_code icode;
46b68a37 9016
9017 /* If the length is known, just return it. */
9018 if (len != 0)
9019 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
956d6950 9020
9021 /* If SRC is not a pointer type, don't do this operation inline. */
9022 if (align == 0)
9023 break;
bbf6f052 9024
b93a436e 9025 /* Call a function if we can't compute strlen in the right mode. */
bbf6f052 9026
9027 while (insn_mode != VOIDmode)
9028 {
9029 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
9030 if (icode != CODE_FOR_nothing)
9031 break;
ca695ac9 9032
9033 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
9034 }
9035 if (insn_mode == VOIDmode)
9036 break;
ca695ac9 9037
9038 /* Make a place to write the result of the instruction. */
9039 result = target;
9040 if (! (result != 0
9041 && GET_CODE (result) == REG
9042 && GET_MODE (result) == insn_mode
9043 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9044 result = gen_reg_rtx (insn_mode);
ca695ac9 9045
b93a436e 9046 /* Make sure the operands are acceptable to the predicates. */
ca695ac9 9047
9048 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
9049 result = gen_reg_rtx (insn_mode);
9050 src_rtx = memory_address (BLKmode,
9051 expand_expr (src, NULL_RTX, ptr_mode,
9052 EXPAND_NORMAL));
bbf6f052 9053
9054 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
9055 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
bbf6f052 9056
 9057 /* Check that the string is readable and has an end. */
9058 if (flag_check_memory_usage)
9059 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
9060 src_rtx, ptr_mode,
9061 GEN_INT (MEMORY_USE_RO),
9062 TYPE_MODE (integer_type_node));
bbf6f052 9063
9064 char_rtx = const0_rtx;
9065 char_mode = insn_operand_mode[(int)icode][2];
9066 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
9067 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
bbf6f052 9068
9069 emit_insn (GEN_FCN (icode) (result,
9070 gen_rtx_MEM (BLKmode, src_rtx),
9071 char_rtx, GEN_INT (align)));
bbf6f052 9072
9073 /* Return the value in the proper mode for this function. */
9074 if (GET_MODE (result) == value_mode)
9075 return result;
9076 else if (target != 0)
9077 {
9078 convert_move (target, result, 0);
9079 return target;
9080 }
9081 else
9082 return convert_to_mode (value_mode, result, 0);
9083 }
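/* Editorial example (not part of the original source): with the code
   above and optimization enabled,

     n = strlen ("hello");  folds to the constant 5 via c_strlen
     n = strlen (p);        emits the target's strlen insn, when the
                            strlen_optab has one for a suitable mode,
                            instead of a library call

   where p is a hypothetical char * of computable alignment.  */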
bbf6f052 9084
9085 case BUILT_IN_STRCPY:
9086 /* If not optimizing, call the library function. */
9087 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9088 break;
bbf6f052 9089
9090 if (arglist == 0
9091 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9092 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9093 || TREE_CHAIN (arglist) == 0
9094 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9095 break;
9096 else
9097 {
9098 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
bbf6f052 9099
9100 if (len == 0)
9101 break;
bbf6f052 9102
b93a436e 9103 len = size_binop (PLUS_EXPR, len, integer_one_node);
6d100794 9104
9105 chainon (arglist, build_tree_list (NULL_TREE, len));
9106 }
6d100794 9107
 9108 /* Falls through. */
9109 case BUILT_IN_MEMCPY:
9110 /* If not optimizing, call the library function. */
9111 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9112 break;
e7c33f54 9113
9114 if (arglist == 0
9115 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9116 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9117 || TREE_CHAIN (arglist) == 0
9118 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9119 != POINTER_TYPE)
9120 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9121 || (TREE_CODE (TREE_TYPE (TREE_VALUE
9122 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9123 != INTEGER_TYPE))
9124 break;
9125 else
9126 {
9127 tree dest = TREE_VALUE (arglist);
9128 tree src = TREE_VALUE (TREE_CHAIN (arglist));
9129 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
e7c33f54 9130
9131 int src_align
9132 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9133 int dest_align
9134 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
55a6ba9f 9135 rtx dest_mem, src_mem, dest_addr, len_rtx;
e7c33f54 9136
9137 /* If either SRC or DEST is not a pointer type, don't do
9138 this operation in-line. */
9139 if (src_align == 0 || dest_align == 0)
9140 {
9141 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9142 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9143 break;
9144 }
e7c33f54 9145
9146 dest_mem = get_memory_rtx (dest);
9147 src_mem = get_memory_rtx (src);
b93a436e 9148 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
e7c33f54 9149
9150 /* Just copy the rights of SRC to the rights of DEST. */
9151 if (flag_check_memory_usage)
9152 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
9153 XEXP (dest_mem, 0), ptr_mode,
9154 XEXP (src_mem, 0), ptr_mode,
b93a436e 9155 len_rtx, TYPE_MODE (sizetype));
e7c33f54 9156
9157 /* Copy word part most expediently. */
9158 dest_addr
9159 = emit_block_move (dest_mem, src_mem, len_rtx,
9160 MIN (src_align, dest_align));
e7c33f54 9161
b93a436e 9162 if (dest_addr == 0)
55a6ba9f 9163 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
e7c33f54 9164
9165 return dest_addr;
9166 }
e7c33f54 9167
9168 case BUILT_IN_MEMSET:
9169 /* If not optimizing, call the library function. */
9170 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9171 break;
e7c33f54 9172
9173 if (arglist == 0
9174 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9175 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9176 || TREE_CHAIN (arglist) == 0
9177 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9178 != INTEGER_TYPE)
9179 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9180 || (INTEGER_TYPE
9181 != (TREE_CODE (TREE_TYPE
9182 (TREE_VALUE
9183 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
9184 break;
9185 else
9186 {
9187 tree dest = TREE_VALUE (arglist);
9188 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9189 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
e7c33f54 9190
9191 int dest_align
9192 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
55a6ba9f 9193 rtx dest_mem, dest_addr, len_rtx;
e7c33f54 9194
9195 /* If DEST is not a pointer type, don't do this
9196 operation in-line. */
9197 if (dest_align == 0)
9198 break;
bbf6f052 9199
9200 /* If the arguments have side-effects, then we can only evaluate
9201 them at most once. The following code evaluates them twice if
9202 they are not constants because we break out to expand_call
9203 in that case. They can't be constants if they have side-effects
9204 so we can check for that first. Alternatively, we could call
9205 save_expr to make multiple evaluation safe. */
9206 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9207 break;
9208
9209 /* If VAL is not 0, don't do this operation in-line. */
9210 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9211 break;
bbf6f052 9212
9213 /* If LEN does not expand to a constant, don't do this
9214 operation in-line. */
9215 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9216 if (GET_CODE (len_rtx) != CONST_INT)
9217 break;
bbf6f052 9218
55a6ba9f 9219 dest_mem = get_memory_rtx (dest);
9220
 9221 /* Just check that DEST is writable, and mark it as readable. */
9222 if (flag_check_memory_usage)
9223 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
55a6ba9f 9224 XEXP (dest_mem, 0), ptr_mode,
9225 len_rtx, TYPE_MODE (sizetype),
9226 GEN_INT (MEMORY_USE_WO),
9227 TYPE_MODE (integer_type_node));
bbf6f052 9228
bbf6f052 9229
b93a436e 9230 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
bbf6f052 9231
b93a436e 9232 if (dest_addr == 0)
55a6ba9f 9233 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
bbf6f052 9234
9235 return dest_addr;
9236 }
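/* Editorial example (not part of the original source): the guards above
   mean that only calls of the shape

     memset (p, 0, 64);

   are expanded inline: the value must be zero, the length a
   compile-time constant, the arguments free of side effects, and p a
   pointer whose alignment can be computed.  Anything else falls back
   to the library function.  */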
bbf6f052 9237
9238/* These comparison functions need an instruction that returns an actual
9239 index. An ordinary compare that just sets the condition codes
9240 is not enough. */
9241#ifdef HAVE_cmpstrsi
9242 case BUILT_IN_STRCMP:
9243 /* If not optimizing, call the library function. */
9244 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9245 break;
bbf6f052 9246
9247 /* If we need to check memory accesses, call the library function. */
9248 if (flag_check_memory_usage)
9249 break;
bbf6f052 9250
9251 if (arglist == 0
9252 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9253 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9254 || TREE_CHAIN (arglist) == 0
9255 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9256 break;
9257 else if (!HAVE_cmpstrsi)
9258 break;
9259 {
9260 tree arg1 = TREE_VALUE (arglist);
9261 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
b93a436e 9262 tree len, len2;
a97f5a86 9263
9264 len = c_strlen (arg1);
9265 if (len)
9266 len = size_binop (PLUS_EXPR, integer_one_node, len);
9267 len2 = c_strlen (arg2);
9268 if (len2)
9269 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
e9cdf6e4 9270
9271 /* If we don't have a constant length for the first, use the length
9272 of the second, if we know it. We don't require a constant for
9273 this case; some cost analysis could be done if both are available
9274 but neither is constant. For now, assume they're equally cheap.
e9cdf6e4 9275
9276 If both strings have constant lengths, use the smaller. This
9277 could arise if optimization results in strcpy being called with
9278 two fixed strings, or if the code was machine-generated. We should
9279 add some code to the `memcmp' handler below to deal with such
9280 situations, someday. */
9281 if (!len || TREE_CODE (len) != INTEGER_CST)
9282 {
9283 if (len2)
9284 len = len2;
9285 else if (len == 0)
9286 break;
9287 }
9288 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9289 {
9290 if (tree_int_cst_lt (len2, len))
9291 len = len2;
9292 }
bbf6f052 9293
9294 chainon (arglist, build_tree_list (NULL_TREE, len));
9295 }
bbf6f052 9296
 9297 /* Falls through. */
9298 case BUILT_IN_MEMCMP:
9299 /* If not optimizing, call the library function. */
9300 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9301 break;
bbf6f052 9302
9303 /* If we need to check memory accesses, call the library function. */
9304 if (flag_check_memory_usage)
9305 break;
bbf6f052 9306
9307 if (arglist == 0
9308 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9309 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9310 || TREE_CHAIN (arglist) == 0
9311 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9312 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9313 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9314 break;
9315 else if (!HAVE_cmpstrsi)
9316 break;
9317 {
9318 tree arg1 = TREE_VALUE (arglist);
9319 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9320 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9321 rtx result;
0842a179 9322
9323 int arg1_align
9324 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9325 int arg2_align
9326 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9327 enum machine_mode insn_mode
9328 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
0842a179 9329
9330 /* If we don't have POINTER_TYPE, call the function. */
9331 if (arg1_align == 0 || arg2_align == 0)
9332 {
9333 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9334 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9335 break;
9336 }
bbf6f052 9337
9338 /* Make a place to write the result of the instruction. */
9339 result = target;
9340 if (! (result != 0
9341 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9342 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9343 result = gen_reg_rtx (insn_mode);
bbf6f052 9344
9345 emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9346 get_memory_rtx (arg2),
9347 expand_expr (len, NULL_RTX, VOIDmode, 0),
9348 GEN_INT (MIN (arg1_align, arg2_align))));
bbf6f052 9349
9350 /* Return the value in the proper mode for this function. */
9351 mode = TYPE_MODE (TREE_TYPE (exp));
9352 if (GET_MODE (result) == mode)
9353 return result;
9354 else if (target != 0)
9355 {
9356 convert_move (target, result, 0);
9357 return target;
9358 }
9359 else
9360 return convert_to_mode (mode, result, 0);
9361 }
9362#else
9363 case BUILT_IN_STRCMP:
9364 case BUILT_IN_MEMCMP:
9365 break;
9366#endif
bbf6f052 9367
9368 case BUILT_IN_SETJMP:
9369 if (arglist == 0
9370 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9371 break;
9372 else
9373 {
9374 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9375 VOIDmode, 0);
9376 rtx lab = gen_label_rtx ();
9377 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
9378 emit_label (lab);
9379 return ret;
9380 }
bbf6f052 9381
9382 /* __builtin_longjmp is passed a pointer to an array of five words.
9383 It's similar to the C library longjmp function but works with
9384 __builtin_setjmp above. */
9385 case BUILT_IN_LONGJMP:
9386 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9387 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9388 break;
b93a436e 9389 else
b93a436e 9390 {
9391 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9392 VOIDmode, 0);
9393 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
3e2b9a3d 9394 NULL_RTX, VOIDmode, 0);
9395
9396 if (value != const1_rtx)
9397 {
9398 error ("__builtin_longjmp second argument must be 1");
9399 return const0_rtx;
9400 }
9401
9402 expand_builtin_longjmp (buf_addr, value);
9403 return const0_rtx;
b93a436e 9404 }
bbf6f052 9405
9406 case BUILT_IN_TRAP:
9407#ifdef HAVE_trap
9408 if (HAVE_trap)
9409 emit_insn (gen_trap ());
9410 else
9411#endif
9412 error ("__builtin_trap not supported by this target");
9413 emit_barrier ();
9414 return const0_rtx;
9415
9416 /* Various hooks for the DWARF 2 __throw routine. */
9417 case BUILT_IN_UNWIND_INIT:
9418 expand_builtin_unwind_init ();
9419 return const0_rtx;
9420 case BUILT_IN_DWARF_CFA:
9421 return virtual_cfa_rtx;
9422#ifdef DWARF2_UNWIND_INFO
9423 case BUILT_IN_DWARF_FP_REGNUM:
9424 return expand_builtin_dwarf_fp_regnum ();
9425 case BUILT_IN_DWARF_REG_SIZE:
9426 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
fb2ca25a 9427#endif
9428 case BUILT_IN_FROB_RETURN_ADDR:
9429 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9430 case BUILT_IN_EXTRACT_RETURN_ADDR:
9431 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9432 case BUILT_IN_EH_RETURN:
9433 expand_builtin_eh_return (TREE_VALUE (arglist),
9434 TREE_VALUE (TREE_CHAIN (arglist)),
9435 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
b93a436e 9436 return const0_rtx;
ca695ac9 9437
 9438 default: /* Just do a library call for any unknown builtin. */
9439 error ("built-in function `%s' not currently supported",
9440 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
ca695ac9 9441 }
0006469d 9442
9443 /* The switch statement above can drop through to cause the function
9444 to be called normally. */
0006469d 9445
b93a436e 9446 return expand_call (exp, target, ignore);
ca695ac9 9447}
9448\f
9449/* Built-in functions to perform an untyped call and return. */
0006469d 9450
9451/* For each register that may be used for calling a function, this
9452 gives a mode used to copy the register's value. VOIDmode indicates
9453 the register is not used for calling a function. If the machine
9454 has register windows, this gives only the outbound registers.
9455 INCOMING_REGNO gives the corresponding inbound register. */
9456static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
0006469d 9457
9458/* For each register that may be used for returning values, this gives
9459 a mode used to copy the register's value. VOIDmode indicates the
9460 register is not used for returning values. If the machine has
9461 register windows, this gives only the outbound registers.
9462 INCOMING_REGNO gives the corresponding inbound register. */
9463static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
0006469d 9464
9465/* For each register that may be used for calling a function, this
9466 gives the offset of that register into the block returned by
9467 __builtin_apply_args. 0 indicates that the register is not
9468 used for calling a function. */
9469static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9470
9471/* Return the offset of register REGNO into the block returned by
9472 __builtin_apply_args. This is not declared static, since it is
9473 needed in objc-act.c. */
0006469d 9474
9475int
9476apply_args_register_offset (regno)
9477 int regno;
9478{
9479 apply_args_size ();
0006469d 9480
9481 /* Arguments are always put in outgoing registers (in the argument
 9482 block) when that makes sense. */
9483#ifdef OUTGOING_REGNO
9484 regno = OUTGOING_REGNO(regno);
9485#endif
9486 return apply_args_reg_offset[regno];
9487}
904762c8 9488
9489/* Return the size required for the block returned by __builtin_apply_args,
9490 and initialize apply_args_mode. */
9491
9492static int
9493apply_args_size ()
0006469d 9494{
9495 static int size = -1;
9496 int align, regno;
2f6e6d22 9497 enum machine_mode mode;
0006469d 9498
9499 /* The values computed by this function never change. */
9500 if (size < 0)
ca695ac9 9501 {
9502 /* The first value is the incoming arg-pointer. */
9503 size = GET_MODE_SIZE (Pmode);
0006469d 9504
9505 /* The second value is the structure value address unless this is
9506 passed as an "invisible" first argument. */
9507 if (struct_value_rtx)
9508 size += GET_MODE_SIZE (Pmode);
0006469d 9509
9510 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9511 if (FUNCTION_ARG_REGNO_P (regno))
9512 {
9513 /* Search for the proper mode for copying this register's
9514 value. I'm not sure this is right, but it works so far. */
9515 enum machine_mode best_mode = VOIDmode;
0006469d 9516
9517 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9518 mode != VOIDmode;
9519 mode = GET_MODE_WIDER_MODE (mode))
9520 if (HARD_REGNO_MODE_OK (regno, mode)
9521 && HARD_REGNO_NREGS (regno, mode) == 1)
9522 best_mode = mode;
0006469d 9523
9524 if (best_mode == VOIDmode)
9525 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9526 mode != VOIDmode;
9527 mode = GET_MODE_WIDER_MODE (mode))
9528 if (HARD_REGNO_MODE_OK (regno, mode)
9529 && (mov_optab->handlers[(int) mode].insn_code
9530 != CODE_FOR_nothing))
9531 best_mode = mode;
0006469d 9532
9533 mode = best_mode;
9534 if (mode == VOIDmode)
9535 abort ();
904762c8 9536
9537 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9538 if (size % align != 0)
9539 size = CEIL (size, align) * align;
9540 apply_args_reg_offset[regno] = size;
9541 size += GET_MODE_SIZE (mode);
9542 apply_args_mode[regno] = mode;
9543 }
9544 else
9545 {
9546 apply_args_mode[regno] = VOIDmode;
9547 apply_args_reg_offset[regno] = 0;
9548 }
9549 }
9550 return size;
9551}
0006469d 9552
9553/* Return the size required for the block returned by __builtin_apply,
9554 and initialize apply_result_mode. */
904762c8 9555
9556static int
9557apply_result_size ()
9558{
9559 static int size = -1;
9560 int align, regno;
9561 enum machine_mode mode;
0006469d 9562
9563 /* The values computed by this function never change. */
9564 if (size < 0)
9565 {
9566 size = 0;
0006469d 9567
9568 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9569 if (FUNCTION_VALUE_REGNO_P (regno))
9570 {
9571 /* Search for the proper mode for copying this register's
9572 value. I'm not sure this is right, but it works so far. */
9573 enum machine_mode best_mode = VOIDmode;
0006469d 9574
9575 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9576 mode != TImode;
9577 mode = GET_MODE_WIDER_MODE (mode))
9578 if (HARD_REGNO_MODE_OK (regno, mode))
9579 best_mode = mode;
0006469d 9580
9581 if (best_mode == VOIDmode)
9582 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9583 mode != VOIDmode;
9584 mode = GET_MODE_WIDER_MODE (mode))
9585 if (HARD_REGNO_MODE_OK (regno, mode)
9586 && (mov_optab->handlers[(int) mode].insn_code
9587 != CODE_FOR_nothing))
9588 best_mode = mode;
0006469d 9589
9590 mode = best_mode;
9591 if (mode == VOIDmode)
9592 abort ();
9593
9594 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9595 if (size % align != 0)
9596 size = CEIL (size, align) * align;
9597 size += GET_MODE_SIZE (mode);
9598 apply_result_mode[regno] = mode;
9599 }
9600 else
9601 apply_result_mode[regno] = VOIDmode;
9602
9603 /* Allow targets that use untyped_call and untyped_return to override
9604 the size so that machine-specific information can be stored here. */
9605#ifdef APPLY_RESULT_SIZE
9606 size = APPLY_RESULT_SIZE;
9607#endif
9608 }
9609 return size;
9610}
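/* Editorial example (not part of the original source): for a
   hypothetical target with 4-byte Pmode, one 4-byte integer argument
   register and one 8-byte, 8-byte-aligned FP argument register, the
   layout computed by apply_args_size would be

     offset  0:  incoming arg pointer        (4 bytes)
     offset  4:  structure value address     (4 bytes, if used)
     offset  8:  integer argument register   (4 bytes)
     offset 16:  FP argument register        (8 bytes, after padding)

   for a total size of 24 bytes.  */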
0006469d 9611
9612#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9613/* Create a vector describing the result block RESULT. If SAVEP is true,
9614 the result block is used to save the values; otherwise it is used to
9615 restore the values. */
9616
9617static rtx
9618result_vector (savep, result)
9619 int savep;
9620 rtx result;
9621{
9622 int regno, size, align, nelts;
9623 enum machine_mode mode;
9624 rtx reg, mem;
9625 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9626
9627 size = nelts = 0;
9628 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9629 if ((mode = apply_result_mode[regno]) != VOIDmode)
9630 {
9631 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9632 if (size % align != 0)
9633 size = CEIL (size, align) * align;
9634 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9635 mem = change_address (result, mode,
9636 plus_constant (XEXP (result, 0), size));
9637 savevec[nelts++] = (savep
9638 ? gen_rtx_SET (VOIDmode, mem, reg)
9639 : gen_rtx_SET (VOIDmode, reg, mem));
9640 size += GET_MODE_SIZE (mode);
ca695ac9 9641 }
b93a436e
JL
9642 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9643}
9644#endif /* HAVE_untyped_call or HAVE_untyped_return */
0006469d 9645
b93a436e
JL
9646/* Save the state required to perform an untyped call with the same
9647 arguments as were passed to the current function. */
904762c8 9648
b93a436e
JL
9649static rtx
9650expand_builtin_apply_args ()
9651{
9652 rtx registers;
9653 int size, align, regno;
9654 enum machine_mode mode;
0006469d 9655
b93a436e
JL
9656 /* Create a block where the arg-pointer, structure value address,
9657 and argument registers can be saved. */
9658 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
0cb1d109 9659
b93a436e
JL
9660 /* Walk past the arg-pointer and structure value address. */
9661 size = GET_MODE_SIZE (Pmode);
9662 if (struct_value_rtx)
9663 size += GET_MODE_SIZE (Pmode);
0cb1d109 9664
b93a436e
JL
9665 /* Save each register used in calling a function to the block. */
9666 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9667 if ((mode = apply_args_mode[regno]) != VOIDmode)
9668 {
9669 rtx tem;
0cb1d109 9670
b93a436e
JL
9671 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9672 if (size % align != 0)
9673 size = CEIL (size, align) * align;
0006469d 9674
b93a436e 9675 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
0e8c9172 9676
b93a436e
JL
9677#ifdef STACK_REGS
9678      /* For reg-stack.c's stack register housekeeping.
9679 Compare with a similar piece of code in function.c. */
0006469d 9680
b93a436e
JL
9681 emit_insn (gen_rtx_USE (mode, tem));
9682#endif
0e8c9172 9683
b93a436e
JL
9684 emit_move_insn (change_address (registers, mode,
9685 plus_constant (XEXP (registers, 0),
9686 size)),
9687 tem);
9688 size += GET_MODE_SIZE (mode);
0e8c9172 9689 }
0006469d 9690
b93a436e
JL
9691 /* Save the arg pointer to the block. */
9692 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9693 copy_to_reg (virtual_incoming_args_rtx));
9694 size = GET_MODE_SIZE (Pmode);
0006469d 9695
b93a436e
JL
9696 /* Save the structure value address unless this is passed as an
9697 "invisible" first argument. */
9698 if (struct_value_incoming_rtx)
9699 {
9700 emit_move_insn (change_address (registers, Pmode,
9701 plus_constant (XEXP (registers, 0),
9702 size)),
9703 copy_to_reg (struct_value_incoming_rtx));
9704 size += GET_MODE_SIZE (Pmode);
9705 }
0006469d 9706
b93a436e
JL
9707 /* Return the address of the block. */
9708 return copy_addr_to_reg (XEXP (registers, 0));
9709}
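/* Illustrative note, not in the original source: at the C level this
   machinery is reached through the __builtin_apply_args GNU extension,
   e.g.

     void *args = __builtin_apply_args ();

   which yields the address of the block built above, ready to be handed
   to __builtin_apply.  */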
0006469d 9710
b93a436e
JL
9711/* Perform an untyped call and save the state required to perform an
9712 untyped return of whatever value was returned by the given function. */
0006469d 9713
b93a436e
JL
9714static rtx
9715expand_builtin_apply (function, arguments, argsize)
9716 rtx function, arguments, argsize;
9717{
9718 int size, align, regno;
9719 enum machine_mode mode;
9720 rtx incoming_args, result, reg, dest, call_insn;
9721 rtx old_stack_level = 0;
9722 rtx call_fusage = 0;
0006469d 9723
b93a436e
JL
9724 /* Create a block where the return registers can be saved. */
9725 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9726
9727 /* ??? The argsize value should be adjusted here. */
9728
9729 /* Fetch the arg pointer from the ARGUMENTS block. */
9730 incoming_args = gen_reg_rtx (Pmode);
9731 emit_move_insn (incoming_args,
9732 gen_rtx_MEM (Pmode, arguments));
9733#ifndef STACK_GROWS_DOWNWARD
9734 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9735 incoming_args, 0, OPTAB_LIB_WIDEN);
9736#endif
9737
9738 /* Perform postincrements before actually calling the function. */
ca695ac9 9739 emit_queue ();
0006469d 9740
b93a436e
JL
9741 /* Push a new argument block and copy the arguments. */
9742 do_pending_stack_adjust ();
0006469d 9743
b93a436e
JL
9744  /* Save the stack with nonlocal if available.  */
9745#ifdef HAVE_save_stack_nonlocal
9746 if (HAVE_save_stack_nonlocal)
9747 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9748 else
9749#endif
9750 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
0006469d 9751
b93a436e
JL
9752 /* Push a block of memory onto the stack to store the memory arguments.
9753 Save the address in a register, and copy the memory arguments. ??? I
9754     haven't figured out how the calling convention macros affect this,
9755     but it's likely that the source and/or destination addresses in
9756     the block copy will need updating in machine-specific ways. */
9757 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9758 emit_block_move (gen_rtx_MEM (BLKmode, dest),
9759 gen_rtx_MEM (BLKmode, incoming_args),
9760 argsize,
9761 PARM_BOUNDARY / BITS_PER_UNIT);
9762
9763 /* Refer to the argument block. */
9764 apply_args_size ();
9765 arguments = gen_rtx_MEM (BLKmode, arguments);
9766
9767 /* Walk past the arg-pointer and structure value address. */
9768 size = GET_MODE_SIZE (Pmode);
9769 if (struct_value_rtx)
9770 size += GET_MODE_SIZE (Pmode);
9771
9772 /* Restore each of the registers previously saved. Make USE insns
9773 for each of these registers for use in making the call. */
9774 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9775 if ((mode = apply_args_mode[regno]) != VOIDmode)
9776 {
9777 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9778 if (size % align != 0)
9779 size = CEIL (size, align) * align;
9780 reg = gen_rtx_REG (mode, regno);
9781 emit_move_insn (reg,
9782 change_address (arguments, mode,
9783 plus_constant (XEXP (arguments, 0),
9784 size)));
9785
9786 use_reg (&call_fusage, reg);
9787 size += GET_MODE_SIZE (mode);
9788 }
9789
9790 /* Restore the structure value address unless this is passed as an
9791 "invisible" first argument. */
9792 size = GET_MODE_SIZE (Pmode);
9793 if (struct_value_rtx)
0006469d 9794 {
b93a436e
JL
9795 rtx value = gen_reg_rtx (Pmode);
9796 emit_move_insn (value,
9797 change_address (arguments, Pmode,
9798 plus_constant (XEXP (arguments, 0),
9799 size)));
9800 emit_move_insn (struct_value_rtx, value);
9801 if (GET_CODE (struct_value_rtx) == REG)
9802 use_reg (&call_fusage, struct_value_rtx);
9803 size += GET_MODE_SIZE (Pmode);
ca695ac9 9804 }
0006469d 9805
b93a436e
JL
9806 /* All arguments and registers used for the call are set up by now! */
9807 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
0006469d 9808
b93a436e
JL
9809 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9810 and we don't want to load it into a register as an optimization,
9811 because prepare_call_address already did it if it should be done. */
9812 if (GET_CODE (function) != SYMBOL_REF)
9813 function = memory_address (FUNCTION_MODE, function);
0006469d 9814
b93a436e
JL
9815 /* Generate the actual call instruction and save the return value. */
9816#ifdef HAVE_untyped_call
9817 if (HAVE_untyped_call)
9818 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
9819 result, result_vector (1, result)));
9820 else
9821#endif
9822#ifdef HAVE_call_value
9823 if (HAVE_call_value)
ca695ac9 9824 {
b93a436e 9825 rtx valreg = 0;
0006469d 9826
b93a436e
JL
9827 /* Locate the unique return register. It is not possible to
9828 express a call that sets more than one return register using
9829 call_value; use untyped_call for that. In fact, untyped_call
9830 only needs to save the return registers in the given block. */
9831 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9832 if ((mode = apply_result_mode[regno]) != VOIDmode)
9833 {
9834 if (valreg)
9835 abort (); /* HAVE_untyped_call required. */
9836 valreg = gen_rtx_REG (mode, regno);
9837 }
0006469d 9838
b93a436e
JL
9839 emit_call_insn (gen_call_value (valreg,
9840 gen_rtx_MEM (FUNCTION_MODE, function),
9841 const0_rtx, NULL_RTX, const0_rtx));
0006469d 9842
b93a436e
JL
9843 emit_move_insn (change_address (result, GET_MODE (valreg),
9844 XEXP (result, 0)),
9845 valreg);
ca695ac9 9846 }
b93a436e
JL
9847 else
9848#endif
9849 abort ();
0006469d 9850
b93a436e
JL
9851 /* Find the CALL insn we just emitted. */
9852 for (call_insn = get_last_insn ();
9853 call_insn && GET_CODE (call_insn) != CALL_INSN;
9854 call_insn = PREV_INSN (call_insn))
9855 ;
0006469d 9856
b93a436e
JL
9857 if (! call_insn)
9858 abort ();
0006469d 9859
b93a436e
JL
9860 /* Put the register usage information on the CALL. If there is already
9861 some usage information, put ours at the end. */
9862 if (CALL_INSN_FUNCTION_USAGE (call_insn))
0006469d 9863 {
b93a436e 9864 rtx link;
0006469d 9865
b93a436e
JL
9866 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9867 link = XEXP (link, 1))
9868 ;
9869
9870 XEXP (link, 1) = call_fusage;
ca695ac9 9871 }
b93a436e
JL
9872 else
9873 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
0006469d 9874
b93a436e
JL
9875 /* Restore the stack. */
9876#ifdef HAVE_save_stack_nonlocal
9877 if (HAVE_save_stack_nonlocal)
9878 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
9879 else
9880#endif
9881 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9882
9883 /* Return the address of the result block. */
9884 return copy_addr_to_reg (XEXP (result, 0));
0006469d 9885}
bbf6f052 9886
b93a436e 9887/* Perform an untyped return. */
ca695ac9
JB
9888
9889static void
b93a436e
JL
9890expand_builtin_return (result)
9891 rtx result;
bbf6f052 9892{
b93a436e
JL
9893 int size, align, regno;
9894 enum machine_mode mode;
9895 rtx reg;
9896 rtx call_fusage = 0;
bbf6f052 9897
b93a436e
JL
9898 apply_result_size ();
9899 result = gen_rtx_MEM (BLKmode, result);
bbf6f052 9900
b93a436e
JL
9901#ifdef HAVE_untyped_return
9902 if (HAVE_untyped_return)
ca695ac9 9903 {
b93a436e
JL
9904 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9905 emit_barrier ();
9906 return;
ca695ac9 9907 }
b93a436e 9908#endif
1499e0a8 9909
b93a436e
JL
9910 /* Restore the return value and note that each value is used. */
9911 size = 0;
9912 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9913 if ((mode = apply_result_mode[regno]) != VOIDmode)
9914 {
9915 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9916 if (size % align != 0)
9917 size = CEIL (size, align) * align;
9918 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
9919 emit_move_insn (reg,
9920 change_address (result, mode,
9921 plus_constant (XEXP (result, 0),
9922 size)));
9923
9924 push_to_sequence (call_fusage);
9925 emit_insn (gen_rtx_USE (VOIDmode, reg));
9926 call_fusage = get_insns ();
9927 end_sequence ();
9928 size += GET_MODE_SIZE (mode);
9929 }
9930
9931 /* Put the USE insns before the return. */
9932 emit_insns (call_fusage);
9933
9934  /* Return whatever values were restored by jumping directly to the end
9935 of the function. */
9936 expand_null_return ();
ca695ac9
JB
9937}
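/* Illustrative sketch, not in the original source, of how the three
   builtins expanded above combine to forward a call; `target' and the
   argument-block size 64 are hypothetical, and a real caller must pick
   a size large enough for the pushed arguments:

     double target ();

     double
     forward ()
     {
       void *args = __builtin_apply_args ();
       void *result = __builtin_apply ((void (*)()) target, args, 64);
       __builtin_return (result);
     }
*/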
9938\f
b93a436e
JL
9939/* Expand code for a post- or pre- increment or decrement
9940 and return the RTX for the result.
9941 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
1499e0a8 9942
b93a436e
JL
9943static rtx
9944expand_increment (exp, post, ignore)
9945 register tree exp;
9946 int post, ignore;
ca695ac9 9947{
b93a436e
JL
9948 register rtx op0, op1;
9949 register rtx temp, value;
9950 register tree incremented = TREE_OPERAND (exp, 0);
9951 optab this_optab = add_optab;
9952 int icode;
9953 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9954 int op0_is_copy = 0;
9955 int single_insn = 0;
9956 /* 1 means we can't store into OP0 directly,
9957 because it is a subreg narrower than a word,
9958 and we don't dare clobber the rest of the word. */
9959 int bad_subreg = 0;
1499e0a8 9960
b93a436e
JL
9961 /* Stabilize any component ref that might need to be
9962 evaluated more than once below. */
9963 if (!post
9964 || TREE_CODE (incremented) == BIT_FIELD_REF
9965 || (TREE_CODE (incremented) == COMPONENT_REF
9966 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9967 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9968 incremented = stabilize_reference (incremented);
9969 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9970 ones into save exprs so that they don't accidentally get evaluated
9971 more than once by the code below. */
9972 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9973 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9974 incremented = save_expr (incremented);
e9a25f70 9975
b93a436e
JL
9976 /* Compute the operands as RTX.
9977 Note whether OP0 is the actual lvalue or a copy of it:
9978 I believe it is a copy iff it is a register or subreg
9979 and insns were generated in computing it. */
e9a25f70 9980
b93a436e
JL
9981 temp = get_last_insn ();
9982 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
e9a25f70 9983
b93a436e
JL
9984 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9985 in place but instead must do sign- or zero-extension during assignment,
9986 so we copy it into a new register and let the code below use it as
9987 a copy.
e9a25f70 9988
b93a436e
JL
9989     Note that we can safely modify this SUBREG since it is known not to be
9990 shared (it was made by the expand_expr call above). */
9991
9992 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9993 {
9994 if (post)
9995 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9996 else
9997 bad_subreg = 1;
9998 }
9999 else if (GET_CODE (op0) == SUBREG
10000 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
10001 {
10002 /* We cannot increment this SUBREG in place. If we are
10003 post-incrementing, get a copy of the old value. Otherwise,
10004 just mark that we cannot increment in place. */
10005 if (post)
10006 op0 = copy_to_reg (op0);
10007 else
10008 bad_subreg = 1;
e9a25f70
JL
10009 }
10010
b93a436e
JL
10011 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
10012 && temp != get_last_insn ());
10013 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
10014 EXPAND_MEMORY_USE_BAD);
1499e0a8 10015
b93a436e
JL
10016 /* Decide whether incrementing or decrementing. */
10017 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
10018 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10019 this_optab = sub_optab;
10020
10021 /* Convert decrement by a constant into a negative increment. */
10022 if (this_optab == sub_optab
10023 && GET_CODE (op1) == CONST_INT)
ca695ac9 10024 {
b93a436e
JL
10025 op1 = GEN_INT (- INTVAL (op1));
10026 this_optab = add_optab;
ca695ac9 10027 }
1499e0a8 10028
b93a436e
JL
10029 /* For a preincrement, see if we can do this with a single instruction. */
10030 if (!post)
10031 {
10032 icode = (int) this_optab->handlers[(int) mode].insn_code;
10033 if (icode != (int) CODE_FOR_nothing
10034 /* Make sure that OP0 is valid for operands 0 and 1
10035 of the insn we want to queue. */
10036 && (*insn_operand_predicate[icode][0]) (op0, mode)
10037 && (*insn_operand_predicate[icode][1]) (op0, mode)
10038 && (*insn_operand_predicate[icode][2]) (op1, mode))
10039 single_insn = 1;
10040 }
bbf6f052 10041
b93a436e
JL
10042 /* If OP0 is not the actual lvalue, but rather a copy in a register,
10043 then we cannot just increment OP0. We must therefore contrive to
10044 increment the original value. Then, for postincrement, we can return
10045 OP0 since it is a copy of the old value. For preincrement, expand here
10046 unless we can do it with a single insn.
bbf6f052 10047
b93a436e
JL
10048 Likewise if storing directly into OP0 would clobber high bits
10049 we need to preserve (bad_subreg). */
10050 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 10051 {
b93a436e
JL
10052 /* This is the easiest way to increment the value wherever it is.
10053 Problems with multiple evaluation of INCREMENTED are prevented
10054 because either (1) it is a component_ref or preincrement,
10055 in which case it was stabilized above, or (2) it is an array_ref
10056 with constant index in an array in a register, which is
10057 safe to reevaluate. */
10058 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
10059 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10060 ? MINUS_EXPR : PLUS_EXPR),
10061 TREE_TYPE (exp),
10062 incremented,
10063 TREE_OPERAND (exp, 1));
a358cee0 10064
b93a436e
JL
10065 while (TREE_CODE (incremented) == NOP_EXPR
10066 || TREE_CODE (incremented) == CONVERT_EXPR)
10067 {
10068 newexp = convert (TREE_TYPE (incremented), newexp);
10069 incremented = TREE_OPERAND (incremented, 0);
10070 }
bbf6f052 10071
b93a436e
JL
10072      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
10073 return post ? op0 : temp;
10074 }
bbf6f052 10075
b93a436e
JL
10076 if (post)
10077 {
10078 /* We have a true reference to the value in OP0.
10079 If there is an insn to add or subtract in this mode, queue it.
10080 Queueing the increment insn avoids the register shuffling
10081 that often results if we must increment now and first save
10082 the old value for subsequent use. */
bbf6f052 10083
b93a436e
JL
10084#if 0 /* Turned off to avoid making extra insn for indexed memref. */
10085 op0 = stabilize (op0);
10086#endif
41dfd40c 10087
b93a436e
JL
10088 icode = (int) this_optab->handlers[(int) mode].insn_code;
10089 if (icode != (int) CODE_FOR_nothing
10090 /* Make sure that OP0 is valid for operands 0 and 1
10091 of the insn we want to queue. */
10092 && (*insn_operand_predicate[icode][0]) (op0, mode)
10093 && (*insn_operand_predicate[icode][1]) (op0, mode))
10094 {
10095 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10096 op1 = force_reg (mode, op1);
bbf6f052 10097
b93a436e
JL
10098 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10099 }
10100 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10101 {
10102 rtx addr = (general_operand (XEXP (op0, 0), mode)
10103 ? force_reg (Pmode, XEXP (op0, 0))
10104 : copy_to_reg (XEXP (op0, 0)));
10105 rtx temp, result;
ca695ac9 10106
b93a436e
JL
10107 op0 = change_address (op0, VOIDmode, addr);
10108 temp = force_reg (GET_MODE (op0), op0);
10109 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10110 op1 = force_reg (mode, op1);
ca695ac9 10111
b93a436e
JL
10112 /* The increment queue is LIFO, thus we have to `queue'
10113 the instructions in reverse order. */
10114 enqueue_insn (op0, gen_move_insn (op0, temp));
10115 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10116 return result;
bbf6f052
RK
10117 }
10118 }
ca695ac9 10119
b93a436e
JL
10120 /* Preincrement, or we can't increment with one simple insn. */
10121 if (post)
10122 /* Save a copy of the value before inc or dec, to return it later. */
10123 temp = value = copy_to_reg (op0);
10124 else
10125 /* Arrange to return the incremented value. */
10126 /* Copy the rtx because expand_binop will protect from the queue,
10127 and the results of that would be invalid for us to return
10128 if our caller does emit_queue before using our result. */
10129 temp = copy_rtx (value = op0);
bbf6f052 10130
b93a436e
JL
10131 /* Increment however we can. */
10132 op1 = expand_binop (mode, this_optab, value, op1,
10133 flag_check_memory_usage ? NULL_RTX : op0,
10134 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10135 /* Make sure the value is stored into OP0. */
10136 if (op1 != op0)
10137 emit_move_insn (op0, op1);
5718612f 10138
b93a436e
JL
10139 return temp;
10140}
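/* Illustrative example, not in the original source: for a postincrement
   such as `j = i++;' the code above hands back a copy of the old value of
   `i' (TEMP) while the add itself may merely be queued; for `j = ++i;'
   the incremented value itself is returned.  */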
10141\f
10142/* Expand all function calls contained within EXP, innermost ones first.
10143 But don't look within expressions that have sequence points.
10144 For each CALL_EXPR, record the rtx for its value
10145 in the CALL_EXPR_RTL field. */
5718612f 10146
b93a436e
JL
10147static void
10148preexpand_calls (exp)
10149 tree exp;
10150{
10151 register int nops, i;
10152 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5718612f 10153
b93a436e
JL
10154 if (! do_preexpand_calls)
10155 return;
5718612f 10156
b93a436e 10157 /* Only expressions and references can contain calls. */
bbf6f052 10158
b93a436e
JL
10159 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10160 return;
bbf6f052 10161
b93a436e
JL
10162 switch (TREE_CODE (exp))
10163 {
10164 case CALL_EXPR:
10165 /* Do nothing if already expanded. */
10166 if (CALL_EXPR_RTL (exp) != 0
10167 /* Do nothing if the call returns a variable-sized object. */
10168	  /* Do nothing if the call returns a variable-sized object. */
	  || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
10169 /* Do nothing to built-in functions. */
10170 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10171 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10172 == FUNCTION_DECL)
10173 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10174 return;
bbf6f052 10175
b93a436e
JL
10176 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10177 return;
bbf6f052 10178
b93a436e
JL
10179 case COMPOUND_EXPR:
10180 case COND_EXPR:
10181 case TRUTH_ANDIF_EXPR:
10182 case TRUTH_ORIF_EXPR:
10183 /* If we find one of these, then we can be sure
10184 the adjust will be done for it (since it makes jumps).
10185 Do it now, so that if this is inside an argument
10186 of a function, we don't get the stack adjustment
10187 after some other args have already been pushed. */
10188 do_pending_stack_adjust ();
10189 return;
bbf6f052 10190
b93a436e
JL
10191 case BLOCK:
10192 case RTL_EXPR:
10193 case WITH_CLEANUP_EXPR:
10194 case CLEANUP_POINT_EXPR:
10195 case TRY_CATCH_EXPR:
10196 return;
bbf6f052 10197
b93a436e
JL
10198 case SAVE_EXPR:
10199 if (SAVE_EXPR_RTL (exp) != 0)
10200 return;
10201
10202 default:
10203 break;
ca695ac9 10204 }
bbf6f052 10205
b93a436e
JL
10206 nops = tree_code_length[(int) TREE_CODE (exp)];
10207 for (i = 0; i < nops; i++)
10208 if (TREE_OPERAND (exp, i) != 0)
10209 {
10210 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10211 if (type == 'e' || type == '<' || type == '1' || type == '2'
10212 || type == 'r')
10213 preexpand_calls (TREE_OPERAND (exp, i));
10214 }
10215}
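/* Illustrative example, not in the original source: for a statement such
   as `x = f (a) + g (b);' the recursion above expands both CALL_EXPRs and
   records their values in CALL_EXPR_RTL before the PLUS_EXPR itself is
   expanded, instead of in the middle of expanding the addition.  */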
10216\f
10217/* At the start of a function, record that we have no previously-pushed
10218 arguments waiting to be popped. */
bbf6f052 10219
b93a436e
JL
10220void
10221init_pending_stack_adjust ()
10222{
10223 pending_stack_adjust = 0;
10224}
bbf6f052 10225
b93a436e 10226/* When exiting from a function, if safe, clear out any pending stack adjust
060fbabf
JL
10227 so the adjustment won't get done.
10228
10229 Note, if the current function calls alloca, then it must have a
10230 frame pointer regardless of the value of flag_omit_frame_pointer. */
bbf6f052 10231
b93a436e
JL
10232void
10233clear_pending_stack_adjust ()
10234{
10235#ifdef EXIT_IGNORE_STACK
10236 if (optimize > 0
060fbabf
JL
10237 && (! flag_omit_frame_pointer || current_function_calls_alloca)
10238 && EXIT_IGNORE_STACK
b93a436e
JL
10239 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10240 && ! flag_inline_functions)
10241 pending_stack_adjust = 0;
10242#endif
10243}
bbf6f052 10244
b93a436e
JL
10245/* Pop any previously-pushed arguments that have not been popped yet. */
10246
10247void
10248do_pending_stack_adjust ()
10249{
10250 if (inhibit_defer_pop == 0)
ca695ac9 10251 {
b93a436e
JL
10252 if (pending_stack_adjust != 0)
10253 adjust_stack (GEN_INT (pending_stack_adjust));
10254 pending_stack_adjust = 0;
bbf6f052 10255 }
bbf6f052
RK
10256}
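/* Illustrative example, not in the original source: if two earlier calls
   each left 8 bytes of arguments on the stack with their pops deferred,
   pending_stack_adjust is 16 by the time this runs, and a single
   adjust_stack call pops both at once.  */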
10257\f
b93a436e 10258/* Expand conditional expressions. */
bbf6f052 10259
b93a436e
JL
10260/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10261 LABEL is an rtx of code CODE_LABEL, in this function and all the
10262 functions here. */
bbf6f052 10263
b93a436e
JL
10264void
10265jumpifnot (exp, label)
ca695ac9 10266 tree exp;
b93a436e 10267 rtx label;
bbf6f052 10268{
b93a436e
JL
10269 do_jump (exp, label, NULL_RTX);
10270}
bbf6f052 10271
b93a436e 10272/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
ca695ac9 10273
b93a436e
JL
10274void
10275jumpif (exp, label)
10276 tree exp;
10277 rtx label;
10278{
10279 do_jump (exp, NULL_RTX, label);
10280}
ca695ac9 10281
b93a436e
JL
10282/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10283 the result is zero, or IF_TRUE_LABEL if the result is one.
10284 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10285 meaning fall through in that case.
ca695ac9 10286
b93a436e
JL
10287 do_jump always does any pending stack adjust except when it does not
10288 actually perform a jump. An example where there is no jump
10289 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
ca695ac9 10290
b93a436e
JL
10291 This function is responsible for optimizing cases such as
10292 &&, || and comparison operators in EXP. */
5718612f 10293
b93a436e
JL
10294void
10295do_jump (exp, if_false_label, if_true_label)
10296 tree exp;
10297 rtx if_false_label, if_true_label;
10298{
10299 register enum tree_code code = TREE_CODE (exp);
10300 /* Some cases need to create a label to jump to
10301 in order to properly fall through.
10302 These cases set DROP_THROUGH_LABEL nonzero. */
10303 rtx drop_through_label = 0;
10304 rtx temp;
10305 rtx comparison = 0;
10306 int i;
10307 tree type;
10308 enum machine_mode mode;
ca695ac9 10309
dbecbbe4
JL
10310#ifdef MAX_INTEGER_COMPUTATION_MODE
10311 check_max_integer_computation_mode (exp);
10312#endif
10313
b93a436e 10314 emit_queue ();
ca695ac9 10315
b93a436e 10316 switch (code)
ca695ac9 10317 {
b93a436e 10318 case ERROR_MARK:
ca695ac9 10319 break;
bbf6f052 10320
b93a436e
JL
10321 case INTEGER_CST:
10322 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10323 if (temp)
10324 emit_jump (temp);
10325 break;
bbf6f052 10326
b93a436e
JL
10327#if 0
10328 /* This is not true with #pragma weak */
10329 case ADDR_EXPR:
10330 /* The address of something can never be zero. */
10331 if (if_true_label)
10332 emit_jump (if_true_label);
10333 break;
10334#endif
bbf6f052 10335
b93a436e
JL
10336 case NOP_EXPR:
10337 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10338 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10339 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10340 goto normal;
10341 case CONVERT_EXPR:
10342 /* If we are narrowing the operand, we have to do the compare in the
10343 narrower mode. */
10344 if ((TYPE_PRECISION (TREE_TYPE (exp))
10345 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10346 goto normal;
10347 case NON_LVALUE_EXPR:
10348 case REFERENCE_EXPR:
10349 case ABS_EXPR:
10350 case NEGATE_EXPR:
10351 case LROTATE_EXPR:
10352 case RROTATE_EXPR:
10353 /* These cannot change zero->non-zero or vice versa. */
10354 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10355 break;
bbf6f052 10356
b93a436e
JL
10357#if 0
10358 /* This is never less insns than evaluating the PLUS_EXPR followed by
10359 a test and can be longer if the test is eliminated. */
10360 case PLUS_EXPR:
10361 /* Reduce to minus. */
10362 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10363 TREE_OPERAND (exp, 0),
10364 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10365 TREE_OPERAND (exp, 1))));
10366 /* Process as MINUS. */
ca695ac9 10367#endif
bbf6f052 10368
b93a436e
JL
10369 case MINUS_EXPR:
10370 /* Non-zero iff operands of minus differ. */
10371 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10372 TREE_OPERAND (exp, 0),
10373 TREE_OPERAND (exp, 1)),
10374 NE, NE);
10375 break;
bbf6f052 10376
b93a436e
JL
10377 case BIT_AND_EXPR:
10378 /* If we are AND'ing with a small constant, do this comparison in the
10379 smallest type that fits. If the machine doesn't have comparisons
10380 that small, it will be converted back to the wider comparison.
10381 This helps if we are testing the sign bit of a narrower object.
10382 combine can't do this for us because it can't know whether a
10383 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
bbf6f052 10384
b93a436e
JL
10385 if (! SLOW_BYTE_ACCESS
10386 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10387 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10388 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10389 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10390 && (type = type_for_mode (mode, 1)) != 0
10391 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10392 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10393 != CODE_FOR_nothing))
10394 {
10395 do_jump (convert (type, exp), if_false_label, if_true_label);
10396 break;
10397 }
10398 goto normal;
bbf6f052 10399
b93a436e
JL
10400 case TRUTH_NOT_EXPR:
10401 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10402 break;
bbf6f052 10403
b93a436e
JL
10404 case TRUTH_ANDIF_EXPR:
10405 if (if_false_label == 0)
10406 if_false_label = drop_through_label = gen_label_rtx ();
10407 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10408 start_cleanup_deferral ();
10409 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10410 end_cleanup_deferral ();
10411 break;
bbf6f052 10412
b93a436e
JL
10413 case TRUTH_ORIF_EXPR:
10414 if (if_true_label == 0)
10415 if_true_label = drop_through_label = gen_label_rtx ();
10416 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10417 start_cleanup_deferral ();
10418 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10419 end_cleanup_deferral ();
10420 break;
bbf6f052 10421
b93a436e
JL
10422 case COMPOUND_EXPR:
10423 push_temp_slots ();
10424 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10425 preserve_temp_slots (NULL_RTX);
10426 free_temp_slots ();
10427 pop_temp_slots ();
10428 emit_queue ();
10429 do_pending_stack_adjust ();
10430 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10431 break;
bbf6f052 10432
b93a436e
JL
10433 case COMPONENT_REF:
10434 case BIT_FIELD_REF:
10435 case ARRAY_REF:
10436 {
10437 int bitsize, bitpos, unsignedp;
10438 enum machine_mode mode;
10439 tree type;
10440 tree offset;
10441 int volatilep = 0;
10442 int alignment;
bbf6f052 10443
b93a436e
JL
10444 /* Get description of this reference. We don't actually care
10445 about the underlying object here. */
10446 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10447 &mode, &unsignedp, &volatilep,
10448 &alignment);
bbf6f052 10449
b93a436e
JL
10450 type = type_for_size (bitsize, unsignedp);
10451 if (! SLOW_BYTE_ACCESS
10452 && type != 0 && bitsize >= 0
10453 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10454 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10455 != CODE_FOR_nothing))
10456 {
10457 do_jump (convert (type, exp), if_false_label, if_true_label);
10458 break;
10459 }
10460 goto normal;
10461 }
bbf6f052 10462
b93a436e
JL
10463 case COND_EXPR:
10464 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10465 if (integer_onep (TREE_OPERAND (exp, 1))
10466 && integer_zerop (TREE_OPERAND (exp, 2)))
10467 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
bbf6f052 10468
b93a436e
JL
10469 else if (integer_zerop (TREE_OPERAND (exp, 1))
10470 && integer_onep (TREE_OPERAND (exp, 2)))
10471 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
bbf6f052 10472
b93a436e
JL
10473 else
10474 {
10475 register rtx label1 = gen_label_rtx ();
10476 drop_through_label = gen_label_rtx ();
bbf6f052 10477
b93a436e 10478 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
bbf6f052 10479
b93a436e
JL
10480 start_cleanup_deferral ();
10481 /* Now the THEN-expression. */
10482 do_jump (TREE_OPERAND (exp, 1),
10483 if_false_label ? if_false_label : drop_through_label,
10484 if_true_label ? if_true_label : drop_through_label);
10485 /* In case the do_jump just above never jumps. */
10486 do_pending_stack_adjust ();
10487 emit_label (label1);
bbf6f052 10488
b93a436e
JL
10489 /* Now the ELSE-expression. */
10490 do_jump (TREE_OPERAND (exp, 2),
10491 if_false_label ? if_false_label : drop_through_label,
10492 if_true_label ? if_true_label : drop_through_label);
10493 end_cleanup_deferral ();
10494 }
10495 break;
bbf6f052 10496
b93a436e
JL
10497 case EQ_EXPR:
10498 {
10499 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 10500
9ec36da5
JL
10501 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10502 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
10503 {
10504 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10505 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10506 do_jump
10507 (fold
10508 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10509 fold (build (EQ_EXPR, TREE_TYPE (exp),
10510 fold (build1 (REALPART_EXPR,
10511 TREE_TYPE (inner_type),
10512 exp0)),
10513 fold (build1 (REALPART_EXPR,
10514 TREE_TYPE (inner_type),
10515 exp1)))),
10516 fold (build (EQ_EXPR, TREE_TYPE (exp),
10517 fold (build1 (IMAGPART_EXPR,
10518 TREE_TYPE (inner_type),
10519 exp0)),
10520 fold (build1 (IMAGPART_EXPR,
10521 TREE_TYPE (inner_type),
10522 exp1)))))),
10523 if_false_label, if_true_label);
10524 }
9ec36da5
JL
10525
10526 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10527 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10528
b93a436e
JL
10529 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10530 && !can_compare_p (TYPE_MODE (inner_type)))
10531 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10532 else
10533 comparison = compare (exp, EQ, EQ);
10534 break;
10535 }
bbf6f052 10536
b93a436e
JL
10537 case NE_EXPR:
10538 {
10539 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 10540
9ec36da5
JL
10541 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10542 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
10543 {
10544 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10545 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10546 do_jump
10547 (fold
10548 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10549 fold (build (NE_EXPR, TREE_TYPE (exp),
10550 fold (build1 (REALPART_EXPR,
10551 TREE_TYPE (inner_type),
10552 exp0)),
10553 fold (build1 (REALPART_EXPR,
10554 TREE_TYPE (inner_type),
10555 exp1)))),
10556 fold (build (NE_EXPR, TREE_TYPE (exp),
10557 fold (build1 (IMAGPART_EXPR,
10558 TREE_TYPE (inner_type),
10559 exp0)),
10560 fold (build1 (IMAGPART_EXPR,
10561 TREE_TYPE (inner_type),
10562 exp1)))))),
10563 if_false_label, if_true_label);
10564 }
9ec36da5
JL
10565
10566 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10567 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10568
b93a436e
JL
10569 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10570 && !can_compare_p (TYPE_MODE (inner_type)))
10571 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10572 else
10573 comparison = compare (exp, NE, NE);
10574 break;
10575 }
bbf6f052 10576
b93a436e
JL
10577 case LT_EXPR:
10578 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10579 == MODE_INT)
10580 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10581 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10582 else
10583 comparison = compare (exp, LT, LTU);
10584 break;
bbf6f052 10585
b93a436e
JL
10586 case LE_EXPR:
10587 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10588 == MODE_INT)
10589 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10590 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10591 else
10592 comparison = compare (exp, LE, LEU);
10593 break;
bbf6f052 10594
b93a436e
JL
10595 case GT_EXPR:
10596 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10597 == MODE_INT)
10598 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10599 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10600 else
10601 comparison = compare (exp, GT, GTU);
10602 break;
bbf6f052 10603
b93a436e
JL
10604 case GE_EXPR:
10605 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10606 == MODE_INT)
10607 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10608 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10609 else
10610 comparison = compare (exp, GE, GEU);
10611 break;
bbf6f052 10612
b93a436e
JL
10613 default:
10614 normal:
10615 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10616#if 0
10617 /* This is not needed any more and causes poor code since it causes
10618 comparisons and tests from non-SI objects to have different code
10619 sequences. */
10620 /* Copy to register to avoid generating bad insns by cse
10621 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10622 if (!cse_not_expected && GET_CODE (temp) == MEM)
10623 temp = copy_to_reg (temp);
ca695ac9 10624#endif
b93a436e
JL
10625 do_pending_stack_adjust ();
10626 if (GET_CODE (temp) == CONST_INT)
10627 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10628 else if (GET_CODE (temp) == LABEL_REF)
10629 comparison = const_true_rtx;
10630 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10631 && !can_compare_p (GET_MODE (temp)))
10632 /* Note swapping the labels gives us not-equal. */
10633 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10634 else if (GET_MODE (temp) != VOIDmode)
10635 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10636 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10637 GET_MODE (temp), NULL_RTX, 0);
10638 else
10639 abort ();
10640 }
bbf6f052 10641
b93a436e
JL
10642 /* Do any postincrements in the expression that was tested. */
10643 emit_queue ();
bbf6f052 10644
b93a436e
JL
10645 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10646 straight into a conditional jump instruction as the jump condition.
10647 Otherwise, all the work has been done already. */
bbf6f052 10648
b93a436e
JL
10649 if (comparison == const_true_rtx)
10650 {
10651 if (if_true_label)
10652 emit_jump (if_true_label);
10653 }
10654 else if (comparison == const0_rtx)
10655 {
10656 if (if_false_label)
10657 emit_jump (if_false_label);
10658 }
10659 else if (comparison)
10660 do_jump_for_compare (comparison, if_false_label, if_true_label);
bbf6f052 10661
b93a436e
JL
10662 if (drop_through_label)
10663 {
10664 /* If do_jump produces code that might be jumped around,
10665 do any stack adjusts from that code, before the place
10666 where control merges in. */
10667 do_pending_stack_adjust ();
10668 emit_label (drop_through_label);
10669 }
bbf6f052 10670}
b93a436e
JL
10671\f
10672/* Given a comparison expression EXP for values too wide to be compared
10673 with one insn, test the comparison and jump to the appropriate label.
10674 The code of EXP is ignored; we always test GT if SWAP is 0,
10675 and LT if SWAP is 1. */
bbf6f052 10676
b93a436e
JL
10677static void
10678do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10679 tree exp;
10680 int swap;
10681 rtx if_false_label, if_true_label;
10682{
10683 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10684 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10685 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10686 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10687 rtx drop_through_label = 0;
10688 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10689 int i;
bbf6f052 10690
b93a436e
JL
10691 if (! if_true_label || ! if_false_label)
10692 drop_through_label = gen_label_rtx ();
10693 if (! if_true_label)
10694 if_true_label = drop_through_label;
10695 if (! if_false_label)
10696 if_false_label = drop_through_label;
bbf6f052 10697
b93a436e
JL
10698 /* Compare a word at a time, high order first. */
10699 for (i = 0; i < nwords; i++)
f81497d9 10700 {
b93a436e
JL
10701 rtx comp;
10702 rtx op0_word, op1_word;
10703
10704 if (WORDS_BIG_ENDIAN)
10705 {
10706 op0_word = operand_subword_force (op0, i, mode);
10707 op1_word = operand_subword_force (op1, i, mode);
10708 }
f81497d9 10709 else
b93a436e
JL
10710 {
10711 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10712 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10713 }
10714
10715 /* All but high-order word must be compared as unsigned. */
10716 comp = compare_from_rtx (op0_word, op1_word,
10717 (unsignedp || i > 0) ? GTU : GT,
10718 unsignedp, word_mode, NULL_RTX, 0);
10719 if (comp == const_true_rtx)
10720 emit_jump (if_true_label);
10721 else if (comp != const0_rtx)
10722 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10723
10724 /* Consider lower words only if these are equal. */
10725 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10726 NULL_RTX, 0);
10727 if (comp == const_true_rtx)
10728 emit_jump (if_false_label);
10729 else if (comp != const0_rtx)
10730 do_jump_for_compare (comp, NULL_RTX, if_false_label);
f81497d9 10731 }
ca695ac9 10732
b93a436e
JL
10733 if (if_false_label)
10734 emit_jump (if_false_label);
10735 if (drop_through_label)
10736 emit_label (drop_through_label);
f81497d9
RS
10737}
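/* Illustrative walk-through, not in the original source: comparing two
   DImode values A and B on a 32-bit target, the loop above first tests
   the high words (signed GT for signed operands), jumping to the true
   label when A.hi > B.hi; if the high words merely differ, A must be
   smaller, so it jumps to the false label; only when they are equal does
   it fall through to compare the low words, always unsigned.  */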
10738
b93a436e
JL
10739/* Compare OP0 with OP1, word at a time, in mode MODE.
10740 UNSIGNEDP says to do unsigned comparison.
10741 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
f81497d9 10742
b93a436e
JL
10743void
10744do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10745 enum machine_mode mode;
10746 int unsignedp;
10747 rtx op0, op1;
10748 rtx if_false_label, if_true_label;
f81497d9 10749{
b93a436e
JL
10750 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10751 rtx drop_through_label = 0;
10752 int i;
f81497d9 10753
b93a436e
JL
10754 if (! if_true_label || ! if_false_label)
10755 drop_through_label = gen_label_rtx ();
10756 if (! if_true_label)
10757 if_true_label = drop_through_label;
10758 if (! if_false_label)
10759 if_false_label = drop_through_label;
f81497d9 10760
b93a436e
JL
10761 /* Compare a word at a time, high order first. */
10762 for (i = 0; i < nwords; i++)
10763 {
10764 rtx comp;
10765 rtx op0_word, op1_word;
bbf6f052 10766
b93a436e
JL
10767 if (WORDS_BIG_ENDIAN)
10768 {
10769 op0_word = operand_subword_force (op0, i, mode);
10770 op1_word = operand_subword_force (op1, i, mode);
10771 }
10772 else
10773 {
10774 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10775 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10776 }
bbf6f052 10777
b93a436e
JL
10778 /* All but high-order word must be compared as unsigned. */
10779 comp = compare_from_rtx (op0_word, op1_word,
10780 (unsignedp || i > 0) ? GTU : GT,
10781 unsignedp, word_mode, NULL_RTX, 0);
10782 if (comp == const_true_rtx)
10783 emit_jump (if_true_label);
10784 else if (comp != const0_rtx)
10785 do_jump_for_compare (comp, NULL_RTX, if_true_label);
bbf6f052 10786
b93a436e
JL
10787 /* Consider lower words only if these are equal. */
10788 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10789 NULL_RTX, 0);
10790 if (comp == const_true_rtx)
10791 emit_jump (if_false_label);
10792 else if (comp != const0_rtx)
10793 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10794 }
bbf6f052 10795
b93a436e
JL
10796 if (if_false_label)
10797 emit_jump (if_false_label);
10798 if (drop_through_label)
10799 emit_label (drop_through_label);
bbf6f052
RK
10800}
10801
b93a436e
JL
10802/* Given an EQ_EXPR expression EXP for values too wide to be compared
10803 with one insn, test the comparison and jump to the appropriate label. */
bbf6f052 10804
b93a436e
JL
10805static void
10806do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10807 tree exp;
10808 rtx if_false_label, if_true_label;
bbf6f052 10809{
b93a436e
JL
10810 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10811 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10812 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10813 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10814 int i;
10815 rtx drop_through_label = 0;
bbf6f052 10816
b93a436e
JL
10817 if (! if_false_label)
10818 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 10819
b93a436e
JL
10820 for (i = 0; i < nwords; i++)
10821 {
10822 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10823 operand_subword_force (op1, i, mode),
10824 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10825 word_mode, NULL_RTX, 0);
10826 if (comp == const_true_rtx)
10827 emit_jump (if_false_label);
10828 else if (comp != const0_rtx)
10829 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10830 }
bbf6f052 10831
b93a436e
JL
10832 if (if_true_label)
10833 emit_jump (if_true_label);
10834 if (drop_through_label)
10835 emit_label (drop_through_label);
bbf6f052 10836}
b93a436e
JL
10837\f
10838/* Jump according to whether OP0 is 0.
10839 We assume that OP0 has an integer mode that is too wide
10840 for the available compare insns. */
bbf6f052 10841
f5963e61 10842void
b93a436e
JL
10843do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10844 rtx op0;
10845 rtx if_false_label, if_true_label;
ca695ac9 10846{
b93a436e
JL
10847 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10848 rtx part;
10849 int i;
10850 rtx drop_through_label = 0;
bbf6f052 10851
b93a436e
JL
10852 /* The fastest way of doing this comparison on almost any machine is to
10853 "or" all the words and compare the result. If all have to be loaded
10854 from memory and this is a very wide item, it's possible this may
10855 be slower, but that's highly unlikely. */
bbf6f052 10856
b93a436e
JL
10857 part = gen_reg_rtx (word_mode);
10858 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10859 for (i = 1; i < nwords && part != 0; i++)
10860 part = expand_binop (word_mode, ior_optab, part,
10861 operand_subword_force (op0, i, GET_MODE (op0)),
10862 part, 1, OPTAB_WIDEN);
bbf6f052 10863
b93a436e
JL
10864 if (part != 0)
10865 {
10866 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
10867 NULL_RTX, 0);
0f41302f 10868
b93a436e
JL
10869 if (comp == const_true_rtx)
10870 emit_jump (if_false_label);
10871 else if (comp == const0_rtx)
10872 emit_jump (if_true_label);
10873 else
10874 do_jump_for_compare (comp, if_false_label, if_true_label);
bbf6f052 10875
b93a436e
JL
10876 return;
10877 }
bbf6f052 10878
b93a436e
JL
10879 /* If we couldn't do the "or" simply, do this with a series of compares. */
10880 if (! if_false_label)
10881 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 10882
b93a436e
JL
10883 for (i = 0; i < nwords; i++)
10884 {
10885 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10886 GET_MODE (op0)),
10887 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10888 if (comp == const_true_rtx)
10889 emit_jump (if_false_label);
10890 else if (comp != const0_rtx)
10891 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10892 }
bbf6f052 10893
b93a436e
JL
10894 if (if_true_label)
10895 emit_jump (if_true_label);
0f41302f 10896
b93a436e
JL
10897 if (drop_through_label)
10898 emit_label (drop_through_label);
bbf6f052 10899}
bbf6f052 10900
b93a436e
JL
10901/* Given a comparison expression in rtl form, output conditional branches to
10902 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
bbf6f052 10903
b93a436e
JL
10904static void
10905do_jump_for_compare (comparison, if_false_label, if_true_label)
10906 rtx comparison, if_false_label, if_true_label;
bbf6f052 10907{
b93a436e
JL
10908 if (if_true_label)
10909 {
10910 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10911 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
10912 else
10913 abort ();
ca695ac9 10914
b93a436e
JL
10915 if (if_false_label)
10916 emit_jump (if_false_label);
10917 }
10918 else if (if_false_label)
10919 {
10920 rtx insn;
10921 rtx prev = get_last_insn ();
10922 rtx branch = 0;
0f41302f 10923
b93a436e
JL
10924 /* Output the branch with the opposite condition. Then try to invert
10925 what is generated. If more than one insn is a branch, or if the
10926 branch is not the last insn written, abort. If we can't invert
10927	 the branch, make a true label, redirect this jump to that,
10928 emit a jump to the false label and define the true label. */
bbf6f052 10929
b93a436e
JL
10930 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
10931 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
10932 else
10933 abort ();
bbf6f052 10934
b93a436e
JL
10935 /* Here we get the first insn that was just emitted. It used to be the
10936 case that, on some machines, emitting the branch would discard
10937 the previous compare insn and emit a replacement. This isn't
10938 done anymore, but abort if we see that PREV is deleted. */
bbf6f052 10939
b93a436e
JL
10940 if (prev == 0)
10941 insn = get_insns ();
10942 else if (INSN_DELETED_P (prev))
10943 abort ();
10944 else
10945 insn = NEXT_INSN (prev);
bbf6f052 10946
b93a436e
JL
10947 for (; insn; insn = NEXT_INSN (insn))
10948 if (GET_CODE (insn) == JUMP_INSN)
10949 {
10950 if (branch)
10951 abort ();
10952 branch = insn;
10953 }
a7c5971a 10954
b93a436e
JL
10955 if (branch != get_last_insn ())
10956 abort ();
bbf6f052 10957
b93a436e
JL
10958 JUMP_LABEL (branch) = if_false_label;
10959 if (! invert_jump (branch, if_false_label))
10960 {
10961 if_true_label = gen_label_rtx ();
10962 redirect_jump (branch, if_true_label);
10963 emit_jump (if_false_label);
10964 emit_label (if_true_label);
10965 }
10966 }
10967}
10968\f
10969/* Generate code for a comparison expression EXP
10970 (including code to compute the values to be compared)
10971 and set (CC0) according to the result.
10972 SIGNED_CODE should be the rtx operation for this comparison for
10973 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
bbf6f052 10974
b93a436e
JL
10975 We force a stack adjustment unless there are currently
10976 things pushed on the stack that aren't yet used. */
ca695ac9 10977
b93a436e
JL
10978static rtx
10979compare (exp, signed_code, unsigned_code)
10980 register tree exp;
10981 enum rtx_code signed_code, unsigned_code;
10982{
10983 register rtx op0
10984 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10985 register rtx op1
10986 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10987 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
10988 register enum machine_mode mode = TYPE_MODE (type);
10989 int unsignedp = TREE_UNSIGNED (type);
10990 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
ca695ac9 10991
b93a436e
JL
10992#ifdef HAVE_canonicalize_funcptr_for_compare
10993 /* If function pointers need to be "canonicalized" before they can
10994 be reliably compared, then canonicalize them. */
10995 if (HAVE_canonicalize_funcptr_for_compare
10996 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
10997 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10998 == FUNCTION_TYPE))
bbf6f052 10999 {
b93a436e 11000 rtx new_op0 = gen_reg_rtx (mode);
bbf6f052 11001
b93a436e
JL
11002 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
11003 op0 = new_op0;
ca695ac9 11004 }
bbf6f052 11005
b93a436e
JL
11006 if (HAVE_canonicalize_funcptr_for_compare
11007 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11008 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11009 == FUNCTION_TYPE))
11010 {
11011 rtx new_op1 = gen_reg_rtx (mode);
bbf6f052 11012
b93a436e
JL
11013 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
11014 op1 = new_op1;
11015 }
11016#endif
0f41302f 11017
b93a436e
JL
11018 return compare_from_rtx (op0, op1, code, unsignedp, mode,
11019 ((mode == BLKmode)
11020 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
11021 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
ca695ac9 11022}
bbf6f052 11023
b93a436e
JL
11024/* Like compare but expects the values to compare as two rtx's.
11025 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 11026
b93a436e
JL
11027 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
11028 compared.
bbf6f052 11029
b93a436e
JL
11030 If ALIGN is non-zero, it is the alignment of this type; if zero, the
11031 size of MODE should be used. */
ca695ac9 11032
b93a436e
JL
11033rtx
11034compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
11035 register rtx op0, op1;
11036 enum rtx_code code;
11037 int unsignedp;
11038 enum machine_mode mode;
11039 rtx size;
11040 int align;
bbf6f052 11041{
b93a436e 11042 rtx tem;
bbf6f052 11043
b93a436e
JL
11044 /* If one operand is constant, make it the second one. Only do this
11045 if the other operand is not constant as well. */
e7c33f54 11046
b93a436e
JL
11047 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
11048 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
ca695ac9 11049 {
b93a436e
JL
11050 tem = op0;
11051 op0 = op1;
11052 op1 = tem;
11053 code = swap_condition (code);
11054 }
bbf6f052 11055
b93a436e
JL
11056 if (flag_force_mem)
11057 {
11058 op0 = force_not_mem (op0);
11059 op1 = force_not_mem (op1);
11060 }
bbf6f052 11061
b93a436e 11062 do_pending_stack_adjust ();
ca695ac9 11063
b93a436e
JL
11064 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
11065 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
11066 return tem;
ca695ac9 11067
b93a436e
JL
11068#if 0
11069 /* There's no need to do this now that combine.c can eliminate lots of
11070 sign extensions. This can be less efficient in certain cases on other
11071 machines. */
ca695ac9 11072
b93a436e
JL
11073 /* If this is a signed equality comparison, we can do it as an
11074 unsigned comparison since zero-extension is cheaper than sign
11075 extension and comparisons with zero are done as unsigned. This is
11076 the case even on machines that can do fast sign extension, since
11077 zero-extension is easier to combine with other operations than
11078 sign-extension is. If we are comparing against a constant, we must
11079 convert it to what it would look like unsigned. */
11080 if ((code == EQ || code == NE) && ! unsignedp
11081 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
11082 {
11083 if (GET_CODE (op1) == CONST_INT
11084 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
11085 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
11086 unsignedp = 1;
11087 }
11088#endif
ca695ac9 11089
b93a436e 11090 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
ca695ac9 11091
b93a436e
JL
11092 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
11093}
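/* Illustrative example, not in the original source: the canonicalization
   above rewrites (LT (const_int 3) (reg X)) as (GT (reg X) (const_int 3))
   via swap_condition, so later code need only handle constants in the
   second position.  */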
11094\f
11095/* Generate code to calculate EXP using a store-flag instruction
11096 and return an rtx for the result. EXP is either a comparison
11097 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 11098
b93a436e 11099 If TARGET is nonzero, store the result there if convenient.
ca695ac9 11100
b93a436e
JL
11101 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
11102 cheap.
ca695ac9 11103
b93a436e
JL
11104 Return zero if there is no suitable set-flag instruction
11105 available on this machine.
ca695ac9 11106
b93a436e
JL
11107 Once expand_expr has been called on the arguments of the comparison,
11108 we are committed to doing the store flag, since it is not safe to
11109 re-evaluate the expression. We emit the store-flag insn by calling
11110 emit_store_flag, but only expand the arguments if we have a reason
11111 to believe that emit_store_flag will be successful. If we think that
11112 it will, but it isn't, we have to simulate the store-flag with a
11113 set/jump/set sequence. */
ca695ac9 11114
b93a436e
JL
11115static rtx
11116do_store_flag (exp, target, mode, only_cheap)
11117 tree exp;
11118 rtx target;
11119 enum machine_mode mode;
11120 int only_cheap;
11121{
11122 enum rtx_code code;
11123 tree arg0, arg1, type;
11124 tree tem;
11125 enum machine_mode operand_mode;
11126 int invert = 0;
11127 int unsignedp;
11128 rtx op0, op1;
11129 enum insn_code icode;
11130 rtx subtarget = target;
381127e8 11131 rtx result, label;
ca695ac9 11132
b93a436e
JL
11133 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
11134 result at the end. We can't simply invert the test since it would
11135 have already been inverted if it were valid. This case occurs for
11136 some floating-point comparisons. */
ca695ac9 11137
b93a436e
JL
11138 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
11139 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 11140
b93a436e
JL
11141 arg0 = TREE_OPERAND (exp, 0);
11142 arg1 = TREE_OPERAND (exp, 1);
11143 type = TREE_TYPE (arg0);
11144 operand_mode = TYPE_MODE (type);
11145 unsignedp = TREE_UNSIGNED (type);
ca695ac9 11146
b93a436e
JL
  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
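  /* For example, a signed "x < 1" becomes "x <= 0", and a signed
     "x > -1" becomes "x >= 0".  */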

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
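  /* For example, "1 < x" is rewritten as "x > 1".  */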
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
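  /* For example, "(x & 4) != 0" becomes "(x >> 2) & 1", and
     "(x & 4) == 0" becomes "((x >> 2) & 1) ^ 1".  */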

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
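      /* For example, testing bit 1 of (x >> 3) is the same as testing
         bit 4 of x, so the shift can be folded into BITNUM.  */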

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }


      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
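      /* (If the tested bit is the sign bit, a logical right shift by
         TYPE_PRECISION (type) - 1 leaves exactly 0 or 1, so the final
         AND with 1 below can be omitted.)  */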
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is
     safe because, if emit_store_flag does anything at all, it will succeed
     and OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
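  /* Roughly:  target = 1; if (cond) goto label; target = 0; label:
     (with the two constants swapped when INVERT is set).  */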
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
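  /* For example, for a switch over case values 5..10, INDEX here is
     the original value minus 5; original values below 5 wrap around
     to huge unsigned numbers, so the single unsigned comparison
     against RANGE rejects values on either side of 5..10.  */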

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
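  /* The address built below is essentially
     table_label + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE),
     i.e. the start of the dispatch table plus the scaled index.  */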
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */