/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif


/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the clear to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};

static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
static tree emit_block_move_libcall_fn PARAMS ((int));
static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
static tree clear_storage_libcall_fn PARAMS ((int));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int, tree,
					 int));
static rtx var_rtx		PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree, tree));
static int is_aligning_offset	PARAMS ((tree, tree));
static rtx expand_increment	PARAMS ((tree, int, int));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump	PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

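/* Illustrative example, not part of the original file: on a
   hypothetical target where MOVE_RATIO is 2 and word_mode is SImode,
   a word-aligned 8-byte copy needs two SImode moves, so
   MOVE_BY_PIECES_P (8, 32) is false and a movstr pattern or libcall
   is preferred, while a word-aligned 4-byte copy needs a single move
   and is expanded inline.  */
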
/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	case CODE_LABEL:
	case BARRIER:
	case NOTE:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
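
/* Illustrative sketch, not part of the original file: the protocol a
   user of the increment queue is expected to follow.  The locals here
   are hypothetical.

     rtx op0 = expand_expr (exp, NULL_RTX, VOIDmode, 0);
     op0 = protect_from_queue (op0, 0);   -- make OP0 safe to use in insns
     emit_move_insn (target, op0);        -- use it immediately
     emit_queue ();                       -- flush pending post-increments

   Holding OP0 across the emit_queue call before using it is exactly
   the mistake the comment above protect_from_queue warns against.  */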
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
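
/* Illustrative sketch, not part of the original file: a hypothetical
   caller widening an unsigned QImode value X to SImode.

     rtx wide = convert_modes (SImode, QImode, x, 1);

   The result may be X itself, a constant or a lowpart, or a fresh
   pseudo into which convert_move emitted the extension; callers must
   not assume a new register was allocated.  */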
\f
/* This macro is used to determine the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
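
/* Illustrative sketch, not part of the original file: the guard under
   which emit_block_move (below) reaches move_by_pieces, given BLKmode
   MEMs X and Y that have already been through protect_from_queue.

     if (GET_CODE (size) == CONST_INT
	 && MOVE_BY_PIECES_P (INTVAL (size), align))
       move_by_pieces (x, y, INTVAL (size), align);

   A copy whose length is not a compile-time constant never takes
   this path.  */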

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
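
/* Illustrative sketch, not part of the original file: a typical call
   to the block-move entry point defined below, for hypothetical
   BLKmode MEMs DST and SRC of NBYTES bytes.

     emit_block_move (dst, src, GEN_INT (nbytes), BLOCK_OP_NORMAL);

   BLOCK_OP_CALL_PARM must be used instead when the destination is an
   outgoing argument slot, so that a memcpy libcall cannot clobber
   arguments already placed on the stack.  */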
1684\f
4ca79136
RH
1685/* Emit code to move a block Y to a block X. This may be done with
1686 string-move instructions, with multiple scalar move instructions,
1687 or with a library call.
bbf6f052 1688
4ca79136 1689 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
bbf6f052 1690 SIZE is an rtx that says how long they are.
19caa751 1691 ALIGN is the maximum alignment we can assume they have.
44bb111a 1692 METHOD describes what kind of copy this is, and what mechanisms may be used.
bbf6f052 1693
e9a25f70
JL
1694 Return the address of the new block, if memcpy is called and returns it,
1695 0 otherwise. */
1696
1697rtx
44bb111a 1698emit_block_move (x, y, size, method)
4ca79136 1699 rtx x, y, size;
44bb111a 1700 enum block_op_methods method;
bbf6f052 1701{
44bb111a 1702 bool may_use_call;
e9a25f70 1703 rtx retval = 0;
44bb111a
RH
1704 unsigned int align;
1705
1706 switch (method)
1707 {
1708 case BLOCK_OP_NORMAL:
1709 may_use_call = true;
1710 break;
1711
1712 case BLOCK_OP_CALL_PARM:
1713 may_use_call = block_move_libcall_safe_for_call_parm ();
1714
1715 /* Make inhibit_defer_pop nonzero around the library call
1716 to force it to pop the arguments right away. */
1717 NO_DEFER_POP;
1718 break;
1719
1720 case BLOCK_OP_NO_LIBCALL:
1721 may_use_call = false;
1722 break;
1723
1724 default:
1725 abort ();
1726 }
1727
1728 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
e9a25f70 1729
bbf6f052
RK
1730 if (GET_MODE (x) != BLKmode)
1731 abort ();
bbf6f052
RK
1732 if (GET_MODE (y) != BLKmode)
1733 abort ();
1734
1735 x = protect_from_queue (x, 1);
1736 y = protect_from_queue (y, 0);
5d901c31 1737 size = protect_from_queue (size, 0);
bbf6f052
RK
1738
1739 if (GET_CODE (x) != MEM)
1740 abort ();
1741 if (GET_CODE (y) != MEM)
1742 abort ();
1743 if (size == 0)
1744 abort ();
1745
cb38fd88
RH
1746 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1747 can be incorrect is coming from __builtin_memcpy. */
1748 if (GET_CODE (size) == CONST_INT)
1749 {
1750 x = shallow_copy_rtx (x);
1751 y = shallow_copy_rtx (y);
1752 set_mem_size (x, size);
1753 set_mem_size (y, size);
1754 }
1755
fbe1758d 1756 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
bbf6f052 1757 move_by_pieces (x, y, INTVAL (size), align);
4ca79136
RH
1758 else if (emit_block_move_via_movstr (x, y, size, align))
1759 ;
44bb111a 1760 else if (may_use_call)
4ca79136 1761 retval = emit_block_move_via_libcall (x, y, size);
44bb111a
RH
1762 else
1763 emit_block_move_via_loop (x, y, size, align);
1764
1765 if (method == BLOCK_OP_CALL_PARM)
1766 OK_DEFER_POP;
266007a7 1767
4ca79136
RH
1768 return retval;
1769}
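/* Editor's note: a minimal usage sketch, not from the original source.
   DST, SRC and N are hypothetical; DST and SRC are BLKmode MEMs built by
   the caller.  The expander then chooses between move_by_pieces, a movstr
   pattern, and the memcpy/bcopy libcall.  */
#if 0
static void
copy_block_example (dst, src, n)
     rtx dst, src;
     HOST_WIDE_INT n;
{
  /* An ordinary copy; may fall back to the libcall.  */
  emit_block_move (dst, src, GEN_INT (n), BLOCK_OP_NORMAL);

  /* While outgoing arguments are being pushed, only allow the libcall
     if it provably cannot clobber already-pushed argument slots.  */
  emit_block_move (dst, src, GEN_INT (n), BLOCK_OP_CALL_PARM);
}
#endif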
266007a7 1770
44bb111a
RH
1771/* A subroutine of emit_block_move. Returns true if calling the
1772 block move libcall will not clobber any parameters which may have
1773 already been placed on the stack. */
1774
1775static bool
1776block_move_libcall_safe_for_call_parm ()
1777{
1778 if (PUSH_ARGS)
1779 return true;
1780 else
1781 {
1782 /* Check to see whether memcpy takes all register arguments. */
1783 static enum {
1784 takes_regs_uninit, takes_regs_no, takes_regs_yes
1785 } takes_regs = takes_regs_uninit;
1786
1787 switch (takes_regs)
1788 {
1789 case takes_regs_uninit:
1790 {
1791 CUMULATIVE_ARGS args_so_far;
1792 tree fn, arg;
1793
1794 fn = emit_block_move_libcall_fn (false);
1795 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1796
1797 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1798 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1799 {
98c0d8d1 1800 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
44bb111a
RH
1801 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1802 if (!tmp || !REG_P (tmp))
1803 goto fail_takes_regs;
1804#ifdef FUNCTION_ARG_PARTIAL_NREGS
1805 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1806 NULL_TREE, 1))
1807 goto fail_takes_regs;
1808#endif
1809 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1810 }
1811 }
1812 takes_regs = takes_regs_yes;
1813 /* FALLTHRU */
1814
1815 case takes_regs_yes:
1816 return true;
1817
1818 fail_takes_regs:
1819 takes_regs = takes_regs_no;
1820 /* FALLTHRU */
1821 case takes_regs_no:
1822 return false;
1823
1824 default:
1825 abort ();
1826 }
1827 }
1828}
1829
4ca79136
RH
1830/* A subroutine of emit_block_move. Expand a movstr pattern;
1831 return true if successful. */
3ef1eef4 1832
4ca79136
RH
1833static bool
1834emit_block_move_via_movstr (x, y, size, align)
1835 rtx x, y, size;
1836 unsigned int align;
1837{
1838 /* Try the most limited insn first, because there's no point
1839 including more than one in the machine description unless
1840 the more limited one has some advantage. */
266007a7 1841
4ca79136
RH
1842 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1843 enum machine_mode mode;
266007a7 1844
4ca79136
RH
1845 /* Since this is a move insn, we don't care about volatility. */
1846 volatile_ok = 1;
1847
1848 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1849 mode = GET_MODE_WIDER_MODE (mode))
1850 {
1851 enum insn_code code = movstr_optab[(int) mode];
1852 insn_operand_predicate_fn pred;
1853
1854 if (code != CODE_FOR_nothing
1855 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1856 here because if SIZE is less than the mode mask, as it is
1857 returned by the macro, it will definitely be less than the
1858 actual mode mask. */
1859 && ((GET_CODE (size) == CONST_INT
1860 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1861 <= (GET_MODE_MASK (mode) >> 1)))
1862 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1863 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1864 || (*pred) (x, BLKmode))
1865 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1866 || (*pred) (y, BLKmode))
1867 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1868 || (*pred) (opalign, VOIDmode)))
1869 {
1870 rtx op2;
1871 rtx last = get_last_insn ();
1872 rtx pat;
1873
1874 op2 = convert_to_mode (mode, size, 1);
1875 pred = insn_data[(int) code].operand[2].predicate;
1876 if (pred != 0 && ! (*pred) (op2, mode))
1877 op2 = copy_to_mode_reg (mode, op2);
1878
1879 /* ??? When called via emit_block_move_for_call, it'd be
1880 nice if there were some way to inform the backend, so
1881 that it doesn't fail the expansion because it thinks
1882 emitting the libcall would be more efficient. */
1883
1884 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1885 if (pat)
1886 {
1887 emit_insn (pat);
1888 volatile_ok = 0;
1889 return true;
bbf6f052 1890 }
4ca79136
RH
1891 else
1892 delete_insns_since (last);
bbf6f052 1893 }
4ca79136 1894 }
bbf6f052 1895
4ca79136
RH
1896 volatile_ok = 0;
1897 return false;
1898}
3ef1eef4 1899
4ca79136
RH
1900/* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1901 Return the return value from memcpy, 0 otherwise. */
4bc973ae 1902
4ca79136
RH
1903static rtx
1904emit_block_move_via_libcall (dst, src, size)
1905 rtx dst, src, size;
1906{
1907 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1908 enum machine_mode size_mode;
1909 rtx retval;
4bc973ae 1910
4ca79136 1911 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
4bc973ae 1912
4ca79136
RH
1913 It is unsafe to save the value generated by protect_from_queue
1914 and reuse it later. Consider what happens if emit_queue is
1915 called before the return value from protect_from_queue is used.
4bc973ae 1916
4ca79136
RH
1917 Expansion of the CALL_EXPR below will call emit_queue before
1918 we are finished emitting RTL for argument setup. So if we are
1919 not careful we could get the wrong value for an argument.
4bc973ae 1920
4ca79136
RH
 1921 To avoid this problem we go ahead and emit code to copy DST, SRC &
1922 SIZE into new pseudos. We can then place those new pseudos
1923 into an RTL_EXPR and use them later, even after a call to
1924 emit_queue.
4bc973ae 1925
4ca79136
RH
1926 Note this is not strictly needed for library calls since they
1927 do not call emit_queue before loading their arguments. However,
1928 we may need to have library calls call emit_queue in the future
1929 since failing to do so could cause problems for targets which
1930 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1931
1932 dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1933 src = copy_to_mode_reg (Pmode, XEXP (src, 0));
1934
1935 if (TARGET_MEM_FUNCTIONS)
1936 size_mode = TYPE_MODE (sizetype);
1937 else
1938 size_mode = TYPE_MODE (unsigned_type_node);
1939 size = convert_to_mode (size_mode, size, 1);
1940 size = copy_to_mode_reg (size_mode, size);
1941
1942 /* It is incorrect to use the libcall calling conventions to call
1943 memcpy in this context. This could be a user call to memcpy and
1944 the user may wish to examine the return value from memcpy. For
1945 targets where libcalls and normal calls have different conventions
1946 for returning pointers, we could end up generating incorrect code.
1947
1948 For convenience, we generate the call to bcopy this way as well. */
1949
1950 dst_tree = make_tree (ptr_type_node, dst);
1951 src_tree = make_tree (ptr_type_node, src);
1952 if (TARGET_MEM_FUNCTIONS)
1953 size_tree = make_tree (sizetype, size);
1954 else
1955 size_tree = make_tree (unsigned_type_node, size);
1956
1957 fn = emit_block_move_libcall_fn (true);
1958 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1959 if (TARGET_MEM_FUNCTIONS)
1960 {
1961 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1962 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1963 }
1964 else
1965 {
1966 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1967 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1968 }
1969
1970 /* Now we have to build up the CALL_EXPR itself. */
1971 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1972 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1973 call_expr, arg_list, NULL_TREE);
1974 TREE_SIDE_EFFECTS (call_expr) = 1;
1975
1976 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1977
1978 /* If we are initializing a readonly value, show the above call
1979 clobbered it. Otherwise, a load from it may erroneously be
1980 hoisted from a loop. */
1981 if (RTX_UNCHANGING_P (dst))
1982 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
1983
1984 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
1985}
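/* Editor's note: at the source level, the CALL_EXPR built above is
   equivalent to one of the two calls below (a sketch; dst, src and size
   name the fresh pseudos copied above).  Only the memcpy form has a
   return value worth propagating.  */
#if 0
  retval = memcpy (dst, src, size);   /* TARGET_MEM_FUNCTIONS */
  bcopy (src, dst, size);             /* otherwise; operands swapped */
#endif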
52cf7115 1986
4ca79136
RH
1987/* A subroutine of emit_block_move_via_libcall. Create the tree node
1988 for the function we use for block copies. The first time FOR_CALL
1989 is true, we call assemble_external. */
52cf7115 1990
4ca79136
RH
1991static GTY(()) tree block_move_fn;
1992
1993static tree
1994emit_block_move_libcall_fn (for_call)
1995 int for_call;
1996{
1997 static bool emitted_extern;
1998 tree fn = block_move_fn, args;
52cf7115 1999
4ca79136
RH
2000 if (!fn)
2001 {
2002 if (TARGET_MEM_FUNCTIONS)
52cf7115 2003 {
4ca79136
RH
2004 fn = get_identifier ("memcpy");
2005 args = build_function_type_list (ptr_type_node, ptr_type_node,
2006 const_ptr_type_node, sizetype,
2007 NULL_TREE);
2008 }
2009 else
2010 {
2011 fn = get_identifier ("bcopy");
2012 args = build_function_type_list (void_type_node, const_ptr_type_node,
2013 ptr_type_node, unsigned_type_node,
2014 NULL_TREE);
52cf7115
JL
2015 }
2016
4ca79136
RH
2017 fn = build_decl (FUNCTION_DECL, fn, args);
2018 DECL_EXTERNAL (fn) = 1;
2019 TREE_PUBLIC (fn) = 1;
2020 DECL_ARTIFICIAL (fn) = 1;
2021 TREE_NOTHROW (fn) = 1;
66c60e67 2022
4ca79136 2023 block_move_fn = fn;
bbf6f052 2024 }
e9a25f70 2025
4ca79136
RH
2026 if (for_call && !emitted_extern)
2027 {
2028 emitted_extern = true;
2029 make_decl_rtl (fn, NULL);
2030 assemble_external (fn);
2031 }
2032
2033 return fn;
bbf6f052 2034}
44bb111a
RH
2035
2036/* A subroutine of emit_block_move. Copy the data via an explicit
2037 loop. This is used only when libcalls are forbidden. */
2038/* ??? It'd be nice to copy in hunks larger than QImode. */
2039
2040static void
2041emit_block_move_via_loop (x, y, size, align)
2042 rtx x, y, size;
2043 unsigned int align ATTRIBUTE_UNUSED;
2044{
2045 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2046 enum machine_mode iter_mode;
2047
2048 iter_mode = GET_MODE (size);
2049 if (iter_mode == VOIDmode)
2050 iter_mode = word_mode;
2051
2052 top_label = gen_label_rtx ();
2053 cmp_label = gen_label_rtx ();
2054 iter = gen_reg_rtx (iter_mode);
2055
2056 emit_move_insn (iter, const0_rtx);
2057
2058 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2059 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2060 do_pending_stack_adjust ();
2061
2062 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2063
2064 emit_jump (cmp_label);
2065 emit_label (top_label);
2066
2067 tmp = convert_modes (Pmode, iter_mode, iter, true);
2068 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2069 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2070 x = change_address (x, QImode, x_addr);
2071 y = change_address (y, QImode, y_addr);
2072
2073 emit_move_insn (x, y);
2074
2075 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2076 true, OPTAB_LIB_WIDEN);
2077 if (tmp != iter)
2078 emit_move_insn (iter, tmp);
2079
2080 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2081 emit_label (cmp_label);
2082
2083 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2084 true, top_label);
2085
2086 emit_note (NULL, NOTE_INSN_LOOP_END);
2087}
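/* Editor's note: the RTL emitted above corresponds to the C loop below
   (a sketch; x, y and size stand for the block addresses and byte count).
   The comparison sits at the bottom and is reached first via the jump to
   CMP_LABEL, so a zero-length copy falls straight through.  */
#if 0
  unsigned long iter = 0;
  goto cmp;
 top:
  ((char *) x)[iter] = ((const char *) y)[iter];
  iter++;
 cmp:
  if (iter < size)
    goto top;
#endif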
bbf6f052
RK
2088\f
2089/* Copy all or part of a value X into registers starting at REGNO.
2090 The number of registers to be filled is NREGS. */
2091
2092void
2093move_block_to_reg (regno, x, nregs, mode)
2094 int regno;
2095 rtx x;
2096 int nregs;
2097 enum machine_mode mode;
2098{
2099 int i;
381127e8 2100#ifdef HAVE_load_multiple
3a94c984 2101 rtx pat;
381127e8
RL
2102 rtx last;
2103#endif
bbf6f052 2104
72bb9717
RK
2105 if (nregs == 0)
2106 return;
2107
bbf6f052
RK
2108 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2109 x = validize_mem (force_const_mem (mode, x));
2110
2111 /* See if the machine can do this with a load multiple insn. */
2112#ifdef HAVE_load_multiple
c3a02afe 2113 if (HAVE_load_multiple)
bbf6f052 2114 {
c3a02afe 2115 last = get_last_insn ();
38a448ca 2116 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
c3a02afe
RK
2117 GEN_INT (nregs));
2118 if (pat)
2119 {
2120 emit_insn (pat);
2121 return;
2122 }
2123 else
2124 delete_insns_since (last);
bbf6f052 2125 }
bbf6f052
RK
2126#endif
2127
2128 for (i = 0; i < nregs; i++)
38a448ca 2129 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
bbf6f052
RK
2130 operand_subword_force (x, i, mode));
2131}
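/* Editor's note: a usage sketch, not from the original source.  The
   register number and mode are hypothetical: load the DImode value VAL
   into the two consecutive hard registers starting at register 4.  */
#if 0
  move_block_to_reg (4, val, 2, DImode);
#endif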
2132
2133/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
0040593d
JW
2134 The number of registers to be filled is NREGS. SIZE indicates the number
2135 of bytes in the object X. */
2136
bbf6f052 2137void
0040593d 2138move_block_from_reg (regno, x, nregs, size)
bbf6f052
RK
2139 int regno;
2140 rtx x;
2141 int nregs;
0040593d 2142 int size;
bbf6f052
RK
2143{
2144 int i;
381127e8 2145#ifdef HAVE_store_multiple
3a94c984 2146 rtx pat;
381127e8
RL
2147 rtx last;
2148#endif
58a32c5c 2149 enum machine_mode mode;
bbf6f052 2150
2954d7db
RK
2151 if (nregs == 0)
2152 return;
2153
58a32c5c
DE
2154 /* If SIZE is that of a mode no bigger than a word, just use that
2155 mode's store operation. */
2156 if (size <= UNITS_PER_WORD
9ac3e73b 2157 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
58a32c5c 2158 {
792760b9 2159 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
58a32c5c
DE
2160 return;
2161 }
3a94c984 2162
0040593d 2163 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
58a32c5c
DE
2164 to the left before storing to memory. Note that the previous test
2165 doesn't handle all cases (e.g. SIZE == 3). */
9ac3e73b 2166 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
0040593d
JW
2167 {
2168 rtx tem = operand_subword (x, 0, 1, BLKmode);
2169 rtx shift;
2170
2171 if (tem == 0)
2172 abort ();
2173
2174 shift = expand_shift (LSHIFT_EXPR, word_mode,
38a448ca 2175 gen_rtx_REG (word_mode, regno),
0040593d
JW
2176 build_int_2 ((UNITS_PER_WORD - size)
2177 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2178 emit_move_insn (tem, shift);
2179 return;
2180 }
2181
bbf6f052
RK
2182 /* See if the machine can do this with a store multiple insn. */
2183#ifdef HAVE_store_multiple
c3a02afe 2184 if (HAVE_store_multiple)
bbf6f052 2185 {
c3a02afe 2186 last = get_last_insn ();
38a448ca 2187 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
c3a02afe
RK
2188 GEN_INT (nregs));
2189 if (pat)
2190 {
2191 emit_insn (pat);
2192 return;
2193 }
2194 else
2195 delete_insns_since (last);
bbf6f052 2196 }
bbf6f052
RK
2197#endif
2198
2199 for (i = 0; i < nregs; i++)
2200 {
2201 rtx tem = operand_subword (x, i, 1, BLKmode);
2202
2203 if (tem == 0)
2204 abort ();
2205
38a448ca 2206 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
2207 }
2208}
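/* Editor's note: a worked example of the BYTES_BIG_ENDIAN shift above,
   not from the original source.  With UNITS_PER_WORD == 4 and SIZE == 3,
   the block must end up in the three high-order bytes of the word, so the
   register is shifted left by (4 - 3) * BITS_PER_UNIT == 8 bits before
   the whole word is stored.  The call below is hypothetical.  */
#if 0
  move_block_from_reg (28, blk_mem, 1, 3);  /* 3-byte BLKmode value in reg 28 */
#endif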
2209
084a1106
JDA
2210/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2211 ORIG, where ORIG is a non-consecutive group of registers represented by
 2212 a PARALLEL. The clone is identical to the original except that the
2213 original set of registers is replaced by a new set of pseudo registers.
2214 The new set has the same modes as the original set. */
2215
2216rtx
2217gen_group_rtx (orig)
2218 rtx orig;
2219{
2220 int i, length;
2221 rtx *tmps;
2222
2223 if (GET_CODE (orig) != PARALLEL)
2224 abort ();
2225
2226 length = XVECLEN (orig, 0);
2227 tmps = (rtx *) alloca (sizeof (rtx) * length);
2228
2229 /* Skip a NULL entry in first slot. */
2230 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2231
2232 if (i)
2233 tmps[0] = 0;
2234
2235 for (; i < length; i++)
2236 {
2237 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2238 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2239
2240 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2241 }
2242
2243 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2244}
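/* Editor's note: a sketch of the PARALLEL shape this operates on, not
   from the original source.  A value split across two DImode hard regs at
   byte offsets 0 and 8 (Irix-6 style) can be described and then cloned
   into pseudos as follows; the register numbers are hypothetical.  */
#if 0
  rtx grp = gen_rtx_PARALLEL
    (BLKmode,
     gen_rtvec (2,
		gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 4),
				   const0_rtx),
		gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (DImode, 5),
				   GEN_INT (8))));
  rtx pseudos = gen_group_rtx (grp);	/* same layout, fresh pseudos */
#endif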
2245
aac5cc16
RH
2246/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2247 registers represented by a PARALLEL. SSIZE represents the total size of
04050c69 2248 block SRC in bytes, or -1 if not known. */
d6a7951f 2249/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
aac5cc16
RH
2250 the balance will be in what would be the low-order memory addresses, i.e.
2251 left justified for big endian, right justified for little endian. This
2252 happens to be true for the targets currently using this support. If this
2253 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2254 would be needed. */
fffa9c1d
JW
2255
2256void
04050c69 2257emit_group_load (dst, orig_src, ssize)
aac5cc16 2258 rtx dst, orig_src;
729a2125 2259 int ssize;
fffa9c1d 2260{
aac5cc16
RH
2261 rtx *tmps, src;
2262 int start, i;
fffa9c1d 2263
aac5cc16 2264 if (GET_CODE (dst) != PARALLEL)
fffa9c1d
JW
2265 abort ();
2266
2267 /* Check for a NULL entry, used to indicate that the parameter goes
2268 both on the stack and in registers. */
aac5cc16
RH
2269 if (XEXP (XVECEXP (dst, 0, 0), 0))
2270 start = 0;
fffa9c1d 2271 else
aac5cc16
RH
2272 start = 1;
2273
3a94c984 2274 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
aac5cc16 2275
aac5cc16
RH
2276 /* Process the pieces. */
2277 for (i = start; i < XVECLEN (dst, 0); i++)
2278 {
2279 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
770ae6cc
RK
2280 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2281 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
2282 int shift = 0;
2283
2284 /* Handle trailing fragments that run over the size of the struct. */
8752c357 2285 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
aac5cc16
RH
2286 {
2287 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2288 bytelen = ssize - bytepos;
2289 if (bytelen <= 0)
729a2125 2290 abort ();
aac5cc16
RH
2291 }
2292
f3ce87a9
DE
2293 /* If we won't be loading directly from memory, protect the real source
2294 from strange tricks we might play; but make sure that the source can
2295 be loaded directly into the destination. */
2296 src = orig_src;
2297 if (GET_CODE (orig_src) != MEM
2298 && (!CONSTANT_P (orig_src)
2299 || (GET_MODE (orig_src) != mode
2300 && GET_MODE (orig_src) != VOIDmode)))
2301 {
2302 if (GET_MODE (orig_src) == VOIDmode)
2303 src = gen_reg_rtx (mode);
2304 else
2305 src = gen_reg_rtx (GET_MODE (orig_src));
04050c69 2306
f3ce87a9
DE
2307 emit_move_insn (src, orig_src);
2308 }
2309
aac5cc16
RH
2310 /* Optimize the access just a bit. */
2311 if (GET_CODE (src) == MEM
04050c69 2312 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
729a2125 2313 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16
RH
2314 && bytelen == GET_MODE_SIZE (mode))
2315 {
2316 tmps[i] = gen_reg_rtx (mode);
f4ef873c 2317 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
fffa9c1d 2318 }
7c4a6db0
JW
2319 else if (GET_CODE (src) == CONCAT)
2320 {
015b1ad1
JDA
2321 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2322 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2323
2324 if ((bytepos == 0 && bytelen == slen0)
2325 || (bytepos != 0 && bytepos + bytelen <= slen))
cbb92744 2326 {
015b1ad1
JDA
2327 /* The following assumes that the concatenated objects all
2328 have the same size. In this case, a simple calculation
2329 can be used to determine the object and the bit field
2330 to be extracted. */
2331 tmps[i] = XEXP (src, bytepos / slen0);
cbb92744
JJ
2332 if (! CONSTANT_P (tmps[i])
2333 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2334 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
015b1ad1
JDA
2335 (bytepos % slen0) * BITS_PER_UNIT,
2336 1, NULL_RTX, mode, mode, ssize);
cbb92744 2337 }
58f69841
JH
2338 else if (bytepos == 0)
2339 {
015b1ad1 2340 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
58f69841 2341 emit_move_insn (mem, src);
04050c69 2342 tmps[i] = adjust_address (mem, mode, 0);
58f69841 2343 }
7c4a6db0
JW
2344 else
2345 abort ();
2346 }
f3ce87a9 2347 else if (CONSTANT_P (src)
2ee5437b
RH
2348 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2349 tmps[i] = src;
fffa9c1d 2350 else
19caa751
RK
2351 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2352 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
04050c69 2353 mode, mode, ssize);
fffa9c1d 2354
aac5cc16 2355 if (BYTES_BIG_ENDIAN && shift)
19caa751
RK
2356 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2357 tmps[i], 0, OPTAB_WIDEN);
fffa9c1d 2358 }
19caa751 2359
3a94c984 2360 emit_queue ();
aac5cc16
RH
2361
2362 /* Copy the extracted pieces into the proper (probable) hard regs. */
2363 for (i = start; i < XVECLEN (dst, 0); i++)
2364 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
fffa9c1d
JW
2365}
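/* Editor's note: a usage sketch, not from the original source,
   continuing the PARALLEL example after gen_group_rtx above: pull a
   16-byte aggregate out of a BLKmode MEM into the register group.
   struct_mem is hypothetical.  */
#if 0
  emit_group_load (grp, struct_mem, 16);
#endif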
2366
084a1106
JDA
2367/* Emit code to move a block SRC to block DST, where SRC and DST are
2368 non-consecutive groups of registers, each represented by a PARALLEL. */
2369
2370void
2371emit_group_move (dst, src)
2372 rtx dst, src;
2373{
2374 int i;
2375
2376 if (GET_CODE (src) != PARALLEL
2377 || GET_CODE (dst) != PARALLEL
2378 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2379 abort ();
2380
2381 /* Skip first entry if NULL. */
2382 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2383 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2384 XEXP (XVECEXP (src, 0, i), 0));
2385}
2386
aac5cc16
RH
2387/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2388 registers represented by a PARALLEL. SSIZE represents the total size of
04050c69 2389 block DST, or -1 if not known. */
fffa9c1d
JW
2390
2391void
04050c69 2392emit_group_store (orig_dst, src, ssize)
aac5cc16 2393 rtx orig_dst, src;
729a2125 2394 int ssize;
fffa9c1d 2395{
aac5cc16
RH
2396 rtx *tmps, dst;
2397 int start, i;
fffa9c1d 2398
aac5cc16 2399 if (GET_CODE (src) != PARALLEL)
fffa9c1d
JW
2400 abort ();
2401
2402 /* Check for a NULL entry, used to indicate that the parameter goes
2403 both on the stack and in registers. */
aac5cc16
RH
2404 if (XEXP (XVECEXP (src, 0, 0), 0))
2405 start = 0;
fffa9c1d 2406 else
aac5cc16
RH
2407 start = 1;
2408
3a94c984 2409 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
fffa9c1d 2410
aac5cc16
RH
2411 /* Copy the (probable) hard regs into pseudos. */
2412 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 2413 {
aac5cc16
RH
2414 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2415 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2416 emit_move_insn (tmps[i], reg);
2417 }
3a94c984 2418 emit_queue ();
fffa9c1d 2419
aac5cc16
RH
2420 /* If we won't be storing directly into memory, protect the real destination
2421 from strange tricks we might play. */
2422 dst = orig_dst;
10a9f2be
JW
2423 if (GET_CODE (dst) == PARALLEL)
2424 {
2425 rtx temp;
2426
2427 /* We can get a PARALLEL dst if there is a conditional expression in
2428 a return statement. In that case, the dst and src are the same,
2429 so no action is necessary. */
2430 if (rtx_equal_p (dst, src))
2431 return;
2432
2433 /* It is unclear if we can ever reach here, but we may as well handle
2434 it. Allocate a temporary, and split this into a store/load to/from
2435 the temporary. */
2436
2437 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
04050c69
RK
2438 emit_group_store (temp, src, ssize);
2439 emit_group_load (dst, temp, ssize);
10a9f2be
JW
2440 return;
2441 }
75897075 2442 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
aac5cc16
RH
2443 {
2444 dst = gen_reg_rtx (GET_MODE (orig_dst));
2445 /* Make life a bit easier for combine. */
8ae91fc0 2446 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
aac5cc16 2447 }
aac5cc16
RH
2448
2449 /* Process the pieces. */
2450 for (i = start; i < XVECLEN (src, 0); i++)
2451 {
770ae6cc 2452 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
aac5cc16 2453 enum machine_mode mode = GET_MODE (tmps[i]);
770ae6cc 2454 unsigned int bytelen = GET_MODE_SIZE (mode);
6ddae612 2455 rtx dest = dst;
aac5cc16
RH
2456
2457 /* Handle trailing fragments that run over the size of the struct. */
8752c357 2458 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
71bc0330 2459 {
aac5cc16
RH
2460 if (BYTES_BIG_ENDIAN)
2461 {
2462 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2463 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2464 tmps[i], 0, OPTAB_WIDEN);
2465 }
2466 bytelen = ssize - bytepos;
71bc0330 2467 }
fffa9c1d 2468
6ddae612
JJ
2469 if (GET_CODE (dst) == CONCAT)
2470 {
2471 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2472 dest = XEXP (dst, 0);
2473 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2474 {
2475 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2476 dest = XEXP (dst, 1);
2477 }
0d446150
JH
2478 else if (bytepos == 0 && XVECLEN (src, 0))
2479 {
2480 dest = assign_stack_temp (GET_MODE (dest),
2481 GET_MODE_SIZE (GET_MODE (dest)), 0);
2482 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2483 tmps[i]);
2484 dst = dest;
2485 break;
2486 }
6ddae612
JJ
2487 else
2488 abort ();
2489 }
2490
aac5cc16 2491 /* Optimize the access just a bit. */
6ddae612
JJ
2492 if (GET_CODE (dest) == MEM
2493 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
729a2125 2494 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16 2495 && bytelen == GET_MODE_SIZE (mode))
6ddae612 2496 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
aac5cc16 2497 else
6ddae612 2498 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
04050c69 2499 mode, tmps[i], ssize);
fffa9c1d 2500 }
729a2125 2501
3a94c984 2502 emit_queue ();
aac5cc16
RH
2503
2504 /* Copy from the pseudo into the (probable) hard reg. */
0d446150 2505 if (orig_dst != dst)
aac5cc16 2506 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
2507}
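/* Editor's note: the converse usage sketch, not from the original
   source: scatter a PARALLEL (e.g. a function value returned in several
   registers) back into a 16-byte BLKmode destination.  */
#if 0
  emit_group_store (struct_mem, grp, 16);
#endif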
2508
c36fce9a
GRK
2509/* Generate code to copy a BLKmode object of TYPE out of a
2510 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2511 is null, a stack temporary is created. TGTBLK is returned.
2512
2513 The primary purpose of this routine is to handle functions
2514 that return BLKmode structures in registers. Some machines
2515 (the PA for example) want to return all small structures
3a94c984 2516 in registers regardless of the structure's alignment. */
c36fce9a
GRK
2517
2518rtx
19caa751 2519copy_blkmode_from_reg (tgtblk, srcreg, type)
c36fce9a
GRK
2520 rtx tgtblk;
2521 rtx srcreg;
2522 tree type;
2523{
19caa751
RK
2524 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2525 rtx src = NULL, dst = NULL;
2526 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2527 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2528
2529 if (tgtblk == 0)
2530 {
1da68f56
RK
2531 tgtblk = assign_temp (build_qualified_type (type,
2532 (TYPE_QUALS (type)
2533 | TYPE_QUAL_CONST)),
2534 0, 1, 1);
19caa751
RK
2535 preserve_temp_slots (tgtblk);
2536 }
3a94c984 2537
1ed1b4fb 2538 /* This code assumes srcreg is at least a full word. If it isn't, copy it
9ac3e73b 2539 into a new pseudo which is a full word. */
0d7839da 2540
19caa751
RK
2541 if (GET_MODE (srcreg) != BLKmode
2542 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
9ac3e73b 2543 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
19caa751
RK
2544
2545 /* Structures whose size is not a multiple of a word are aligned
2546 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2547 machine, this means we must skip the empty high order bytes when
2548 calculating the bit offset. */
0d7839da 2549 if (BYTES_BIG_ENDIAN
0d7839da 2550 && bytes % UNITS_PER_WORD)
19caa751
RK
2551 big_endian_correction
2552 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2553
 2554 /* Copy the structure BITSIZE bits at a time.
3a94c984 2555
19caa751
RK
2556 We could probably emit more efficient code for machines which do not use
2557 strict alignment, but it doesn't seem worth the effort at the current
2558 time. */
2559 for (bitpos = 0, xbitpos = big_endian_correction;
2560 bitpos < bytes * BITS_PER_UNIT;
2561 bitpos += bitsize, xbitpos += bitsize)
2562 {
3a94c984 2563 /* We need a new source operand each time xbitpos is on a
19caa751
RK
2564 word boundary and when xbitpos == big_endian_correction
2565 (the first time through). */
2566 if (xbitpos % BITS_PER_WORD == 0
2567 || xbitpos == big_endian_correction)
b47f8cfc
JH
2568 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2569 GET_MODE (srcreg));
19caa751
RK
2570
2571 /* We need a new destination operand each time bitpos is on
2572 a word boundary. */
2573 if (bitpos % BITS_PER_WORD == 0)
2574 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
3a94c984 2575
19caa751
RK
2576 /* Use xbitpos for the source extraction (right justified) and
 2577 bitpos for the destination store (left justified). */
2578 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2579 extract_bit_field (src, bitsize,
2580 xbitpos % BITS_PER_WORD, 1,
2581 NULL_RTX, word_mode, word_mode,
04050c69
RK
2582 BITS_PER_WORD),
2583 BITS_PER_WORD);
19caa751
RK
2584 }
2585
2586 return tgtblk;
c36fce9a
GRK
2587}
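/* Editor's note: a usage sketch, not from the original source.  For a
   call whose BLKmode result of type TYPE arrives in registers, with
   RESULT_REG (hypothetical) holding the value, the caller can
   materialize it in a fresh stack temporary like so.  */
#if 0
  rtx blk = copy_blkmode_from_reg (NULL_RTX, result_reg, type);
#endif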
2588
94b25f81
RK
2589/* Add a USE expression for REG to the (possibly empty) list pointed
2590 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2591
2592void
b3f8cf4a
RK
2593use_reg (call_fusage, reg)
2594 rtx *call_fusage, reg;
2595{
0304dfbb
DE
2596 if (GET_CODE (reg) != REG
2597 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3a94c984 2598 abort ();
b3f8cf4a
RK
2599
2600 *call_fusage
38a448ca
RH
2601 = gen_rtx_EXPR_LIST (VOIDmode,
2602 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2603}
2604
94b25f81
RK
2605/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2606 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2607
2608void
0304dfbb
DE
2609use_regs (call_fusage, regno, nregs)
2610 rtx *call_fusage;
bbf6f052
RK
2611 int regno;
2612 int nregs;
2613{
0304dfbb 2614 int i;
bbf6f052 2615
0304dfbb
DE
2616 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2617 abort ();
2618
2619 for (i = 0; i < nregs; i++)
e50126e8 2620 use_reg (call_fusage, regno_reg_rtx[regno + i]);
bbf6f052 2621}
fffa9c1d
JW
2622
2623/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2624 PARALLEL REGS. This is for calls that pass values in multiple
2625 non-contiguous locations. The Irix 6 ABI has examples of this. */
2626
2627void
2628use_group_regs (call_fusage, regs)
2629 rtx *call_fusage;
2630 rtx regs;
2631{
2632 int i;
2633
6bd35f86
DE
2634 for (i = 0; i < XVECLEN (regs, 0); i++)
2635 {
2636 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2637
6bd35f86
DE
2638 /* A NULL entry means the parameter goes both on the stack and in
2639 registers. This can also be a MEM for targets that pass values
2640 partially on the stack and partially in registers. */
e9a25f70 2641 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
2642 use_reg (call_fusage, reg);
2643 }
fffa9c1d 2644}
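/* Editor's note: a sketch of how these lists are consumed, not from the
   original source.  A call emitter accumulates the USEs and hangs them
   off the CALL_INSN so data-flow sees the argument registers; the
   register numbers below are hypothetical.  */
#if 0
  rtx call_fusage = 0;
  use_reg (&call_fusage, gen_rtx_REG (SImode, 4));  /* one argument reg */
  use_regs (&call_fusage, 5, 2);                    /* hard regs 5 and 6 */
  CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
#endif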
bbf6f052 2645\f
57814e5e 2646
cf5124f6
RS
2647/* Determine whether the LEN bytes generated by CONSTFUN can be
2648 stored to memory using several move instructions. CONSTFUNDATA is
2649 a pointer which will be passed as argument in every CONSTFUN call.
2650 ALIGN is maximum alignment we can assume. Return nonzero if a
2651 call to store_by_pieces should succeed. */
2652
57814e5e
JJ
2653int
2654can_store_by_pieces (len, constfun, constfundata, align)
2655 unsigned HOST_WIDE_INT len;
2656 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2657 PTR constfundata;
2658 unsigned int align;
2659{
98166639 2660 unsigned HOST_WIDE_INT max_size, l;
57814e5e
JJ
2661 HOST_WIDE_INT offset = 0;
2662 enum machine_mode mode, tmode;
2663 enum insn_code icode;
2664 int reverse;
2665 rtx cst;
2666
4977bab6 2667 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2668 return 0;
2669
2670 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2671 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2672 align = MOVE_MAX * BITS_PER_UNIT;
2673
2674 /* We would first store what we can in the largest integer mode, then go to
2675 successively smaller modes. */
2676
2677 for (reverse = 0;
2678 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2679 reverse++)
2680 {
2681 l = len;
2682 mode = VOIDmode;
cf5124f6 2683 max_size = STORE_MAX_PIECES + 1;
57814e5e
JJ
2684 while (max_size > 1)
2685 {
2686 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2687 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2688 if (GET_MODE_SIZE (tmode) < max_size)
2689 mode = tmode;
2690
2691 if (mode == VOIDmode)
2692 break;
2693
2694 icode = mov_optab->handlers[(int) mode].insn_code;
2695 if (icode != CODE_FOR_nothing
2696 && align >= GET_MODE_ALIGNMENT (mode))
2697 {
2698 unsigned int size = GET_MODE_SIZE (mode);
2699
2700 while (l >= size)
2701 {
2702 if (reverse)
2703 offset -= size;
2704
2705 cst = (*constfun) (constfundata, offset, mode);
2706 if (!LEGITIMATE_CONSTANT_P (cst))
2707 return 0;
2708
2709 if (!reverse)
2710 offset += size;
2711
2712 l -= size;
2713 }
2714 }
2715
2716 max_size = GET_MODE_SIZE (mode);
2717 }
2718
2719 /* The code above should have handled everything. */
2720 if (l != 0)
2721 abort ();
2722 }
2723
2724 return 1;
2725}
2726
2727/* Generate several move instructions to store LEN bytes generated by
2728 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2729 pointer which will be passed as argument in every CONSTFUN call.
2730 ALIGN is maximum alignment we can assume. */
2731
2732void
2733store_by_pieces (to, len, constfun, constfundata, align)
2734 rtx to;
2735 unsigned HOST_WIDE_INT len;
2736 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2737 PTR constfundata;
2738 unsigned int align;
2739{
2740 struct store_by_pieces data;
2741
4977bab6 2742 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2743 abort ();
2744 to = protect_from_queue (to, 1);
2745 data.constfun = constfun;
2746 data.constfundata = constfundata;
2747 data.len = len;
2748 data.to = to;
2749 store_by_pieces_1 (&data, align);
2750}
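/* Editor's note: a sketch of a CONSTFUN, not from the original source,
   in the style of builtin_memcpy_read_str in builtins.c: read MODE-sized
   pieces of a constant string at OFFSET (c_readstr is static to
   builtins.c, so this is exposition only), guard with
   can_store_by_pieces, then emit the stores.  */
#if 0
static rtx
read_str_example (data, offset, mode)
     PTR data;
     HOST_WIDE_INT offset;
     enum machine_mode mode;
{
  return c_readstr ((const char *) data + offset, mode);
}

  /* ... later, with DST a BLKmode MEM and STR/LEN/ALIGN in hand ...  */
  if (can_store_by_pieces (len, read_str_example, (PTR) str, align))
    store_by_pieces (dst, len, read_str_example, (PTR) str, align);
#endif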
2751
19caa751
RK
2752/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2753 rtx with BLKmode). The caller must pass TO through protect_from_queue
2754 before calling. ALIGN is maximum alignment we can assume. */
9de08200
RK
2755
2756static void
2757clear_by_pieces (to, len, align)
2758 rtx to;
3bdf5ad1 2759 unsigned HOST_WIDE_INT len;
729a2125 2760 unsigned int align;
9de08200 2761{
57814e5e
JJ
2762 struct store_by_pieces data;
2763
2764 data.constfun = clear_by_pieces_1;
df4ae160 2765 data.constfundata = NULL;
57814e5e
JJ
2766 data.len = len;
2767 data.to = to;
2768 store_by_pieces_1 (&data, align);
2769}
2770
2771/* Callback routine for clear_by_pieces.
2772 Return const0_rtx unconditionally. */
2773
2774static rtx
2775clear_by_pieces_1 (data, offset, mode)
2776 PTR data ATTRIBUTE_UNUSED;
2777 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2778 enum machine_mode mode ATTRIBUTE_UNUSED;
2779{
2780 return const0_rtx;
2781}
2782
2783/* Subroutine of clear_by_pieces and store_by_pieces.
2784 Generate several move instructions to store LEN bytes of block TO. (A MEM
2785 rtx with BLKmode). The caller must pass TO through protect_from_queue
2786 before calling. ALIGN is maximum alignment we can assume. */
2787
2788static void
2789store_by_pieces_1 (data, align)
2790 struct store_by_pieces *data;
2791 unsigned int align;
2792{
2793 rtx to_addr = XEXP (data->to, 0);
cf5124f6 2794 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
fbe1758d
AM
2795 enum machine_mode mode = VOIDmode, tmode;
2796 enum insn_code icode;
9de08200 2797
57814e5e
JJ
2798 data->offset = 0;
2799 data->to_addr = to_addr;
2800 data->autinc_to
9de08200
RK
2801 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2802 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2803
57814e5e
JJ
2804 data->explicit_inc_to = 0;
2805 data->reverse
9de08200 2806 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
57814e5e
JJ
2807 if (data->reverse)
2808 data->offset = data->len;
9de08200 2809
57814e5e 2810 /* If storing requires more than two move insns,
9de08200
RK
2811 copy addresses to registers (to make displacements shorter)
2812 and use post-increment if available. */
57814e5e
JJ
2813 if (!data->autinc_to
2814 && move_by_pieces_ninsns (data->len, align) > 2)
9de08200 2815 {
3a94c984 2816 /* Determine the main mode we'll be using. */
fbe1758d
AM
2817 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2818 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2819 if (GET_MODE_SIZE (tmode) < max_size)
2820 mode = tmode;
2821
57814e5e 2822 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
9de08200 2823 {
57814e5e
JJ
2824 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2825 data->autinc_to = 1;
2826 data->explicit_inc_to = -1;
9de08200 2827 }
3bdf5ad1 2828
57814e5e
JJ
2829 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2830 && ! data->autinc_to)
9de08200 2831 {
57814e5e
JJ
2832 data->to_addr = copy_addr_to_reg (to_addr);
2833 data->autinc_to = 1;
2834 data->explicit_inc_to = 1;
9de08200 2835 }
3bdf5ad1 2836
57814e5e
JJ
2837 if ( !data->autinc_to && CONSTANT_P (to_addr))
2838 data->to_addr = copy_addr_to_reg (to_addr);
9de08200
RK
2839 }
2840
e1565e65 2841 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751 2842 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
bdb429a5 2843 align = MOVE_MAX * BITS_PER_UNIT;
9de08200 2844
57814e5e 2845 /* First store what we can in the largest integer mode, then go to
9de08200
RK
2846 successively smaller modes. */
2847
2848 while (max_size > 1)
2849 {
9de08200
RK
2850 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2851 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2852 if (GET_MODE_SIZE (tmode) < max_size)
2853 mode = tmode;
2854
2855 if (mode == VOIDmode)
2856 break;
2857
2858 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 2859 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
57814e5e 2860 store_by_pieces_2 (GEN_FCN (icode), mode, data);
9de08200
RK
2861
2862 max_size = GET_MODE_SIZE (mode);
2863 }
2864
2865 /* The code above should have handled everything. */
57814e5e 2866 if (data->len != 0)
9de08200
RK
2867 abort ();
2868}
2869
57814e5e 2870/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
9de08200
RK
2871 with move instructions for mode MODE. GENFUN is the gen_... function
2872 to make a move insn for that mode. DATA has all the other info. */
2873
2874static void
57814e5e 2875store_by_pieces_2 (genfun, mode, data)
711d877c 2876 rtx (*genfun) PARAMS ((rtx, ...));
9de08200 2877 enum machine_mode mode;
57814e5e 2878 struct store_by_pieces *data;
9de08200 2879{
3bdf5ad1 2880 unsigned int size = GET_MODE_SIZE (mode);
57814e5e 2881 rtx to1, cst;
9de08200
RK
2882
2883 while (data->len >= size)
2884 {
3bdf5ad1
RK
2885 if (data->reverse)
2886 data->offset -= size;
9de08200 2887
3bdf5ad1 2888 if (data->autinc_to)
630036c6
JJ
2889 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2890 data->offset);
3a94c984 2891 else
f4ef873c 2892 to1 = adjust_address (data->to, mode, data->offset);
9de08200 2893
940da324 2894 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
57814e5e
JJ
2895 emit_insn (gen_add2_insn (data->to_addr,
2896 GEN_INT (-(HOST_WIDE_INT) size)));
9de08200 2897
57814e5e
JJ
2898 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2899 emit_insn ((*genfun) (to1, cst));
3bdf5ad1 2900
940da324 2901 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2902 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
9de08200 2903
3bdf5ad1
RK
2904 if (! data->reverse)
2905 data->offset += size;
9de08200
RK
2906
2907 data->len -= size;
2908 }
2909}
2910\f
19caa751 2911/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
8ac61af7 2912 its length in bytes. */
e9a25f70
JL
2913
2914rtx
8ac61af7 2915clear_storage (object, size)
bbf6f052 2916 rtx object;
4c08eef0 2917 rtx size;
bbf6f052 2918{
e9a25f70 2919 rtx retval = 0;
8ac61af7
RK
2920 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2921 : GET_MODE_ALIGNMENT (GET_MODE (object)));
e9a25f70 2922
fcf1b822
RK
2923 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2924 just move a zero. Otherwise, do this a piece at a time. */
69ef87e2 2925 if (GET_MODE (object) != BLKmode
fcf1b822 2926 && GET_CODE (size) == CONST_INT
4ca79136 2927 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
fcf1b822
RK
2928 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2929 else
bbf6f052 2930 {
9de08200
RK
2931 object = protect_from_queue (object, 1);
2932 size = protect_from_queue (size, 0);
2933
2934 if (GET_CODE (size) == CONST_INT
78762e3b 2935 && CLEAR_BY_PIECES_P (INTVAL (size), align))
9de08200 2936 clear_by_pieces (object, INTVAL (size), align);
4ca79136
RH
2937 else if (clear_storage_via_clrstr (object, size, align))
2938 ;
9de08200 2939 else
4ca79136
RH
2940 retval = clear_storage_via_libcall (object, size);
2941 }
2942
2943 return retval;
2944}
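/* Editor's note: a usage sketch, not from the original source: zero a
   32-byte stack temporary.  The expander picks clear_by_pieces, a clrstr
   pattern, or the memset/bzero libcall as appropriate.  */
#if 0
  rtx tmp = assign_stack_temp (BLKmode, 32, 0);
  clear_storage (tmp, GEN_INT (32));
#endif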
2945
2946/* A subroutine of clear_storage. Expand a clrstr pattern;
2947 return true if successful. */
2948
2949static bool
2950clear_storage_via_clrstr (object, size, align)
2951 rtx object, size;
2952 unsigned int align;
2953{
2954 /* Try the most limited insn first, because there's no point
2955 including more than one in the machine description unless
2956 the more limited one has some advantage. */
2957
2958 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2959 enum machine_mode mode;
2960
2961 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2962 mode = GET_MODE_WIDER_MODE (mode))
2963 {
2964 enum insn_code code = clrstr_optab[(int) mode];
2965 insn_operand_predicate_fn pred;
2966
2967 if (code != CODE_FOR_nothing
2968 /* We don't need MODE to be narrower than
2969 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2970 the mode mask, as it is returned by the macro, it will
2971 definitely be less than the actual mode mask. */
2972 && ((GET_CODE (size) == CONST_INT
2973 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2974 <= (GET_MODE_MASK (mode) >> 1)))
2975 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2976 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2977 || (*pred) (object, BLKmode))
2978 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2979 || (*pred) (opalign, VOIDmode)))
9de08200 2980 {
4ca79136
RH
2981 rtx op1;
2982 rtx last = get_last_insn ();
2983 rtx pat;
9de08200 2984
4ca79136
RH
2985 op1 = convert_to_mode (mode, size, 1);
2986 pred = insn_data[(int) code].operand[1].predicate;
2987 if (pred != 0 && ! (*pred) (op1, mode))
2988 op1 = copy_to_mode_reg (mode, op1);
9de08200 2989
4ca79136
RH
2990 pat = GEN_FCN ((int) code) (object, op1, opalign);
2991 if (pat)
9de08200 2992 {
4ca79136
RH
2993 emit_insn (pat);
2994 return true;
2995 }
2996 else
2997 delete_insns_since (last);
2998 }
2999 }
9de08200 3000
4ca79136
RH
3001 return false;
3002}
9de08200 3003
4ca79136
RH
3004/* A subroutine of clear_storage. Expand a call to memset or bzero.
3005 Return the return value of memset, 0 otherwise. */
9de08200 3006
4ca79136
RH
3007static rtx
3008clear_storage_via_libcall (object, size)
3009 rtx object, size;
3010{
3011 tree call_expr, arg_list, fn, object_tree, size_tree;
3012 enum machine_mode size_mode;
3013 rtx retval;
9de08200 3014
4ca79136 3015 /* OBJECT or SIZE may have been passed through protect_from_queue.
52cf7115 3016
4ca79136
RH
3017 It is unsafe to save the value generated by protect_from_queue
3018 and reuse it later. Consider what happens if emit_queue is
3019 called before the return value from protect_from_queue is used.
52cf7115 3020
4ca79136
RH
3021 Expansion of the CALL_EXPR below will call emit_queue before
3022 we are finished emitting RTL for argument setup. So if we are
3023 not careful we could get the wrong value for an argument.
52cf7115 3024
4ca79136
RH
3025 To avoid this problem we go ahead and emit code to copy OBJECT
3026 and SIZE into new pseudos. We can then place those new pseudos
3027 into an RTL_EXPR and use them later, even after a call to
3028 emit_queue.
52cf7115 3029
4ca79136
RH
3030 Note this is not strictly needed for library calls since they
3031 do not call emit_queue before loading their arguments. However,
3032 we may need to have library calls call emit_queue in the future
3033 since failing to do so could cause problems for targets which
3034 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
52cf7115 3035
4ca79136 3036 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 3037
4ca79136
RH
3038 if (TARGET_MEM_FUNCTIONS)
3039 size_mode = TYPE_MODE (sizetype);
3040 else
3041 size_mode = TYPE_MODE (unsigned_type_node);
3042 size = convert_to_mode (size_mode, size, 1);
3043 size = copy_to_mode_reg (size_mode, size);
52cf7115 3044
4ca79136
RH
3045 /* It is incorrect to use the libcall calling conventions to call
3046 memset in this context. This could be a user call to memset and
3047 the user may wish to examine the return value from memset. For
3048 targets where libcalls and normal calls have different conventions
3049 for returning pointers, we could end up generating incorrect code.
4bc973ae 3050
4ca79136 3051 For convenience, we generate the call to bzero this way as well. */
4bc973ae 3052
4ca79136
RH
3053 object_tree = make_tree (ptr_type_node, object);
3054 if (TARGET_MEM_FUNCTIONS)
3055 size_tree = make_tree (sizetype, size);
3056 else
3057 size_tree = make_tree (unsigned_type_node, size);
3058
3059 fn = clear_storage_libcall_fn (true);
3060 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3061 if (TARGET_MEM_FUNCTIONS)
3062 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3063 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3064
3065 /* Now we have to build up the CALL_EXPR itself. */
3066 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3067 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3068 call_expr, arg_list, NULL_TREE);
3069 TREE_SIDE_EFFECTS (call_expr) = 1;
3070
3071 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3072
3073 /* If we are initializing a readonly value, show the above call
3074 clobbered it. Otherwise, a load from it may erroneously be
3075 hoisted from a loop. */
3076 if (RTX_UNCHANGING_P (object))
3077 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3078
3079 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3080}
3081
3082/* A subroutine of clear_storage_via_libcall. Create the tree node
3083 for the function we use for block clears. The first time FOR_CALL
3084 is true, we call assemble_external. */
3085
3086static GTY(()) tree block_clear_fn;
66c60e67 3087
4ca79136
RH
3088static tree
3089clear_storage_libcall_fn (for_call)
3090 int for_call;
3091{
3092 static bool emitted_extern;
3093 tree fn = block_clear_fn, args;
3094
3095 if (!fn)
3096 {
3097 if (TARGET_MEM_FUNCTIONS)
3098 {
3099 fn = get_identifier ("memset");
3100 args = build_function_type_list (ptr_type_node, ptr_type_node,
3101 integer_type_node, sizetype,
3102 NULL_TREE);
3103 }
3104 else
3105 {
3106 fn = get_identifier ("bzero");
3107 args = build_function_type_list (void_type_node, ptr_type_node,
3108 unsigned_type_node, NULL_TREE);
9de08200 3109 }
4ca79136
RH
3110
3111 fn = build_decl (FUNCTION_DECL, fn, args);
3112 DECL_EXTERNAL (fn) = 1;
3113 TREE_PUBLIC (fn) = 1;
3114 DECL_ARTIFICIAL (fn) = 1;
3115 TREE_NOTHROW (fn) = 1;
3116
3117 block_clear_fn = fn;
bbf6f052 3118 }
e9a25f70 3119
4ca79136
RH
3120 if (for_call && !emitted_extern)
3121 {
3122 emitted_extern = true;
3123 make_decl_rtl (fn, NULL);
3124 assemble_external (fn);
3125 }
bbf6f052 3126
4ca79136
RH
3127 return fn;
3128}
3129\f
bbf6f052
RK
3130/* Generate code to copy Y into X.
3131 Both Y and X must have the same mode, except that
3132 Y can be a constant with VOIDmode.
3133 This mode cannot be BLKmode; use emit_block_move for that.
3134
3135 Return the last instruction emitted. */
3136
3137rtx
3138emit_move_insn (x, y)
3139 rtx x, y;
3140{
3141 enum machine_mode mode = GET_MODE (x);
de1b33dd
AO
3142 rtx y_cst = NULL_RTX;
3143 rtx last_insn;
bbf6f052
RK
3144
3145 x = protect_from_queue (x, 1);
3146 y = protect_from_queue (y, 0);
3147
3148 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3149 abort ();
3150
ee5332b8
RH
3151 /* Never force constant_p_rtx to memory. */
3152 if (GET_CODE (y) == CONSTANT_P_RTX)
3153 ;
51286de6 3154 else if (CONSTANT_P (y))
de1b33dd 3155 {
51286de6 3156 if (optimize
075fc17a 3157 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
51286de6
RH
3158 && (last_insn = compress_float_constant (x, y)))
3159 return last_insn;
3160
3161 if (!LEGITIMATE_CONSTANT_P (y))
3162 {
3163 y_cst = y;
3164 y = force_const_mem (mode, y);
3a04ff64
RH
3165
3166 /* If the target's cannot_force_const_mem prevented the spill,
3167 assume that the target's move expanders will also take care
3168 of the non-legitimate constant. */
3169 if (!y)
3170 y = y_cst;
51286de6 3171 }
de1b33dd 3172 }
bbf6f052
RK
3173
3174 /* If X or Y are memory references, verify that their addresses are valid
3175 for the machine. */
3176 if (GET_CODE (x) == MEM
3177 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3178 && ! push_operand (x, GET_MODE (x)))
3179 || (flag_force_addr
3180 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
792760b9 3181 x = validize_mem (x);
bbf6f052
RK
3182
3183 if (GET_CODE (y) == MEM
3184 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3185 || (flag_force_addr
3186 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
792760b9 3187 y = validize_mem (y);
bbf6f052
RK
3188
3189 if (mode == BLKmode)
3190 abort ();
3191
de1b33dd
AO
3192 last_insn = emit_move_insn_1 (x, y);
3193
3194 if (y_cst && GET_CODE (x) == REG)
3d238248 3195 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
de1b33dd
AO
3196
3197 return last_insn;
261c4230
RS
3198}
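/* Editor's note: a usage sketch, not from the original source: move a
   constant into a fresh pseudo.  A constant that fails
   LEGITIMATE_CONSTANT_P is spilled to the constant pool above, and a
   REG_EQUAL note recording Y is attached when X is a register.  */
#if 0
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
#endif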
3199
3200/* Low level part of emit_move_insn.
3201 Called just like emit_move_insn, but assumes X and Y
3202 are basically valid. */
3203
3204rtx
3205emit_move_insn_1 (x, y)
3206 rtx x, y;
3207{
3208 enum machine_mode mode = GET_MODE (x);
3209 enum machine_mode submode;
3210 enum mode_class class = GET_MODE_CLASS (mode);
261c4230 3211
dbbbbf3b 3212 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3a94c984 3213 abort ();
76bbe028 3214
bbf6f052
RK
3215 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3216 return
3217 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3218
89742723 3219 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 3220 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
27e58a70 3221 && BLKmode != (submode = GET_MODE_INNER (mode))
7308a047
RS
3222 && (mov_optab->handlers[(int) submode].insn_code
3223 != CODE_FOR_nothing))
3224 {
3225 /* Don't split destination if it is a stack push. */
3226 int stack = push_operand (x, GET_MODE (x));
7308a047 3227
79ce92d7 3228#ifdef PUSH_ROUNDING
1a06f5fe
JH
 3229 /* In case we output to the stack, but the size is smaller than what
 3230 the machine can push exactly, we need to use move instructions. */
3231 if (stack
bb93b973
RK
3232 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3233 != GET_MODE_SIZE (submode)))
1a06f5fe
JH
3234 {
3235 rtx temp;
bb93b973 3236 HOST_WIDE_INT offset1, offset2;
1a06f5fe
JH
3237
3238 /* Do not use anti_adjust_stack, since we don't want to update
3239 stack_pointer_delta. */
3240 temp = expand_binop (Pmode,
3241#ifdef STACK_GROWS_DOWNWARD
3242 sub_optab,
3243#else
3244 add_optab,
3245#endif
3246 stack_pointer_rtx,
3247 GEN_INT
bb93b973
RK
3248 (PUSH_ROUNDING
3249 (GET_MODE_SIZE (GET_MODE (x)))),
3250 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3251
1a06f5fe
JH
3252 if (temp != stack_pointer_rtx)
3253 emit_move_insn (stack_pointer_rtx, temp);
bb93b973 3254
1a06f5fe
JH
3255#ifdef STACK_GROWS_DOWNWARD
3256 offset1 = 0;
3257 offset2 = GET_MODE_SIZE (submode);
3258#else
3259 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3260 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3261 + GET_MODE_SIZE (submode));
3262#endif
bb93b973 3263
1a06f5fe
JH
3264 emit_move_insn (change_address (x, submode,
3265 gen_rtx_PLUS (Pmode,
3266 stack_pointer_rtx,
3267 GEN_INT (offset1))),
3268 gen_realpart (submode, y));
3269 emit_move_insn (change_address (x, submode,
3270 gen_rtx_PLUS (Pmode,
3271 stack_pointer_rtx,
3272 GEN_INT (offset2))),
3273 gen_imagpart (submode, y));
3274 }
e9c0bd54 3275 else
79ce92d7 3276#endif
7308a047
RS
 3277 /* If this is a stack push, push the highpart first, so it
3278 will be in the argument order.
3279
3280 In that case, change_address is used only to convert
3281 the mode, not to change the address. */
e9c0bd54 3282 if (stack)
c937357e 3283 {
e33c0d66
RS
3284 /* Note that the real part always precedes the imag part in memory
 3285 regardless of the machine's endianness. */
c937357e
RS
3286#ifdef STACK_GROWS_DOWNWARD
3287 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 3288 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 3289 gen_imagpart (submode, y)));
c937357e 3290 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 3291 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 3292 gen_realpart (submode, y)));
c937357e
RS
3293#else
3294 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 3295 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 3296 gen_realpart (submode, y)));
c937357e 3297 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 3298 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 3299 gen_imagpart (submode, y)));
c937357e
RS
3300#endif
3301 }
3302 else
3303 {
235ae7be
DM
3304 rtx realpart_x, realpart_y;
      rtx imagpart_x, imagpart_y;

      /* If this is a complex value with each part being smaller than a
	 word, the usual calling sequence will likely pack the pieces into
	 a single register.  Unfortunately, SUBREG of hard registers only
	 deals in terms of words, so we have a problem converting input
	 arguments to the CONCAT of two registers that is used elsewhere
	 for complex values.  If this is before reload, we can copy it into
	 memory and reload.  FIXME, we should see about using extract and
	 insert on integer registers, but complex short and complex char
	 variables should be rarely used.  */
      if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
	  && (reload_in_progress | reload_completed) == 0)
	{
	  int packed_dest_p
	    = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
	  int packed_src_p
	    = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);

	  if (packed_dest_p || packed_src_p)
	    {
	      enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
					   ? MODE_FLOAT : MODE_INT);

	      enum machine_mode reg_mode
		= mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);

	      if (reg_mode != BLKmode)
		{
		  rtx mem = assign_stack_temp (reg_mode,
					       GET_MODE_SIZE (mode), 0);
		  rtx cmem = adjust_address (mem, mode, 0);

		  cfun->cannot_inline
		    = N_("function using short complex types cannot be inline");

		  if (packed_dest_p)
		    {
		      rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);

		      emit_move_insn_1 (cmem, y);
		      return emit_move_insn_1 (sreg, mem);
		    }
		  else
		    {
		      rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);

		      emit_move_insn_1 (mem, sreg);
		      return emit_move_insn_1 (x, cmem);
		    }
		}
	    }
	}

      realpart_x = gen_realpart (submode, x);
      realpart_y = gen_realpart (submode, y);
      imagpart_x = gen_imagpart (submode, x);
      imagpart_y = gen_imagpart (submode, y);

      /* Show the output dies here.  This is necessary for SUBREGs
	 of pseudos since we cannot track their lifetimes correctly;
	 hard regs shouldn't appear here except as return values.
	 We never want to emit such a clobber after reload.  */
      if (x != y
	  && ! (reload_in_progress || reload_completed)
	  && (GET_CODE (realpart_x) == SUBREG
	      || GET_CODE (imagpart_x) == SUBREG))
	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 (realpart_x, realpart_y));
      emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		 (imagpart_x, imagpart_y));
    }

  return get_last_insn ();
    }

  /* Handle MODE_CC modes:  If we don't have a special move insn for this mode,
     find a mode to do it in.  If we have a movcc, use it.  Otherwise,
     find the MODE_INT mode of the same width.  */
  else if (GET_MODE_CLASS (mode) == MODE_CC
	   && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
    {
      enum insn_code insn_code;
      enum machine_mode tmode = VOIDmode;
      rtx x1 = x, y1 = y;

      if (mode != CCmode
	  && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
	tmode = CCmode;
      else
	for (tmode = QImode; tmode != VOIDmode;
	     tmode = GET_MODE_WIDER_MODE (tmode))
	  if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
	    break;

      if (tmode == VOIDmode)
	abort ();

      /* Get X and Y in TMODE.  We can't use gen_lowpart here because it
	 may call change_address which is not appropriate if we were
	 called when a reload was in progress.  We don't have to worry
	 about changing the address since the size in bytes is supposed to
	 be the same.  Copy the MEM to change the mode and move any
	 substitutions from the old MEM to the new one.  */

      if (reload_in_progress)
	{
	  x = gen_lowpart_common (tmode, x1);
	  if (x == 0 && GET_CODE (x1) == MEM)
	    {
	      x = adjust_address_nv (x1, tmode, 0);
	      copy_replacements (x1, x);
	    }

	  y = gen_lowpart_common (tmode, y1);
	  if (y == 0 && GET_CODE (y1) == MEM)
	    {
	      y = adjust_address_nv (y1, tmode, 0);
	      copy_replacements (y1, y);
	    }
	}
      else
	{
	  x = gen_lowpart (tmode, x);
	  y = gen_lowpart (tmode, y);
	}

      insn_code = mov_optab->handlers[(int) tmode].insn_code;
      return emit_insn (GEN_FCN (insn_code) (x, y));
    }

  /* This will handle any multi-word or full-word mode that lacks a move_insn
     pattern.  However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx seq, inner;
      int need_clobber;
      int i;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
	{
	  rtx temp;
	  enum rtx_code code;

	  /* Do not use anti_adjust_stack, since we don't want to update
	     stack_pointer_delta.  */
	  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
			       sub_optab,
#else
			       add_optab,
#endif
			       stack_pointer_rtx,
			       GEN_INT
			       (PUSH_ROUNDING
				(GET_MODE_SIZE (GET_MODE (x)))),
			       stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);

	  if (temp != stack_pointer_rtx)
	    emit_move_insn (stack_pointer_rtx, temp);

	  code = GET_CODE (XEXP (x, 0));

	  /* Just hope that small offsets off SP are OK.  */
	  if (code == POST_INC)
	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				 GEN_INT (-((HOST_WIDE_INT)
					    GET_MODE_SIZE (GET_MODE (x)))));
	  else if (code == POST_DEC)
	    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  else
	    temp = stack_pointer_rtx;

	  x = change_address (x, VOIDmode, temp);
	}
#endif

      /* If we are in reload, see if either operand is a MEM whose address
	 is scheduled for replacement.  */
      if (reload_in_progress && GET_CODE (x) == MEM
	  && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
	x = replace_equiv_address_nv (x, inner);
      if (reload_in_progress && GET_CODE (y) == MEM
	  && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
	y = replace_equiv_address_nv (y, inner);

      start_sequence ();

      need_clobber = 0;
      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  need_clobber |= (GET_CODE (xpart) == SUBREG);

	  last_insn = emit_move_insn (xpart, ypart);
	}

      seq = get_insns ();
      end_sequence ();

      /* Show the output dies here.  This is necessary for SUBREGs
	 of pseudos since we cannot track their lifetimes correctly;
	 hard regs shouldn't appear here except as return values.
	 We never want to emit such a clobber after reload.  */
      if (x != y
	  && ! (reload_in_progress || reload_completed)
	  && need_clobber != 0)
	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

      emit_insn (seq);

      return last_insn;
    }
  else
    abort ();
}
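
/* Illustration (not from the original source): on a hypothetical 32-bit
   target with no movdi pattern, the multi-word fallback above splits a
   DImode copy into word-sized moves.  For

       long long a, b;
       a = b;

   it emits roughly

       (clobber (reg:DI a))
       (set (subreg:SI (reg:DI a) 0) (subreg:SI (reg:DI b) 0))
       (set (subreg:SI (reg:DI a) 4) (subreg:SI (reg:DI b) 4))

   where the initial CLOBBER tells flow analysis that the SUBREG stores
   together constitute a full definition of the destination pseudo.  */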

/* If Y is representable exactly in a narrower mode, and the target can
   perform the extension directly from constant or memory, then emit the
   move as an extension.  */

static rtx
compress_float_constant (x, y)
     rtx x, y;
{
  enum machine_mode dstmode = GET_MODE (x);
  enum machine_mode orig_srcmode = GET_MODE (y);
  enum machine_mode srcmode;
  REAL_VALUE_TYPE r;

  REAL_VALUE_FROM_CONST_DOUBLE (r, y);

  for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
       srcmode != orig_srcmode;
       srcmode = GET_MODE_WIDER_MODE (srcmode))
    {
      enum insn_code ic;
      rtx trunc_y, last_insn;

      /* Skip if the target can't extend this way.  */
      ic = can_extend_p (dstmode, srcmode, 0);
      if (ic == CODE_FOR_nothing)
	continue;

      /* Skip if the narrowed value isn't exact.  */
      if (! exact_real_truncate (srcmode, &r))
	continue;

      trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);

      if (LEGITIMATE_CONSTANT_P (trunc_y))
	{
	  /* Skip if the target needs extra instructions to perform
	     the extension.  */
	  if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
	    continue;
	}
      else if (float_extend_from_mem[dstmode][srcmode])
	trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
      else
	continue;

      emit_unop_insn (ic, x, trunc_y, UNKNOWN);
      last_insn = get_last_insn ();

      if (GET_CODE (x) == REG)
	REG_NOTES (last_insn)
	  = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));

      return last_insn;
    }

  return NULL_RTX;
}
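
/* Illustration (not from the original source): the DFmode constant 1.5
   is exactly representable in SFmode, so on a target whose extendsfdf2
   pattern can extend directly from memory, compress_float_constant
   emits the move as a float extension of an SFmode constant-pool
   reference, halving the size of the pool entry.  A constant such as
   0.1, which is not exact in SFmode, fails exact_real_truncate and is
   left in DFmode.  */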
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#ifndef STACK_GROWS_DOWNWARD
  if (0)
#else
  if (1)
#endif
    {
      temp = virtual_outgoing_args_rtx;
      if (extra != 0 && below)
	temp = plus_constant (temp, extra);
    }
  else
    {
      if (GET_CODE (size) == CONST_INT)
	temp = plus_constant (virtual_outgoing_args_rtx,
			      -INTVAL (size) - (below ? 0 : extra));
      else if (extra != 0 && !below)
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, plus_constant (size, extra)));
      else
	temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			     negate_rtx (Pmode, size));
    }

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

#ifdef PUSH_ROUNDING

/* Emit single push insn.  */

static void
emit_single_push_insn (mode, x, type)
     rtx x;
     enum machine_mode mode;
     tree type;
{
  rtx dest_addr;
  unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
  rtx dest;
  enum insn_code icode;
  insn_operand_predicate_fn pred;

  stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
  /* If there is a push pattern, use it.  Otherwise try the old way of
     throwing a MEM representing a push operation at the move expander.  */
  icode = push_optab->handlers[(int) mode].insn_code;
  if (icode != CODE_FOR_nothing)
    {
      if (((pred = insn_data[(int) icode].operand[0].predicate)
	   && !((*pred) (x, mode))))
	x = force_reg (mode, x);
      emit_insn (GEN_FCN (icode) (x));
      return;
    }
  if (GET_MODE_SIZE (mode) == rounded_size)
    dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
  else
    {
#ifdef STACK_GROWS_DOWNWARD
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (-(HOST_WIDE_INT) rounded_size));
#else
      dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
				GEN_INT (rounded_size));
#endif
      dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
    }

  dest = gen_rtx_MEM (mode, dest_addr);

  if (type != 0)
    {
      set_mem_attributes (dest, type, 1);

      if (flag_optimize_sibling_calls)
	/* Function incoming arguments may overlap with sibling call
	   outgoing arguments and we cannot allow reordering of reads
	   from function arguments with stores to outgoing arguments
	   of sibling calls.  */
	set_mem_alias_set (dest, 0);
    }
  emit_move_insn (dest, x);
}
#endif
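
/* Illustration (not from the original source): PUSH_ROUNDING says how a
   target rounds the size of a push.  With a hypothetical definition

       #define PUSH_ROUNDING(BYTES) (((BYTES) + 1) & ~1)

   every push is rounded up to a 2-byte multiple, so pushing a QImode
   value moves the stack pointer by 2.  Because the rounded size then
   differs from GET_MODE_SIZE, emit_single_push_insn addresses the slot
   with PRE_MODIFY instead of the plain STACK_PUSH_CODE form.  */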

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bits) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far, reg_parm_stack_space,
		alignment_pad)
     rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     unsigned int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
     int reg_parm_stack_space;
     rtx alignment_pad;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-decrement.
     FIXME: why?  */
  if (STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = adjust_address (xinner, BLKmode, used);

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && PUSH_ARGS
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT
	      || (PUSH_ROUNDING (align / BITS_PER_UNIT)
		  == (align / BITS_PER_UNIT)))
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
	}
      else
#endif /* PUSH_ROUNDING  */
	{
	  rtx target;

	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));

	  if (!ACCUMULATE_OUTGOING_ARGS)
	    {
	      /* If the source is referenced relative to the stack pointer,
		 copy it to another register to stabilize it.  We do not need
		 to do this if we know that we won't be changing sp.  */

	      if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
		  || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
		temp = copy_to_reg (temp);
	    }

	  target = gen_rtx_MEM (BLKmode, temp);

	  if (type != 0)
	    {
	      set_mem_attributes (target, type, 1);
	      /* Function incoming arguments may overlap with sibling call
		 outgoing arguments and we cannot allow reordering of reads
		 from function arguments with stores to outgoing arguments
		 of sibling calls.  */
	      set_mem_alias_set (target, 0);
	    }

	  /* ALIGN may well be better aligned than TYPE, e.g. due to
	     PARM_BOUNDARY.  Assume the caller isn't lying.  */
	  set_mem_align (target, align);

	  emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx dest;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0 && PUSH_ARGS)
	emit_single_push_insn (mode, x, type);
      else
#endif
	{
	  if (GET_CODE (args_so_far) == CONST_INT)
	    addr
	      = memory_address (mode,
				plus_constant (args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
						       args_so_far));
	  dest = gen_rtx_MEM (mode, addr);
	  if (type != 0)
	    {
	      set_mem_attributes (dest, type, 1);
	      /* Function incoming arguments may overlap with sibling call
		 outgoing arguments and we cannot allow reordering of reads
		 from function arguments with stores to outgoing arguments
		 of sibling calls.  */
	      set_mem_alias_set (dest, 0);
	    }

	  emit_move_insn (dest, x);
	}
    }

  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, -1);  /* ??? size?  */
      else
	move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad && args_addr == 0)
    anti_adjust_stack (alignment_pad);
}
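
/* Illustration (not from the original source): under a hypothetical ABI
   that passes the first two words of an argument in registers, a
   12-byte BLKmode struct arrives here with PARTIAL == 2.  On a 32-bit
   target USED becomes 8, so the BLKmode branch copies only the final
   4 bytes to the stack, and the code at the end of the function loads
   the first 8 bytes into the register block with move_block_to_reg.  */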
\f
/* Return X if X can be used as a subtarget in a sequence of arithmetic
   operations.  */

static rtx
get_subtarget (x)
     rtx x;
{
  return ((x == 0
	   /* Only registers can be subtargets.  */
	   || GET_CODE (x) != REG
	   /* If the register is readonly, it can't be set more than once.  */
	   || RTX_UNCHANGING_P (x)
	   /* Don't use hard regs to avoid extending their life.  */
	   || REGNO (x) < FIRST_PSEUDO_REGISTER
	   /* Avoid subtargets inside loops,
	      since they hide some invariant expressions.  */
	   || preserve_subexpressions_p ())
	  ? 0 : x);
}

/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg ATTRIBUTE_UNUSED;
{
  rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, have the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
      || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
    {
      enum machine_mode mode1;
      HOST_WIDE_INT bitsize, bitpos;
      rtx orig_to_rtx;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
				 &unsignedp, &volatilep);

      /* If we are going to use store_bit_field and extract_bit_field,
	 make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
	tem = stabilize_reference (tem);

      orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);

      if (offset != 0)
	{
	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);

	  if (GET_CODE (to_rtx) != MEM)
	    abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
	  if (GET_MODE (offset_rtx) != Pmode)
	    offset_rtx = convert_memory_address (Pmode, offset_rtx);
#else
	  if (GET_MODE (offset_rtx) != ptr_mode)
	    offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	  /* A constant address in TO_RTX can have VOIDmode, we must not try
	     to call force_reg for that case.  Avoid that case.  */
	  if (GET_CODE (to_rtx) == MEM
	      && GET_MODE (to_rtx) == BLKmode
	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
	      && bitsize > 0
	      && (bitpos % bitsize) == 0
	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
	      && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
	    {
	      to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
	      bitpos = 0;
	    }

	  to_rtx = offset_address (to_rtx, offset_rtx,
				   highest_pow2_factor_for_type (TREE_TYPE (to),
								 offset));
	}

      if (GET_CODE (to_rtx) == MEM)
	{
	  /* If the field is at offset zero, we could have been given the
	     DECL_RTX of the parent struct.  Don't munge it.  */
	  to_rtx = shallow_copy_rtx (to_rtx);

	  set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
	}

      /* Deal with volatile and readonly fields.  The former is only done
	 for MEM.  Also set MEM_KEEP_ALIAS_SET_P if needed.  */
      if (volatilep && GET_CODE (to_rtx) == MEM)
	{
	  if (to_rtx == orig_to_rtx)
	    to_rtx = copy_rtx (to_rtx);
	  MEM_VOLATILE_P (to_rtx) = 1;
	}

      if (TREE_CODE (to) == COMPONENT_REF
	  && TREE_READONLY (TREE_OPERAND (to, 1)))
	{
	  if (to_rtx == orig_to_rtx)
	    to_rtx = copy_rtx (to_rtx);
	  RTX_UNCHANGING_P (to_rtx) = 1;
	}

      if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
	{
	  if (to_rtx == orig_to_rtx)
	    to_rtx = copy_rtx (to_rtx);
	  MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	}

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
			    (want_value
			     /* Spurious cast for HPUX compiler.  */
			     ? ((enum machine_mode)
				TYPE_MODE (TREE_TYPE (to)))
			     : VOIDmode),
			    unsignedp, TREE_TYPE (tem), get_alias_set (to));

      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
	 Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
					  TYPE_MODE (TREE_TYPE (from)),
					  result,
					  TREE_UNSIGNED (TREE_TYPE (to)))
	      : NULL_RTX);
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
     since it might be a promoted variable where the zero- or sign- extension
     needs to be done.  Handling this in the normal way is safe because no
     computation is done before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
	    && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
      else if (GET_MODE (to_rtx) == BLKmode)
	emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
      else
	{
#ifdef POINTERS_EXTEND_UNSIGNED
	  if (POINTER_TYPE_P (TREE_TYPE (to))
	      && GET_MODE (to_rtx) != GET_MODE (value))
	    value = convert_memory_address (GET_MODE (to_rtx), value);
#endif
	  emit_move_insn (to_rtx, value);
	}
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL
      && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);

      if (GET_CODE (to_rtx) == PARALLEL)
	emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
      else
	emit_move_insn (to_rtx, temp);

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);

      if (TARGET_MEM_FUNCTIONS)
	emit_library_call (memmove_libfunc, LCT_NORMAL,
			   VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
			   XEXP (from_rtx, 0), Pmode,
			   convert_to_mode (TYPE_MODE (sizetype),
					    size, TREE_UNSIGNED (sizetype)),
			   TYPE_MODE (sizetype));
      else
	emit_library_call (bcopy_libfunc, LCT_NORMAL,
			   VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
			   XEXP (to_rtx, 0), Pmode,
			   convert_to_mode (TYPE_MODE (integer_type_node),
					    size,
					    TREE_UNSIGNED (integer_type_node)),
			   TYPE_MODE (integer_type_node));

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}
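
/* Illustration (not from the original source): for a bitfield store

       struct s { int a : 3; int b : 5; } x;
       x.b = v;

   get_inner_reference reports BITSIZE == 5 and, on a typical layout,
   BITPOS == 3 with MODE1 == VOIDmode, so expand_assignment routes the
   store through store_field, which rewrites only those five bits and
   preserves the rest of the containing word.  */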

/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE & 1 is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE & 1 is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.

   If WANT_VALUE & 2 is set, this is a store into a call param on the
   stack, and block moves may need to be treated specially.  */

rtx
store_expr (exp, target, want_value)
     tree exp;
     rtx target;
     int want_value;
{
  rtx temp;
  int dont_return_target = 0;
  int dont_store_target = 0;

  if (VOID_TYPE_P (TREE_TYPE (exp)))
    {
      /* C++ can generate ?: expressions with a throw expression in one
	 branch and an rvalue in the other.  Here, we resolve attempts to
	 store the throw expression's nonexistent result.  */
      if (want_value)
	abort ();
      expand_expr (exp, const0_rtx, VOIDmode, 0);
      return NULL_RTX;
    }
  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
	 part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
		   want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
	 test the condition, doing the appropriate assignment on either
	 side.  This avoids the creation of unnecessary temporaries.
	 For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value & 1 ? target : NULL_RTX;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
	{
	  /* Expand EXP into a new pseudo.  */
	  temp = gen_reg_rtx (GET_MODE (target));
	  temp = expand_expr (exp, temp, GET_MODE (target),
			      (want_value & 2
			       ? EXPAND_STACK_PARM : EXPAND_NORMAL));
	}
      else
	temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
			    (want_value & 2
			     ? EXPAND_STACK_PARM : EXPAND_NORMAL));

      /* If target is volatile, ANSI requires accessing the value
	 *from* the target, if it is accessed.  So make that happen.
	 In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
	dont_return_target = 1;
    }
  else if ((want_value & 1) != 0
	   && GET_CODE (target) == MEM
	   && ! MEM_VOLATILE_P (target)
	   && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, target, GET_MODE (target),
			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
	{
	  /* If TEMP is already in the desired TARGET, only copy it from
	     memory and don't store it there again.  */
	  if (temp == target
	      || (rtx_equal_p (temp, target)
		  && ! side_effects_p (temp) && ! side_effects_p (target)))
	    dont_store_target = 1;
	  temp = copy_to_reg (temp);
	}
      dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      rtx inner_target = 0;

      /* If we don't want a value, we can do the conversion inside EXP,
	 which will often result in some optimizations.  Do the conversion
	 in two steps: first change the signedness, if needed, then
	 the extend.  But don't do this if the type of EXP is a subtype
	 of something else since then the conversion might involve
	 more than just converting modes.  */
      if ((want_value & 1) == 0
	  && INTEGRAL_TYPE_P (TREE_TYPE (exp))
	  && TREE_TYPE (TREE_TYPE (exp)) == 0)
	{
	  if (TREE_UNSIGNED (TREE_TYPE (exp))
	      != SUBREG_PROMOTED_UNSIGNED_P (target))
	    exp = convert
	      ((*lang_hooks.types.signed_or_unsigned_type)
	       (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);

	  exp = convert ((*lang_hooks.types.type_for_mode)
			 (GET_MODE (SUBREG_REG (target)),
			  SUBREG_PROMOTED_UNSIGNED_P (target)),
			 exp);

	  inner_target = SUBREG_REG (target);
	}

      temp = expand_expr (exp, inner_target, VOIDmode,
			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);

      /* If TEMP is a MEM and we want a result value, make the access
	 now so it gets done only once.  Strictly speaking, this is
	 only necessary if the MEM is volatile, or if the address
	 overlaps TARGET.  But not performing the load twice also
	 reduces the amount of rtl we generate and then have to CSE.  */
      if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
	temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
	 sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
	{
	  temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
				temp, SUBREG_PROMOTED_UNSIGNED_P (target));
	  temp = convert_modes (GET_MODE (SUBREG_REG (target)),
				GET_MODE (target), temp,
				SUBREG_PROMOTED_UNSIGNED_P (target));
	}

      convert_move (SUBREG_REG (target), temp,
		    SUBREG_PROMOTED_UNSIGNED_P (target));

      /* If we promoted a constant, change the mode back down to match
	 target.  Otherwise, the caller might get confused by a result whose
	 mode is larger than expected.  */

      if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
	{
	  if (GET_MODE (temp) != VOIDmode)
	    {
	      temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_SET (temp,
					    SUBREG_PROMOTED_UNSIGNED_P (target));
	    }
	  else
	    temp = convert_modes (GET_MODE (target),
				  GET_MODE (SUBREG_REG (target)),
				  temp, SUBREG_PROMOTED_UNSIGNED_P (target));
	}

      return want_value & 1 ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target),
			  want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
      /* Return TARGET if it's a specified hardware register.
	 If TARGET is a volatile mem ref, either return TARGET
	 or return a reg copied *from* TARGET; ANSI requires this.

	 Otherwise, if TEMP is not TARGET, return TEMP
	 if it is constant (for efficiency),
	 or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
	    && REGNO (target) < FIRST_PSEUDO_REGISTER)
	  && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  && ! rtx_equal_p (temp, target)
	  && (CONSTANT_P (temp) || (want_value & 1) != 0))
	dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
			  temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.
     If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
			      || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK
      && ! dont_store_target
      /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
	 but TARGET is not a valid memory reference, TEMP will differ
	 from TARGET although it is really the same location.  */
      && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
	  || target != DECL_RTL_IF_SET (exp))
      /* If there's nothing to copy, don't bother.  Don't call expr_size
	 unless necessary, because some front ends (C++) have an expr_size
	 hook that aborts on objects that are not supposed to be bit-copied
	 or bit-initialized.  */
      && expr_size (exp) != const0_rtx)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
	  && GET_MODE (temp) != VOIDmode)
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
	  if (dont_return_target)
	    {
	      /* In this case, we will return TEMP,
		 so make sure it has the proper mode.
		 But don't forget to store the value into TARGET.  */
	      temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
	      emit_move_insn (target, temp);
	    }
	  else
	    convert_move (target, temp, unsignedp);
	}

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
	{
	  /* Handle copying a string constant into an array.  The string
	     constant may be shorter than the array.  So copy just the string's
	     actual length, and clear the rest.  First get the size of the data
	     type of the string, which is actually the size of the target.  */
	  rtx size = expr_size (exp);

	  if (GET_CODE (size) == CONST_INT
	      && INTVAL (size) < TREE_STRING_LENGTH (exp))
	    emit_block_move (target, temp, size,
			     (want_value & 2
			      ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	  else
	    {
	      /* Compute the size of the data to copy from the string.  */
	      tree copy_size
		= size_binop (MIN_EXPR,
			      make_tree (sizetype, size),
			      size_int (TREE_STRING_LENGTH (exp)));
	      rtx copy_size_rtx
		= expand_expr (copy_size, NULL_RTX, VOIDmode,
			       (want_value & 2
				? EXPAND_STACK_PARM : EXPAND_NORMAL));
	      rtx label = 0;

	      /* Copy that much.  */
	      copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx, 0);
	      emit_block_move (target, temp, copy_size_rtx,
			       (want_value & 2
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      /* Figure out how much is left in TARGET that we have to clear.
		 Do all calculations in ptr_mode.  */
	      if (GET_CODE (copy_size_rtx) == CONST_INT)
		{
		  size = plus_constant (size, -INTVAL (copy_size_rtx));
		  target = adjust_address (target, BLKmode,
					   INTVAL (copy_size_rtx));
		}
	      else
		{
		  size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
				       copy_size_rtx, NULL_RTX, 0,
				       OPTAB_LIB_WIDEN);

#ifdef POINTERS_EXTEND_UNSIGNED
		  if (GET_MODE (copy_size_rtx) != Pmode)
		    copy_size_rtx = convert_memory_address (Pmode,
							    copy_size_rtx);
#endif

		  target = offset_address (target, copy_size_rtx,
					   highest_pow2_factor (copy_size));
		  label = gen_label_rtx ();
		  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
					   GET_MODE (size), 0, label);
		}

	      if (size != const0_rtx)
		clear_storage (target, size);

	      if (label)
		emit_label (label);
	    }
	}
      /* Handle calls that return values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
	emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
      else if (GET_MODE (temp) == BLKmode)
	emit_block_move (target, temp, expr_size (exp),
			 (want_value & 2
			  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
      else
	emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if ((want_value & 1) == 0)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if ((want_value & 1) != 0
	   && GET_MODE (target) != BLKmode
	   && ! (GET_CODE (target) == REG
		 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
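
/* Illustration (not from the original source): for

       char buf[8] = "hi";

   the STRING_CST branch above finds TREE_STRING_LENGTH == 3 (the two
   characters plus the terminating NUL), copies those 3 bytes with
   emit_block_move, and lets clear_storage zero the remaining 5 bytes
   of the array.  */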
\f
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return integer_zerop (exp);

    case COMPLEX_CST:
      return
	is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case VECTOR_CST:
      for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
	   elt = TREE_CHAIN (elt))
	if (!is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	if (! is_zeros_p (TREE_VALUE (elt)))
	  return 0;

      return 1;

    default:
      return 0;
    }
}

/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
	{
	  /* If there are no ranges of true bits, it is all zero.  */
	  return elt == NULL_TREE;
	}
      for (; elt; elt = TREE_CHAIN (elt))
	{
	  /* We do not handle the case where the index is a RANGE_EXPR,
	     so the statistic will be somewhat inaccurate.
	     We do make a more accurate count in store_constructor itself,
	     so since this function is only used for nested array elements,
	     this should be close enough.  */
	  if (mostly_zeros_p (TREE_VALUE (elt)))
	    zeros++;
	  elts++;
	}

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
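
/* Illustration (not from the original source): for the initializer

       int a[8] = { 0, 0, 0, 0, 0, 0, 1, 2 };

   the CONSTRUCTOR has ELTS == 8 and ZEROS == 6; since 4 * 6 >= 3 * 8,
   mostly_zeros_p returns 1, and store_constructor will clear the whole
   object first and then store only the two nonzero elements.  */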
4783\f
e1a43f73
PB
4784/* Helper function for store_constructor.
4785 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4786 TYPE is the type of the CONSTRUCTOR, not the element type.
04050c69 4787 CLEARED is as for store_constructor.
23cb1766 4788 ALIAS_SET is the alias set to use for any stores.
23ccec44
JW
4789
4790 This provides a recursive shortcut back to store_constructor when it isn't
4791 necessary to go through store_field. This is so that we can pass through
4792 the cleared field to let store_constructor know that we may not have to
4793 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4794
4795static void
04050c69
RK
4796store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4797 alias_set)
e1a43f73 4798 rtx target;
770ae6cc
RK
4799 unsigned HOST_WIDE_INT bitsize;
4800 HOST_WIDE_INT bitpos;
e1a43f73
PB
4801 enum machine_mode mode;
4802 tree exp, type;
4803 int cleared;
23cb1766 4804 int alias_set;
e1a43f73
PB
4805{
4806 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44 4807 && bitpos % BITS_PER_UNIT == 0
cc2902df 4808 /* If we have a nonzero bitpos for a register target, then we just
23ccec44
JW
4809 let store_field do the bitfield handling. This is unlikely to
4810 generate unnecessary clear instructions anyways. */
4811 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 4812 {
61cb205c
RK
4813 if (GET_CODE (target) == MEM)
4814 target
4815 = adjust_address (target,
4816 GET_MODE (target) == BLKmode
4817 || 0 != (bitpos
4818 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4819 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
23cb1766 4820
e0339ef7 4821
04050c69 4822 /* Update the alias set, if required. */
10b76d73
RK
4823 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4824 && MEM_ALIAS_SET (target) != 0)
70072ed9
RK
4825 {
4826 target = copy_rtx (target);
4827 set_mem_alias_set (target, alias_set);
4828 }
e0339ef7 4829
04050c69 4830 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4831 }
4832 else
a06ef755
RK
4833 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4834 alias_set);
e1a43f73
PB
4835}
4836
bbf6f052 4837/* Store the value of constructor EXP into the rtx TARGET.
04050c69
RK
4838 TARGET is either a REG or a MEM; we know it cannot conflict, since
4839 safe_from_p has been called.
b7010412
RK
4840 CLEARED is true if TARGET is known to have been zero'd.
4841 SIZE is the number of bytes of TARGET we are allowed to modify: this
4842 may not be the same as the size of EXP if we are assigning to a field
4843 which has been packed to exclude padding bits. */
bbf6f052
RK
4844
4845static void
04050c69 4846store_constructor (exp, target, cleared, size)
bbf6f052
RK
4847 tree exp;
4848 rtx target;
e1a43f73 4849 int cleared;
13eb1f7f 4850 HOST_WIDE_INT size;
bbf6f052 4851{
4af3895e 4852 tree type = TREE_TYPE (exp);
a5efcd63 4853#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4854 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4855#endif
4af3895e 4856
e44842fe
RK
4857 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4858 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 4859 {
b3694847 4860 tree elt;
bbf6f052 4861
04050c69 4862 /* We either clear the aggregate or indicate the value is dead. */
dd1db5ec
RK
4863 if ((TREE_CODE (type) == UNION_TYPE
4864 || TREE_CODE (type) == QUAL_UNION_TYPE)
04050c69
RK
4865 && ! cleared
4866 && ! CONSTRUCTOR_ELTS (exp))
4867 /* If the constructor is empty, clear the union. */
a59f8640 4868 {
04050c69
RK
4869 clear_storage (target, expr_size (exp));
4870 cleared = 1;
a59f8640 4871 }
4af3895e
JVA
4872
4873 /* If we are building a static constructor into a register,
4874 set the initial value as zero so we can fold the value into
67225c15
RK
4875 a constant. But if more than one register is involved,
4876 this probably loses. */
04050c69 4877 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
67225c15 4878 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200 4879 {
04050c69 4880 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
9de08200
RK
4881 cleared = 1;
4882 }
4883
4884 /* If the constructor has fewer fields than the structure
4885 or if we are initializing the structure to mostly zeros,
0d97bf4c 4886 clear the whole structure first. Don't do this if TARGET is a
fcf1b822
RK
4887 register whose mode size isn't equal to SIZE since clear_storage
4888 can't handle this case. */
04050c69 4889 else if (! cleared && size > 0
9376fcd6 4890 && ((list_length (CONSTRUCTOR_ELTS (exp))
c3b247b4 4891 != fields_length (type))
fcf1b822
RK
4892 || mostly_zeros_p (exp))
4893 && (GET_CODE (target) != REG
04050c69
RK
4894 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4895 == size)))
9de08200 4896 {
04050c69 4897 clear_storage (target, GEN_INT (size));
9de08200
RK
4898 cleared = 1;
4899 }
04050c69
RK
4900
4901 if (! cleared)
38a448ca 4902 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4903
4904 /* Store each element of the constructor into
4905 the corresponding field of TARGET. */
4906
4907 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4908 {
b3694847 4909 tree field = TREE_PURPOSE (elt);
34c73909 4910 tree value = TREE_VALUE (elt);
b3694847 4911 enum machine_mode mode;
770ae6cc
RK
4912 HOST_WIDE_INT bitsize;
4913 HOST_WIDE_INT bitpos = 0;
770ae6cc 4914 tree offset;
b50d17a1 4915 rtx to_rtx = target;
bbf6f052 4916
f32fd778
RS
4917 /* Just ignore missing fields.
4918 We cleared the whole structure, above,
4919 if any fields are missing. */
4920 if (field == 0)
4921 continue;
4922
8b6000fc 4923 if (cleared && is_zeros_p (value))
e1a43f73 4924 continue;
9de08200 4925
770ae6cc
RK
4926 if (host_integerp (DECL_SIZE (field), 1))
4927 bitsize = tree_low_cst (DECL_SIZE (field), 1);
14a774a9
RK
4928 else
4929 bitsize = -1;
4930
bbf6f052
RK
4931 mode = DECL_MODE (field);
4932 if (DECL_BIT_FIELD (field))
4933 mode = VOIDmode;
4934
770ae6cc
RK
4935 offset = DECL_FIELD_OFFSET (field);
4936 if (host_integerp (offset, 0)
4937 && host_integerp (bit_position (field), 0))
4938 {
4939 bitpos = int_bit_position (field);
4940 offset = 0;
4941 }
b50d17a1 4942 else
770ae6cc 4943 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
3a94c984 4944
b50d17a1
RK
4945 if (offset)
4946 {
4947 rtx offset_rtx;
4948
4949 if (contains_placeholder_p (offset))
7fa96708 4950 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 4951 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 4952
b50d17a1
RK
4953 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4954 if (GET_CODE (to_rtx) != MEM)
4955 abort ();
4956
bd070e1a 4957#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672
RK
4958 if (GET_MODE (offset_rtx) != Pmode)
4959 offset_rtx = convert_memory_address (Pmode, offset_rtx);
fa06ab5c
RK
4960#else
4961 if (GET_MODE (offset_rtx) != ptr_mode)
4962 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 4963#endif
bd070e1a 4964
0d4903b8
RK
4965 to_rtx = offset_address (to_rtx, offset_rtx,
4966 highest_pow2_factor (offset));
b50d17a1 4967 }
c5c76735 4968
cf04eb80
RK
4969 if (TREE_READONLY (field))
4970 {
9151b3bf 4971 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
4972 to_rtx = copy_rtx (to_rtx);
4973
cf04eb80
RK
4974 RTX_UNCHANGING_P (to_rtx) = 1;
4975 }
4976
34c73909
R
4977#ifdef WORD_REGISTER_OPERATIONS
4978 /* If this initializes a field that is smaller than a word, at the
4979 start of a word, try to widen it to a full word.
4980 This special case allows us to output C++ member function
4981 initializations in a form that the optimizers can understand. */
770ae6cc 4982 if (GET_CODE (target) == REG
34c73909
R
4983 && bitsize < BITS_PER_WORD
4984 && bitpos % BITS_PER_WORD == 0
4985 && GET_MODE_CLASS (mode) == MODE_INT
4986 && TREE_CODE (value) == INTEGER_CST
13eb1f7f
RK
4987 && exp_size >= 0
4988 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
34c73909
R
4989 {
4990 tree type = TREE_TYPE (value);
04050c69 4991
34c73909
R
4992 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4993 {
b0c48229
NB
4994 type = (*lang_hooks.types.type_for_size)
4995 (BITS_PER_WORD, TREE_UNSIGNED (type));
34c73909
R
4996 value = convert (type, value);
4997 }
04050c69 4998
34c73909
R
4999 if (BYTES_BIG_ENDIAN)
5000 value
5001 = fold (build (LSHIFT_EXPR, type, value,
5002 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5003 bitsize = BITS_PER_WORD;
5004 mode = word_mode;
5005 }
5006#endif
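/* Illustrative sketch (not part of the original file): the widening just
   above places a small value in a big-endian word's most significant bits
   by shifting left by BITS_PER_WORD - bitsize.  In plain host arithmetic,
   assuming a hypothetical 32-bit word and a hypothetical helper name:  */
#if 0
static unsigned long
widen_big_endian_example (unsigned long value, int bitsize)
{
  /* e.g. widen_big_endian_example (0xAB, 8) == 0xAB000000.  */
  return value << (32 - bitsize);
}
#endif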
10b76d73
RK
5007
5008 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5009 && DECL_NONADDRESSABLE_P (field))
5010 {
5011 to_rtx = copy_rtx (to_rtx);
5012 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5013 }
5014
c5c76735 5015 store_constructor_field (to_rtx, bitsize, bitpos, mode,
8b6000fc 5016 value, type, cleared,
10b76d73 5017 get_alias_set (TREE_TYPE (field)));
bbf6f052
RK
5018 }
5019 }
e6834654
SS
5020 else if (TREE_CODE (type) == ARRAY_TYPE
5021 || TREE_CODE (type) == VECTOR_TYPE)
bbf6f052 5022 {
b3694847
SS
5023 tree elt;
5024 int i;
e1a43f73 5025 int need_to_clear;
4af3895e 5026 tree domain = TYPE_DOMAIN (type);
4af3895e 5027 tree elttype = TREE_TYPE (type);
e6834654 5028 int const_bounds_p;
ae0ed63a
JM
5029 HOST_WIDE_INT minelt = 0;
5030 HOST_WIDE_INT maxelt = 0;
85f3d674 5031
e6834654
SS
5032 /* Vectors are like arrays, but the domain is stored via an array
5033 type indirectly. */
5034 if (TREE_CODE (type) == VECTOR_TYPE)
5035 {
5036 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5037 the same field as TYPE_DOMAIN, we are not guaranteed that
5038 it always will. */
5039 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5040 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5041 }
5042
5043 const_bounds_p = (TYPE_MIN_VALUE (domain)
5044 && TYPE_MAX_VALUE (domain)
5045 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5046 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5047
85f3d674
RK
5048 /* If we have constant bounds for the range of the type, get them. */
5049 if (const_bounds_p)
5050 {
5051 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5052 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5053 }
bbf6f052 5054
e1a43f73 5055 /* If the constructor has fewer elements than the array,
38e01259 5056 clear the whole array first. Similarly if this is
e1a43f73
PB
 5057	 a static constructor of a non-BLKmode object.  */
5058 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5059 need_to_clear = 1;
5060 else
5061 {
5062 HOST_WIDE_INT count = 0, zero_count = 0;
85f3d674
RK
5063 need_to_clear = ! const_bounds_p;
5064
e1a43f73
PB
5065 /* This loop is a more accurate version of the loop in
5066 mostly_zeros_p (it handles RANGE_EXPR in an index).
5067 It is also needed to check for missing elements. */
5068 for (elt = CONSTRUCTOR_ELTS (exp);
85f3d674 5069 elt != NULL_TREE && ! need_to_clear;
df0faff1 5070 elt = TREE_CHAIN (elt))
e1a43f73
PB
5071 {
5072 tree index = TREE_PURPOSE (elt);
5073 HOST_WIDE_INT this_node_count;
19caa751 5074
e1a43f73
PB
5075 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5076 {
5077 tree lo_index = TREE_OPERAND (index, 0);
5078 tree hi_index = TREE_OPERAND (index, 1);
05bccae2 5079
19caa751
RK
5080 if (! host_integerp (lo_index, 1)
5081 || ! host_integerp (hi_index, 1))
e1a43f73
PB
5082 {
5083 need_to_clear = 1;
5084 break;
5085 }
19caa751
RK
5086
5087 this_node_count = (tree_low_cst (hi_index, 1)
5088 - tree_low_cst (lo_index, 1) + 1);
e1a43f73
PB
5089 }
5090 else
5091 this_node_count = 1;
85f3d674 5092
e1a43f73
PB
5093 count += this_node_count;
5094 if (mostly_zeros_p (TREE_VALUE (elt)))
5095 zero_count += this_node_count;
5096 }
85f3d674 5097
8e958f70 5098 /* Clear the entire array first if there are any missing elements,
0f41302f 5099 or if the incidence of zero elements is >= 75%. */
85f3d674
RK
5100 if (! need_to_clear
5101 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
e1a43f73
PB
5102 need_to_clear = 1;
5103 }
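/* Illustrative sketch (not part of the original file): the heuristic just
   computed, restated as a standalone predicate.  COUNT and ZERO_COUNT are
   the totals from the loop above; ARRAY_LEN stands for
   maxelt - minelt + 1.  */
#if 0
static int
should_clear_first_example (HOST_WIDE_INT count, HOST_WIDE_INT zero_count,
			    HOST_WIDE_INT array_len)
{
  /* Clear first if elements are missing or zeros are >= 75% of COUNT.  */
  return count < array_len || 4 * zero_count >= 3 * count;
}
#endif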
85f3d674 5104
9376fcd6 5105 if (need_to_clear && size > 0)
9de08200
RK
5106 {
5107 if (! cleared)
725e58b1
RK
5108 {
5109 if (REG_P (target))
5110 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5111 else
5112 clear_storage (target, GEN_INT (size));
5113 }
9de08200
RK
5114 cleared = 1;
5115 }
df4556a3 5116 else if (REG_P (target))
bbf6f052 5117 /* Inform later passes that the old value is dead. */
38a448ca 5118 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
5119
5120 /* Store each element of the constructor into
5121 the corresponding element of TARGET, determined
5122 by counting the elements. */
5123 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5124 elt;
5125 elt = TREE_CHAIN (elt), i++)
5126 {
b3694847 5127 enum machine_mode mode;
19caa751
RK
5128 HOST_WIDE_INT bitsize;
5129 HOST_WIDE_INT bitpos;
bbf6f052 5130 int unsignedp;
e1a43f73 5131 tree value = TREE_VALUE (elt);
03dc44a6
RS
5132 tree index = TREE_PURPOSE (elt);
5133 rtx xtarget = target;
bbf6f052 5134
e1a43f73
PB
5135 if (cleared && is_zeros_p (value))
5136 continue;
9de08200 5137
bbf6f052 5138 unsignedp = TREE_UNSIGNED (elttype);
14a774a9
RK
5139 mode = TYPE_MODE (elttype);
5140 if (mode == BLKmode)
19caa751
RK
5141 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5142 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5143 : -1);
14a774a9
RK
5144 else
5145 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5146
e1a43f73
PB
5147 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5148 {
5149 tree lo_index = TREE_OPERAND (index, 0);
5150 tree hi_index = TREE_OPERAND (index, 1);
4977bab6 5151 rtx index_r, pos_rtx, loop_end;
e1a43f73 5152 struct nesting *loop;
05c0b405
PB
5153 HOST_WIDE_INT lo, hi, count;
5154 tree position;
e1a43f73 5155
0f41302f 5156 /* If the range is constant and "small", unroll the loop. */
85f3d674
RK
5157 if (const_bounds_p
5158 && host_integerp (lo_index, 0)
19caa751
RK
5159 && host_integerp (hi_index, 0)
5160 && (lo = tree_low_cst (lo_index, 0),
5161 hi = tree_low_cst (hi_index, 0),
05c0b405
PB
5162 count = hi - lo + 1,
5163 (GET_CODE (target) != MEM
5164 || count <= 2
19caa751
RK
5165 || (host_integerp (TYPE_SIZE (elttype), 1)
5166 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5167 <= 40 * 8)))))
e1a43f73 5168 {
05c0b405
PB
5169 lo -= minelt; hi -= minelt;
5170 for (; lo <= hi; lo++)
e1a43f73 5171 {
19caa751 5172 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
10b76d73
RK
5173
5174 if (GET_CODE (target) == MEM
5175 && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 5176 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
5177 && TYPE_NONALIASED_COMPONENT (type))
5178 {
5179 target = copy_rtx (target);
5180 MEM_KEEP_ALIAS_SET_P (target) = 1;
5181 }
5182
23cb1766 5183 store_constructor_field
04050c69
RK
5184 (target, bitsize, bitpos, mode, value, type, cleared,
5185 get_alias_set (elttype));
e1a43f73
PB
5186 }
5187 }
5188 else
5189 {
4977bab6 5190 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
e1a43f73
PB
5191 loop_end = gen_label_rtx ();
5192
5193 unsignedp = TREE_UNSIGNED (domain);
5194
5195 index = build_decl (VAR_DECL, NULL_TREE, domain);
5196
19e7881c 5197 index_r
e1a43f73
PB
5198 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5199 &unsignedp, 0));
19e7881c 5200 SET_DECL_RTL (index, index_r);
e1a43f73
PB
5201 if (TREE_CODE (value) == SAVE_EXPR
5202 && SAVE_EXPR_RTL (value) == 0)
5203 {
0f41302f
MS
5204 /* Make sure value gets expanded once before the
5205 loop. */
e1a43f73
PB
5206 expand_expr (value, const0_rtx, VOIDmode, 0);
5207 emit_queue ();
5208 }
5209 store_expr (lo_index, index_r, 0);
5210 loop = expand_start_loop (0);
5211
0f41302f 5212 /* Assign value to element index. */
fed3cef0
RK
5213 position
5214 = convert (ssizetype,
5215 fold (build (MINUS_EXPR, TREE_TYPE (index),
5216 index, TYPE_MIN_VALUE (domain))));
5217 position = size_binop (MULT_EXPR, position,
5218 convert (ssizetype,
5219 TYPE_SIZE_UNIT (elttype)));
5220
e1a43f73 5221 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
0d4903b8
RK
5222 xtarget = offset_address (target, pos_rtx,
5223 highest_pow2_factor (position));
5224 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 5225 if (TREE_CODE (value) == CONSTRUCTOR)
04050c69 5226 store_constructor (value, xtarget, cleared,
b7010412 5227 bitsize / BITS_PER_UNIT);
e1a43f73
PB
5228 else
5229 store_expr (value, xtarget, 0);
5230
5231 expand_exit_loop_if_false (loop,
5232 build (LT_EXPR, integer_type_node,
5233 index, hi_index));
5234
5235 expand_increment (build (PREINCREMENT_EXPR,
5236 TREE_TYPE (index),
7b8b9722 5237 index, integer_one_node), 0, 0);
e1a43f73
PB
5238 expand_end_loop ();
5239 emit_label (loop_end);
e1a43f73
PB
5240 }
5241 }
19caa751
RK
5242 else if ((index != 0 && ! host_integerp (index, 0))
5243 || ! host_integerp (TYPE_SIZE (elttype), 1))
03dc44a6 5244 {
03dc44a6
RS
5245 tree position;
5246
5b6c44ff 5247 if (index == 0)
fed3cef0 5248 index = ssize_int (1);
5b6c44ff 5249
e1a43f73 5250 if (minelt)
fed3cef0
RK
5251 index = convert (ssizetype,
5252 fold (build (MINUS_EXPR, index,
5253 TYPE_MIN_VALUE (domain))));
19caa751 5254
fed3cef0
RK
5255 position = size_binop (MULT_EXPR, index,
5256 convert (ssizetype,
5257 TYPE_SIZE_UNIT (elttype)));
0d4903b8
RK
5258 xtarget = offset_address (target,
5259 expand_expr (position, 0, VOIDmode, 0),
5260 highest_pow2_factor (position));
5261 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 5262 store_expr (value, xtarget, 0);
03dc44a6
RS
5263 }
5264 else
5265 {
5266 if (index != 0)
19caa751
RK
5267 bitpos = ((tree_low_cst (index, 0) - minelt)
5268 * tree_low_cst (TYPE_SIZE (elttype), 1));
03dc44a6 5269 else
19caa751
RK
5270 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5271
10b76d73 5272 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 5273 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
5274 && TYPE_NONALIASED_COMPONENT (type))
5275 {
5276 target = copy_rtx (target);
5277 MEM_KEEP_ALIAS_SET_P (target) = 1;
5278 }
5279
c5c76735 5280 store_constructor_field (target, bitsize, bitpos, mode, value,
04050c69 5281 type, cleared, get_alias_set (elttype));
23cb1766 5282
03dc44a6 5283 }
bbf6f052
RK
5284 }
5285 }
19caa751 5286
3a94c984 5287 /* Set constructor assignments. */
071a6595
PB
5288 else if (TREE_CODE (type) == SET_TYPE)
5289 {
e1a43f73 5290 tree elt = CONSTRUCTOR_ELTS (exp);
19caa751 5291 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
5292 tree domain = TYPE_DOMAIN (type);
5293 tree domain_min, domain_max, bitlength;
5294
9faa82d8 5295 /* The default implementation strategy is to extract the constant
071a6595
PB
5296 parts of the constructor, use that to initialize the target,
5297 and then "or" in whatever non-constant ranges we need in addition.
5298
5299 If a large set is all zero or all ones, it is
5300 probably better to set it using memset (if available) or bzero.
5301 Also, if a large set has just a single range, it may also be
 5302	 better to first clear the whole set (using
0f41302f 5303	 bzero/memset), and then set the bits we want.  */
3a94c984 5304
0f41302f 5305 /* Check for all zeros. */
9376fcd6 5306 if (elt == NULL_TREE && size > 0)
071a6595 5307 {
e1a43f73 5308 if (!cleared)
8ac61af7 5309 clear_storage (target, GEN_INT (size));
071a6595
PB
5310 return;
5311 }
5312
071a6595
PB
5313 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5314 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5315 bitlength = size_binop (PLUS_EXPR,
fed3cef0
RK
5316 size_diffop (domain_max, domain_min),
5317 ssize_int (1));
071a6595 5318
19caa751 5319 nbits = tree_low_cst (bitlength, 1);
e1a43f73
PB
5320
5321 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5322 are "complicated" (more than one range), initialize (the
3a94c984 5323 constant parts) by copying from a constant. */
e1a43f73
PB
5324 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5325 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 5326 {
19caa751 5327 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
b4ee5a72 5328 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 5329 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72 5330 HOST_WIDE_INT word = 0;
19caa751
RK
5331 unsigned int bit_pos = 0;
5332 unsigned int ibit = 0;
5333 unsigned int offset = 0; /* In bytes from beginning of set. */
5334
e1a43f73 5335 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 5336 for (;;)
071a6595 5337 {
b4ee5a72
PB
5338 if (bit_buffer[ibit])
5339 {
b09f3348 5340 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
5341 word |= (1 << (set_word_size - 1 - bit_pos));
5342 else
5343 word |= 1 << bit_pos;
5344 }
19caa751 5345
b4ee5a72
PB
5346 bit_pos++; ibit++;
5347 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 5348 {
e1a43f73
PB
5349 if (word != 0 || ! cleared)
5350 {
5351 rtx datum = GEN_INT (word);
5352 rtx to_rtx;
19caa751 5353
0f41302f
MS
5354 /* The assumption here is that it is safe to use
5355 XEXP if the set is multi-word, but not if
5356 it's single-word. */
e1a43f73 5357 if (GET_CODE (target) == MEM)
f4ef873c 5358 to_rtx = adjust_address (target, mode, offset);
3a94c984 5359 else if (offset == 0)
e1a43f73
PB
5360 to_rtx = target;
5361 else
5362 abort ();
5363 emit_move_insn (to_rtx, datum);
5364 }
19caa751 5365
b4ee5a72
PB
5366 if (ibit == nbits)
5367 break;
5368 word = 0;
5369 bit_pos = 0;
5370 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
5371 }
5372 }
071a6595 5373 }
e1a43f73 5374 else if (!cleared)
19caa751
RK
5375 /* Don't bother clearing storage if the set is all ones. */
5376 if (TREE_CHAIN (elt) != NULL_TREE
5377 || (TREE_PURPOSE (elt) == NULL_TREE
5378 ? nbits != 1
5379 : ( ! host_integerp (TREE_VALUE (elt), 0)
5380 || ! host_integerp (TREE_PURPOSE (elt), 0)
5381 || (tree_low_cst (TREE_VALUE (elt), 0)
5382 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5383 != (HOST_WIDE_INT) nbits))))
8ac61af7 5384 clear_storage (target, expr_size (exp));
3a94c984 5385
e1a43f73 5386 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595 5387 {
3a94c984 5388 /* Start of range of element or NULL. */
071a6595 5389 tree startbit = TREE_PURPOSE (elt);
3a94c984 5390 /* End of range of element, or element value. */
071a6595
PB
5391 tree endbit = TREE_VALUE (elt);
5392 HOST_WIDE_INT startb, endb;
19caa751 5393 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
071a6595
PB
5394
5395 bitlength_rtx = expand_expr (bitlength,
19caa751 5396 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
071a6595 5397
3a94c984 5398 /* Handle non-range tuple element like [ expr ]. */
071a6595
PB
5399 if (startbit == NULL_TREE)
5400 {
5401 startbit = save_expr (endbit);
5402 endbit = startbit;
5403 }
19caa751 5404
071a6595
PB
5405 startbit = convert (sizetype, startbit);
5406 endbit = convert (sizetype, endbit);
5407 if (! integer_zerop (domain_min))
5408 {
5409 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5410 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5411 }
3a94c984 5412 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
071a6595 5413 EXPAND_CONST_ADDRESS);
3a94c984 5414 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
071a6595
PB
5415 EXPAND_CONST_ADDRESS);
5416
5417 if (REG_P (target))
5418 {
1da68f56
RK
5419 targetx
5420 = assign_temp
b0c48229
NB
5421 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5422 (GET_MODE (target), 0),
1da68f56
RK
5423 TYPE_QUAL_CONST)),
5424 0, 1, 1);
071a6595
PB
5425 emit_move_insn (targetx, target);
5426 }
19caa751 5427
071a6595
PB
5428 else if (GET_CODE (target) == MEM)
5429 targetx = target;
5430 else
5431 abort ();
5432
4ca79136
RH
5433 /* Optimization: If startbit and endbit are constants divisible
5434 by BITS_PER_UNIT, call memset instead. */
5435 if (TARGET_MEM_FUNCTIONS
5436 && TREE_CODE (startbit) == INTEGER_CST
071a6595
PB
5437 && TREE_CODE (endbit) == INTEGER_CST
5438 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 5439 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 5440 {
ebb1b59a 5441 emit_library_call (memset_libfunc, LCT_NORMAL,
071a6595 5442 VOIDmode, 3,
e1a43f73
PB
5443 plus_constant (XEXP (targetx, 0),
5444 startb / BITS_PER_UNIT),
071a6595 5445 Pmode,
3b6f75e2 5446 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 5447 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 5448 TYPE_MODE (sizetype));
071a6595
PB
5449 }
5450 else
19caa751 5451 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
ebb1b59a
BS
5452 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
5453 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
19caa751
RK
5454 startbit_rtx, TYPE_MODE (sizetype),
5455 endbit_rtx, TYPE_MODE (sizetype));
5456
071a6595
PB
5457 if (REG_P (target))
5458 emit_move_insn (target, targetx);
5459 }
5460 }
bbf6f052
RK
5461
5462 else
5463 abort ();
5464}
5465
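/* Illustrative sketch (not part of the original file): the SET_TYPE memset
   optimization above reduces to plain byte arithmetic when the bit range
   [startb, endb) is byte-aligned.  BITS_PER_UNIT is assumed to be 8 here,
   and the helper name is hypothetical.  */
#if 0
#include <string.h>
static void
set_bit_range_example (unsigned char *base, long startb, long endb)
{
  /* Fill (endb - startb) / 8 bytes with all-ones, starting at byte
     startb / 8 -- the same address and length the library call uses.  */
  memset (base + startb / 8, -1, (endb - startb) / 8);
}
#endif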
5466/* Store the value of EXP (an expression tree)
5467 into a subfield of TARGET which has mode MODE and occupies
5468 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5469 If MODE is VOIDmode, it means that we are storing into a bit-field.
5470
5471 If VALUE_MODE is VOIDmode, return nothing in particular.
5472 UNSIGNEDP is not used in this case.
5473
5474 Otherwise, return an rtx for the value stored. This rtx
5475 has mode VALUE_MODE if that is convenient to do.
5476 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5477
a06ef755 5478	 TYPE is the type of the underlying object.
ece32014
MM
5479
5480 ALIAS_SET is the alias set for the destination. This value will
5481 (in general) be different from that for TARGET, since TARGET is a
5482 reference to the containing structure. */
bbf6f052
RK
5483
5484static rtx
a06ef755
RK
5485store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5486 alias_set)
bbf6f052 5487 rtx target;
770ae6cc
RK
5488 HOST_WIDE_INT bitsize;
5489 HOST_WIDE_INT bitpos;
bbf6f052
RK
5490 enum machine_mode mode;
5491 tree exp;
5492 enum machine_mode value_mode;
5493 int unsignedp;
a06ef755 5494 tree type;
ece32014 5495 int alias_set;
bbf6f052 5496{
906c4e36 5497 HOST_WIDE_INT width_mask = 0;
bbf6f052 5498
e9a25f70
JL
5499 if (TREE_CODE (exp) == ERROR_MARK)
5500 return const0_rtx;
5501
2be6a7e9
RK
5502 /* If we have nothing to store, do nothing unless the expression has
5503 side-effects. */
5504 if (bitsize == 0)
5505 return expand_expr (exp, const0_rtx, VOIDmode, 0);
a06ef755 5506	 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
906c4e36 5507 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
5508
5509 /* If we are storing into an unaligned field of an aligned union that is
5510 in a register, we may have the mode of TARGET being an integer mode but
5511 MODE == BLKmode. In that case, get an aligned object whose size and
5512 alignment are the same as TARGET and store TARGET into it (we can avoid
5513 the store if the field being stored is the entire width of TARGET). Then
5514 call ourselves recursively to store the field into a BLKmode version of
5515 that object. Finally, load from the object into TARGET. This is not
5516 very efficient in general, but should only be slightly more expensive
5517 than the otherwise-required unaligned accesses. Perhaps this can be
5518 cleaned up later. */
5519
5520 if (mode == BLKmode
5521 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5522 {
1da68f56
RK
5523 rtx object
5524 = assign_temp
a06ef755 5525 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
1da68f56 5526 0, 1, 1);
c4e59f51 5527 rtx blk_object = adjust_address (object, BLKmode, 0);
bbf6f052 5528
8752c357 5529 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
bbf6f052
RK
5530 emit_move_insn (object, target);
5531
a06ef755
RK
5532 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5533 alias_set);
bbf6f052
RK
5534
5535 emit_move_insn (target, object);
5536
a06ef755 5537 /* We want to return the BLKmode version of the data. */
46093b97 5538 return blk_object;
bbf6f052 5539 }
c3b247b4
JM
5540
5541 if (GET_CODE (target) == CONCAT)
5542 {
5543 /* We're storing into a struct containing a single __complex. */
5544
5545 if (bitpos != 0)
5546 abort ();
5547 return store_expr (exp, target, 0);
5548 }
bbf6f052
RK
5549
5550 /* If the structure is in a register or if the component
5551 is a bit field, we cannot use addressing to access it.
5552 Use bit-field techniques or SUBREG to store in it. */
5553
4fa52007 5554 if (mode == VOIDmode
6ab06cbb
JW
5555 || (mode != BLKmode && ! direct_store[(int) mode]
5556 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5557 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4fa52007 5558 || GET_CODE (target) == REG
c980ac49 5559 || GET_CODE (target) == SUBREG
ccc98036
RS
5560 /* If the field isn't aligned enough to store as an ordinary memref,
5561 store it as a bit field. */
04050c69
RK
5562 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5563 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)
14a774a9 5564 || bitpos % GET_MODE_ALIGNMENT (mode)))
14a774a9
RK
5565 /* If the RHS and field are a constant size and the size of the
5566 RHS isn't the same size as the bitfield, we must use bitfield
5567 operations. */
05bccae2
RK
5568 || (bitsize >= 0
5569 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5570 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 5571 {
906c4e36 5572 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 5573
ef19912d
RK
5574 /* If BITSIZE is narrower than the size of the type of EXP
5575 we will be narrowing TEMP. Normally, what's wanted are the
 5576	 low-order bits.  However, if EXP's type is a record and this is a
 5577	 big-endian machine, we want the upper BITSIZE bits.  */
5578 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
65a07688 5579 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
ef19912d
RK
5580 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5581 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5582 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5583 - bitsize),
5584 temp, 1);
5585
bbd6cf73
RK
5586 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5587 MODE. */
5588 if (mode != VOIDmode && mode != BLKmode
5589 && mode != TYPE_MODE (TREE_TYPE (exp)))
5590 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5591
a281e72d
RK
5592 /* If the modes of TARGET and TEMP are both BLKmode, both
5593 must be in memory and BITPOS must be aligned on a byte
5594 boundary. If so, we simply do a block copy. */
5595 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5596 {
5597 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5598 || bitpos % BITS_PER_UNIT != 0)
5599 abort ();
5600
f4ef873c 5601 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d 5602 emit_block_move (target, temp,
a06ef755 5603 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a
RH
5604 / BITS_PER_UNIT),
5605 BLOCK_OP_NORMAL);
a281e72d
RK
5606
5607 return value_mode == VOIDmode ? const0_rtx : target;
5608 }
5609
bbf6f052 5610 /* Store the value in the bitfield. */
a06ef755
RK
5611 store_bit_field (target, bitsize, bitpos, mode, temp,
5612 int_size_in_bytes (type));
5613
bbf6f052
RK
5614 if (value_mode != VOIDmode)
5615 {
04050c69
RK
5616 /* The caller wants an rtx for the value.
5617 If possible, avoid refetching from the bitfield itself. */
bbf6f052
RK
5618 if (width_mask != 0
5619 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 5620 {
9074de27 5621 tree count;
5c4d7cfb 5622 enum machine_mode tmode;
86a2c12a 5623
5c4d7cfb 5624 tmode = GET_MODE (temp);
86a2c12a
RS
5625 if (tmode == VOIDmode)
5626 tmode = value_mode;
22273300
JJ
5627
5628 if (unsignedp)
5629 return expand_and (tmode, temp,
2496c7bd 5630 gen_int_mode (width_mask, tmode),
22273300
JJ
5631 NULL_RTX);
5632
5c4d7cfb
RS
5633 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5634 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5635 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5636 }
04050c69 5637
bbf6f052 5638 return extract_bit_field (target, bitsize, bitpos, unsignedp,
04050c69 5639 NULL_RTX, value_mode, VOIDmode,
a06ef755 5640 int_size_in_bytes (type));
bbf6f052
RK
5641 }
5642 return const0_rtx;
5643 }
5644 else
5645 {
5646 rtx addr = XEXP (target, 0);
a06ef755 5647 rtx to_rtx = target;
bbf6f052
RK
5648
5649 /* If a value is wanted, it must be the lhs;
5650 so make the address stable for multiple use. */
5651
5652 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5653 && ! CONSTANT_ADDRESS_P (addr)
5654 /* A frame-pointer reference is already stable. */
5655 && ! (GET_CODE (addr) == PLUS
5656 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5657 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5658 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
a06ef755 5659 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
bbf6f052
RK
5660
5661 /* Now build a reference to just the desired component. */
5662
a06ef755
RK
5663 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5664
5665 if (to_rtx == target)
5666 to_rtx = copy_rtx (to_rtx);
792760b9 5667
c6df88cb 5668 MEM_SET_IN_STRUCT_P (to_rtx, 1);
10b76d73 5669 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
a06ef755 5670 set_mem_alias_set (to_rtx, alias_set);
bbf6f052
RK
5671
5672 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5673 }
5674}
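/* Illustrative sketch (not part of the original file): the refetch
   avoidance above reproduces the value just stored into a BITSIZE-bit
   field from TEMP itself, either by masking with width_mask (unsigned) or
   by a left/right shift pair (signed).  W is the mode width; an arithmetic
   right shift is assumed, as in the RTL expansion, and the helper name is
   hypothetical.  */
#if 0
static long
refetch_bitfield_example (long temp, int bitsize, int w, int unsignedp)
{
  if (unsignedp)
    return temp & ((1L << bitsize) - 1);	     /* AND with width_mask */
  return (temp << (w - bitsize)) >> (w - bitsize);   /* sign-extend low bits */
}
#endif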
5675\f
5676/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
b4e3fabb
RK
5677 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5678 codes and find the ultimate containing object, which we return.
bbf6f052
RK
5679
5680 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5681 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
5682 If the position of the field is variable, we store a tree
5683 giving the variable offset (in units) in *POFFSET.
5684 This offset is in addition to the bit position.
5685 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
5686
5687 If any of the extraction expressions is volatile,
5688 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5689
5690 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5691 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
5692 is redundant.
5693
5694 If the field describes a variable-sized object, *PMODE is set to
5695 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
6d2f8887 5696 this case, but the address of the object can be found. */
bbf6f052
RK
5697
5698tree
4969d05d 5699get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
a06ef755 5700 punsignedp, pvolatilep)
bbf6f052 5701 tree exp;
770ae6cc
RK
5702 HOST_WIDE_INT *pbitsize;
5703 HOST_WIDE_INT *pbitpos;
7bb0943f 5704 tree *poffset;
bbf6f052
RK
5705 enum machine_mode *pmode;
5706 int *punsignedp;
5707 int *pvolatilep;
5708{
5709 tree size_tree = 0;
5710 enum machine_mode mode = VOIDmode;
fed3cef0 5711 tree offset = size_zero_node;
770ae6cc 5712 tree bit_offset = bitsize_zero_node;
738cc472 5713 tree placeholder_ptr = 0;
770ae6cc 5714 tree tem;
bbf6f052 5715
770ae6cc
RK
5716 /* First get the mode, signedness, and size. We do this from just the
5717 outermost expression. */
bbf6f052
RK
5718 if (TREE_CODE (exp) == COMPONENT_REF)
5719 {
5720 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5721 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5722 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 5723
bbf6f052
RK
5724 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5725 }
5726 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5727 {
5728 size_tree = TREE_OPERAND (exp, 1);
5729 *punsignedp = TREE_UNSIGNED (exp);
5730 }
5731 else
5732 {
5733 mode = TYPE_MODE (TREE_TYPE (exp));
770ae6cc
RK
5734 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5735
ab87f8c8
JL
5736 if (mode == BLKmode)
5737 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
5738 else
5739 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5740 }
3a94c984 5741
770ae6cc 5742 if (size_tree != 0)
bbf6f052 5743 {
770ae6cc 5744 if (! host_integerp (size_tree, 1))
e7c33f54
RK
5745 mode = BLKmode, *pbitsize = -1;
5746 else
770ae6cc 5747 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
5748 }
5749
5750 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5751 and find the ultimate containing object. */
bbf6f052
RK
5752 while (1)
5753 {
770ae6cc
RK
5754 if (TREE_CODE (exp) == BIT_FIELD_REF)
5755 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5756 else if (TREE_CODE (exp) == COMPONENT_REF)
bbf6f052 5757 {
770ae6cc
RK
5758 tree field = TREE_OPERAND (exp, 1);
5759 tree this_offset = DECL_FIELD_OFFSET (field);
bbf6f052 5760
e7f3c83f
RK
5761 /* If this field hasn't been filled in yet, don't go
5762 past it. This should only happen when folding expressions
5763 made during type construction. */
770ae6cc 5764 if (this_offset == 0)
e7f3c83f 5765 break;
770ae6cc
RK
5766 else if (! TREE_CONSTANT (this_offset)
5767 && contains_placeholder_p (this_offset))
5768 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
e7f3c83f 5769
7156dead 5770 offset = size_binop (PLUS_EXPR, offset, this_offset);
770ae6cc
RK
5771 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5772 DECL_FIELD_BIT_OFFSET (field));
e6d8c385 5773
a06ef755 5774 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
bbf6f052 5775 }
7156dead 5776
b4e3fabb
RK
5777 else if (TREE_CODE (exp) == ARRAY_REF
5778 || TREE_CODE (exp) == ARRAY_RANGE_REF)
bbf6f052 5779 {
742920c7 5780 tree index = TREE_OPERAND (exp, 1);
b4e3fabb
RK
5781 tree array = TREE_OPERAND (exp, 0);
5782 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
770ae6cc 5783 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
b4e3fabb 5784 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
742920c7 5785
770ae6cc
RK
5786 /* We assume all arrays have sizes that are a multiple of a byte.
5787 First subtract the lower bound, if any, in the type of the
5788 index, then convert to sizetype and multiply by the size of the
5789 array element. */
5790 if (low_bound != 0 && ! integer_zerop (low_bound))
5791 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5792 index, low_bound));
f8dac6eb 5793
7156dead
RK
5794 /* If the index has a self-referential type, pass it to a
 5795	 WITH_RECORD_EXPR; if the component size is self-referential, pass our
5796 component to one. */
770ae6cc
RK
5797 if (! TREE_CONSTANT (index)
5798 && contains_placeholder_p (index))
5799 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
7156dead
RK
5800 if (! TREE_CONSTANT (unit_size)
5801 && contains_placeholder_p (unit_size))
b4e3fabb 5802 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
742920c7 5803
770ae6cc
RK
5804 offset = size_binop (PLUS_EXPR, offset,
5805 size_binop (MULT_EXPR,
5806 convert (sizetype, index),
7156dead 5807 unit_size));
bbf6f052 5808 }
7156dead 5809
738cc472
RK
5810 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5811 {
70072ed9
RK
5812 tree new = find_placeholder (exp, &placeholder_ptr);
5813
5814 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5815 We might have been called from tree optimization where we
5816 haven't set up an object yet. */
5817 if (new == 0)
5818 break;
5819 else
5820 exp = new;
5821
738cc472
RK
5822 continue;
5823 }
bbf6f052 5824 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
ed239f5a 5825 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
bbf6f052
RK
5826 && ! ((TREE_CODE (exp) == NOP_EXPR
5827 || TREE_CODE (exp) == CONVERT_EXPR)
5828 && (TYPE_MODE (TREE_TYPE (exp))
5829 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5830 break;
7bb0943f
RS
5831
5832 /* If any reference in the chain is volatile, the effect is volatile. */
5833 if (TREE_THIS_VOLATILE (exp))
5834 *pvolatilep = 1;
839c4796 5835
bbf6f052
RK
5836 exp = TREE_OPERAND (exp, 0);
5837 }
5838
770ae6cc
RK
5839 /* If OFFSET is constant, see if we can return the whole thing as a
5840 constant bit position. Otherwise, split it up. */
5841 if (host_integerp (offset, 0)
5842 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5843 bitsize_unit_node))
5844 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5845 && host_integerp (tem, 0))
5846 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5847 else
5848 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5849
bbf6f052 5850 *pmode = mode;
bbf6f052
RK
5851 return exp;
5852}
921b3427 5853
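/* Illustrative sketch (not part of the original file): what the
   decomposition above yields for a simple constant-position COMPONENT_REF.
   The struct below is hypothetical; for the reference s.b,
   get_inner_reference would return the object for s with *PBITPOS equal to
   BITS_PER_UNIT times the byte offset of b, *POFFSET = 0 (the position is
   constant), and *PMODE the field's mode.  */
#if 0
#include <stddef.h>
struct example_s { int a; short b; };
/* Constant bit position of b, as *PBITPOS would report it (8-bit units).  */
#define B_BITPOS (8 * offsetof (struct example_s, b))
#endif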
ed239f5a
RK
5854/* Return 1 if T is an expression that get_inner_reference handles. */
5855
5856int
5857handled_component_p (t)
5858 tree t;
5859{
5860 switch (TREE_CODE (t))
5861 {
5862 case BIT_FIELD_REF:
5863 case COMPONENT_REF:
5864 case ARRAY_REF:
5865 case ARRAY_RANGE_REF:
5866 case NON_LVALUE_EXPR:
5867 case VIEW_CONVERT_EXPR:
5868 return 1;
5869
5870 case NOP_EXPR:
5871 case CONVERT_EXPR:
5872 return (TYPE_MODE (TREE_TYPE (t))
5873 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5874
5875 default:
5876 return 0;
5877 }
5878}
bbf6f052 5879\f
3fe44edd
RK
5880/* Given an rtx VALUE that may contain additions and multiplications, return
5881 an equivalent value that just refers to a register, memory, or constant.
5882 This is done by generating instructions to perform the arithmetic and
5883 returning a pseudo-register containing the value.
c45a13a6
RK
5884
5885 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
5886
5887rtx
5888force_operand (value, target)
5889 rtx value, target;
5890{
8a28dbcc 5891 rtx op1, op2;
bbf6f052 5892 /* Use subtarget as the target for operand 0 of a binary operation. */
b3694847 5893 rtx subtarget = get_subtarget (target);
8a28dbcc 5894 enum rtx_code code = GET_CODE (value);
bbf6f052 5895
8b015896 5896 /* Check for a PIC address load. */
8a28dbcc 5897 if ((code == PLUS || code == MINUS)
8b015896
RH
5898 && XEXP (value, 0) == pic_offset_table_rtx
5899 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5900 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5901 || GET_CODE (XEXP (value, 1)) == CONST))
5902 {
5903 if (!subtarget)
5904 subtarget = gen_reg_rtx (GET_MODE (value));
5905 emit_move_insn (subtarget, value);
5906 return subtarget;
5907 }
5908
8a28dbcc 5909 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
bbf6f052 5910 {
8a28dbcc
JH
5911 if (!target)
5912 target = gen_reg_rtx (GET_MODE (value));
ce0f3925 5913 convert_move (target, force_operand (XEXP (value, 0), NULL),
8a28dbcc
JH
5914 code == ZERO_EXTEND);
5915 return target;
bbf6f052
RK
5916 }
5917
8a28dbcc 5918 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
bbf6f052
RK
5919 {
5920 op2 = XEXP (value, 1);
8a28dbcc 5921 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
bbf6f052 5922 subtarget = 0;
8a28dbcc 5923 if (code == MINUS && GET_CODE (op2) == CONST_INT)
bbf6f052 5924 {
8a28dbcc 5925 code = PLUS;
bbf6f052
RK
5926 op2 = negate_rtx (GET_MODE (value), op2);
5927 }
5928
5929 /* Check for an addition with OP2 a constant integer and our first
8a28dbcc
JH
5930 operand a PLUS of a virtual register and something else. In that
5931 case, we want to emit the sum of the virtual register and the
5932 constant first and then add the other value. This allows virtual
5933 register instantiation to simply modify the constant rather than
5934 creating another one around this addition. */
5935 if (code == PLUS && GET_CODE (op2) == CONST_INT
bbf6f052
RK
5936 && GET_CODE (XEXP (value, 0)) == PLUS
5937 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5938 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5939 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5940 {
8a28dbcc
JH
5941 rtx temp = expand_simple_binop (GET_MODE (value), code,
5942 XEXP (XEXP (value, 0), 0), op2,
5943 subtarget, 0, OPTAB_LIB_WIDEN);
5944 return expand_simple_binop (GET_MODE (value), code, temp,
5945 force_operand (XEXP (XEXP (value,
5946 0), 1), 0),
5947 target, 0, OPTAB_LIB_WIDEN);
bbf6f052 5948 }
3a94c984 5949
8a28dbcc
JH
5950 op1 = force_operand (XEXP (value, 0), subtarget);
5951 op2 = force_operand (op2, NULL_RTX);
5952 switch (code)
5953 {
5954 case MULT:
5955 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5956 case DIV:
5957 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5958 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5959 target, 1, OPTAB_LIB_WIDEN);
5960 else
5961 return expand_divmod (0,
5962 FLOAT_MODE_P (GET_MODE (value))
5963 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5964 GET_MODE (value), op1, op2, target, 0);
5965 break;
5966 case MOD:
5967 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5968 target, 0);
5969 break;
5970 case UDIV:
5971 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5972 target, 1);
5973 break;
5974 case UMOD:
5975 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5976 target, 1);
5977 break;
5978 case ASHIFTRT:
5979 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5980 target, 0, OPTAB_LIB_WIDEN);
5981 break;
5982 default:
5983 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5984 target, 1, OPTAB_LIB_WIDEN);
5985 }
5986 }
5987 if (GET_RTX_CLASS (code) == '1')
5988 {
5989 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5990 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
bbf6f052 5991 }
34e81b5a
RK
5992
5993#ifdef INSN_SCHEDULING
 5994	/* On machines that have insn scheduling, we want all memory references to be
5995 explicit, so we need to deal with such paradoxical SUBREGs. */
5996 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
5997 && (GET_MODE_SIZE (GET_MODE (value))
5998 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
5999 value
6000 = simplify_gen_subreg (GET_MODE (value),
6001 force_reg (GET_MODE (SUBREG_REG (value)),
6002 force_operand (SUBREG_REG (value),
6003 NULL_RTX)),
6004 GET_MODE (SUBREG_REG (value)),
6005 SUBREG_BYTE (value));
6006#endif
6007
bbf6f052
RK
6008 return value;
6009}
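/* Illustrative sketch (not part of the original file): a typical use of
   force_operand, legitimizing an address computed as a sum so later code
   sees only a REG, MEM, or constant.  The helper name is hypothetical.  */
#if 0
static rtx
legitimize_sum_example (rtx base_reg, rtx index_reg)
{
  rtx addr = gen_rtx_PLUS (Pmode, base_reg, index_reg);
  return force_operand (addr, NULL_RTX);  /* emits the add, returns a pseudo */
}
#endif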
6010\f
bbf6f052 6011/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
6012 EXP can reference X, which is being modified. TOP_P is nonzero if this
6013 call is going to be used to determine whether we need a temporary
ff439b5f
CB
6014 for EXP, as opposed to a recursive call to this function.
6015
6016 It is always safe for this routine to return zero since it merely
6017 searches for optimization opportunities. */
bbf6f052 6018
8f17b5c5 6019int
e5e809f4 6020safe_from_p (x, exp, top_p)
bbf6f052
RK
6021 rtx x;
6022 tree exp;
e5e809f4 6023 int top_p;
bbf6f052
RK
6024{
6025 rtx exp_rtl = 0;
6026 int i, nops;
1da68f56 6027 static tree save_expr_list;
bbf6f052 6028
6676e72f
RK
6029 if (x == 0
6030 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
6031 have no way of allocating temporaries of variable size
6032 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6033 So we assume here that something at a higher level has prevented a
f4510f37 6034 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 6035 do this when X is BLKmode and when we are at the top level. */
d0f062fb 6036 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 6037 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
6038 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6039 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6040 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6041 != INTEGER_CST)
1da68f56
RK
6042 && GET_MODE (x) == BLKmode)
6043 /* If X is in the outgoing argument area, it is always safe. */
6044 || (GET_CODE (x) == MEM
6045 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6046 || (GET_CODE (XEXP (x, 0)) == PLUS
6047 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
bbf6f052
RK
6048 return 1;
6049
6050 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6051 find the underlying pseudo. */
6052 if (GET_CODE (x) == SUBREG)
6053 {
6054 x = SUBREG_REG (x);
6055 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6056 return 0;
6057 }
6058
1da68f56
RK
6059 /* A SAVE_EXPR might appear many times in the expression passed to the
6060 top-level safe_from_p call, and if it has a complex subexpression,
6061 examining it multiple times could result in a combinatorial explosion.
6062 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6063 with optimization took about 28 minutes to compile -- even though it was
6064 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6065 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6066 we have processed. Note that the only test of top_p was above. */
6067
6068 if (top_p)
6069 {
6070 int rtn;
6071 tree t;
6072
6073 save_expr_list = 0;
6074
6075 rtn = safe_from_p (x, exp, 0);
6076
6077 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6078 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6079
6080 return rtn;
6081 }
bbf6f052 6082
1da68f56 6083 /* Now look at our tree code and possibly recurse. */
bbf6f052
RK
6084 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6085 {
6086 case 'd':
a9772b60 6087 exp_rtl = DECL_RTL_IF_SET (exp);
bbf6f052
RK
6088 break;
6089
6090 case 'c':
6091 return 1;
6092
6093 case 'x':
6094 if (TREE_CODE (exp) == TREE_LIST)
f32fd778 6095 return ((TREE_VALUE (exp) == 0
e5e809f4 6096 || safe_from_p (x, TREE_VALUE (exp), 0))
bbf6f052 6097 && (TREE_CHAIN (exp) == 0
e5e809f4 6098 || safe_from_p (x, TREE_CHAIN (exp), 0)));
ff439b5f
CB
6099 else if (TREE_CODE (exp) == ERROR_MARK)
6100 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
6101 else
6102 return 0;
6103
6104 case '1':
e5e809f4 6105 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
6106
6107 case '2':
6108 case '<':
e5e809f4
JL
6109 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
6110 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
bbf6f052
RK
6111
6112 case 'e':
6113 case 'r':
6114 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6115 the expression. If it is set, we conflict iff we are that rtx or
6116 both are in memory. Otherwise, we check all operands of the
6117 expression recursively. */
6118
6119 switch (TREE_CODE (exp))
6120 {
6121 case ADDR_EXPR:
70072ed9
RK
6122 /* If the operand is static or we are static, we can't conflict.
6123 Likewise if we don't conflict with the operand at all. */
6124 if (staticp (TREE_OPERAND (exp, 0))
6125 || TREE_STATIC (exp)
6126 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6127 return 1;
6128
6129 /* Otherwise, the only way this can conflict is if we are taking
 6130	 the address of a DECL whose address is part of X, which is
6131 very rare. */
6132 exp = TREE_OPERAND (exp, 0);
6133 if (DECL_P (exp))
6134 {
6135 if (!DECL_RTL_SET_P (exp)
6136 || GET_CODE (DECL_RTL (exp)) != MEM)
6137 return 0;
6138 else
6139 exp_rtl = XEXP (DECL_RTL (exp), 0);
6140 }
6141 break;
bbf6f052
RK
6142
6143 case INDIRECT_REF:
1da68f56
RK
6144 if (GET_CODE (x) == MEM
6145 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6146 get_alias_set (exp)))
bbf6f052
RK
6147 return 0;
6148 break;
6149
6150 case CALL_EXPR:
f9808f81
MM
6151 /* Assume that the call will clobber all hard registers and
6152 all of memory. */
6153 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6154 || GET_CODE (x) == MEM)
6155 return 0;
bbf6f052
RK
6156 break;
6157
6158 case RTL_EXPR:
3bb5826a
RK
6159 /* If a sequence exists, we would have to scan every instruction
6160 in the sequence to see if it was safe. This is probably not
6161 worthwhile. */
6162 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
6163 return 0;
6164
3bb5826a 6165 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
6166 break;
6167
6168 case WITH_CLEANUP_EXPR:
6ad7895a 6169 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052
RK
6170 break;
6171
5dab5552 6172 case CLEANUP_POINT_EXPR:
e5e809f4 6173 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 6174
bbf6f052
RK
6175 case SAVE_EXPR:
6176 exp_rtl = SAVE_EXPR_RTL (exp);
ff439b5f
CB
6177 if (exp_rtl)
6178 break;
6179
1da68f56
RK
6180 /* If we've already scanned this, don't do it again. Otherwise,
 6181	 mark it as scanned and record it so the flag can be cleared
 6182	 when we are done.  */
6183 if (TREE_PRIVATE (exp))
6184 return 1;
ff439b5f 6185
1da68f56
RK
6186 TREE_PRIVATE (exp) = 1;
6187 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
ff59bfe6 6188 {
1da68f56
RK
6189 TREE_PRIVATE (exp) = 0;
6190 return 0;
ff59bfe6 6191 }
1da68f56
RK
6192
6193 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
ff439b5f 6194 return 1;
bbf6f052 6195
8129842c
RS
6196 case BIND_EXPR:
6197 /* The only operand we look at is operand 1. The rest aren't
6198 part of the expression. */
e5e809f4 6199 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 6200
bbf6f052 6201 case METHOD_CALL_EXPR:
4fe9b91c 6202 /* This takes an rtx argument, but shouldn't appear here. */
bbf6f052 6203 abort ();
3a94c984 6204
e9a25f70
JL
6205 default:
6206 break;
bbf6f052
RK
6207 }
6208
6209 /* If we have an rtx, we do not need to scan our operands. */
6210 if (exp_rtl)
6211 break;
6212
8f17b5c5 6213 nops = first_rtl_op (TREE_CODE (exp));
bbf6f052
RK
6214 for (i = 0; i < nops; i++)
6215 if (TREE_OPERAND (exp, i) != 0
e5e809f4 6216 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052 6217 return 0;
8f17b5c5
MM
6218
6219 /* If this is a language-specific tree code, it may require
6220 special handling. */
dbbbbf3b
JDA
6221 if ((unsigned int) TREE_CODE (exp)
6222 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
ac79cd5a 6223 && !(*lang_hooks.safe_from_p) (x, exp))
8f17b5c5 6224 return 0;
bbf6f052
RK
6225 }
6226
6227 /* If we have an rtl, find any enclosed object. Then see if we conflict
6228 with it. */
6229 if (exp_rtl)
6230 {
6231 if (GET_CODE (exp_rtl) == SUBREG)
6232 {
6233 exp_rtl = SUBREG_REG (exp_rtl);
6234 if (GET_CODE (exp_rtl) == REG
6235 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6236 return 0;
6237 }
6238
6239 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
1da68f56 6240 are memory and they conflict. */
bbf6f052
RK
6241 return ! (rtx_equal_p (x, exp_rtl)
6242 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
21117a17 6243 && true_dependence (exp_rtl, VOIDmode, x,
1da68f56 6244 rtx_addr_varies_p)));
bbf6f052
RK
6245 }
6246
6247 /* If we reach here, it is safe. */
6248 return 1;
6249}
6250
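/* Illustrative sketch (not part of the original file): the usual way
   callers consult safe_from_p when deciding whether a suggested TARGET can
   be written while EXP is still being evaluated.  A code fragment, not a
   complete function:  */
#if 0
  if (target != 0 && ! safe_from_p (target, exp, 1))
    target = 0;		/* fall back; expand_expr picks its own temporary */
#endif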
01c8a7c8
RK
6251/* Subroutine of expand_expr: return rtx if EXP is a
6252 variable or parameter; else return 0. */
6253
6254static rtx
6255var_rtx (exp)
6256 tree exp;
6257{
6258 STRIP_NOPS (exp);
6259 switch (TREE_CODE (exp))
6260 {
6261 case PARM_DECL:
6262 case VAR_DECL:
6263 return DECL_RTL (exp);
6264 default:
6265 return 0;
6266 }
6267}
dbecbbe4
JL
6268
6269#ifdef MAX_INTEGER_COMPUTATION_MODE
400500c4 6270
dbecbbe4
JL
6271void
6272check_max_integer_computation_mode (exp)
3a94c984 6273 tree exp;
dbecbbe4 6274{
5f652c07 6275 enum tree_code code;
dbecbbe4
JL
6276 enum machine_mode mode;
6277
5f652c07
JM
6278 /* Strip any NOPs that don't change the mode. */
6279 STRIP_NOPS (exp);
6280 code = TREE_CODE (exp);
6281
71bca506
JL
6282 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6283 if (code == NOP_EXPR
6284 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6285 return;
6286
dbecbbe4
JL
6287 /* First check the type of the overall operation. We need only look at
6288 unary, binary and relational operations. */
6289 if (TREE_CODE_CLASS (code) == '1'
6290 || TREE_CODE_CLASS (code) == '2'
6291 || TREE_CODE_CLASS (code) == '<')
6292 {
6293 mode = TYPE_MODE (TREE_TYPE (exp));
6294 if (GET_MODE_CLASS (mode) == MODE_INT
6295 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6296 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6297 }
6298
6299 /* Check operand of a unary op. */
6300 if (TREE_CODE_CLASS (code) == '1')
6301 {
6302 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6303 if (GET_MODE_CLASS (mode) == MODE_INT
6304 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6305 internal_error ("unsupported wide integer operation");
dbecbbe4 6306 }
3a94c984 6307
dbecbbe4
JL
6308 /* Check operands of a binary/comparison op. */
6309 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6310 {
6311 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6312 if (GET_MODE_CLASS (mode) == MODE_INT
6313 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6314 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6315
6316 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6317 if (GET_MODE_CLASS (mode) == MODE_INT
6318 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6319 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6320 }
6321}
6322#endif
14a774a9 6323\f
0d4903b8
RK
6324/* Return the highest power of two that EXP is known to be a multiple of.
6325 This is used in updating alignment of MEMs in array references. */
6326
6327static HOST_WIDE_INT
6328highest_pow2_factor (exp)
6329 tree exp;
6330{
6331 HOST_WIDE_INT c0, c1;
6332
6333 switch (TREE_CODE (exp))
6334 {
6335 case INTEGER_CST:
e0f1be5c
JJ
6336 /* We can find the lowest bit that's a one. If the low
6337 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6338 We need to handle this case since we can find it in a COND_EXPR,
 6339	 a MIN_EXPR, or a MAX_EXPR.  If the constant overflows, we have an
6340 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
3a531a8b 6341 later ICE. */
e0f1be5c 6342 if (TREE_CONSTANT_OVERFLOW (exp))
1ed1b4fb 6343 return BIGGEST_ALIGNMENT;
e0f1be5c 6344 else
0d4903b8 6345 {
e0f1be5c
JJ
6346 /* Note: tree_low_cst is intentionally not used here,
6347 we don't care about the upper bits. */
6348 c0 = TREE_INT_CST_LOW (exp);
6349 c0 &= -c0;
6350 return c0 ? c0 : BIGGEST_ALIGNMENT;
0d4903b8
RK
6351 }
6352 break;
6353
65a07688 6354 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
0d4903b8
RK
6355 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6356 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6357 return MIN (c0, c1);
6358
6359 case MULT_EXPR:
6360 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6361 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6362 return c0 * c1;
6363
6364 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6365 case CEIL_DIV_EXPR:
65a07688
RK
6366 if (integer_pow2p (TREE_OPERAND (exp, 1))
6367 && host_integerp (TREE_OPERAND (exp, 1), 1))
6368 {
6369 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6370 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6371 return MAX (1, c0 / c1);
6372 }
6373 break;
0d4903b8
RK
6374
6375 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
65a07688 6376 case SAVE_EXPR: case WITH_RECORD_EXPR:
0d4903b8
RK
6377 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6378
65a07688
RK
6379 case COMPOUND_EXPR:
6380 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6381
0d4903b8
RK
6382 case COND_EXPR:
6383 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6384 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6385 return MIN (c0, c1);
6386
6387 default:
6388 break;
6389 }
6390
6391 return 1;
6392}
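/* Illustrative sketch (not part of the original file): the INTEGER_CST
   case above isolates the lowest set bit with the two's-complement
   identity c & -c.  For c = 24 (binary 11000), c & -c == 8, the largest
   power of two dividing 24.  The helper name is hypothetical.  */
#if 0
static unsigned HOST_WIDE_INT
lowest_set_bit_example (unsigned HOST_WIDE_INT c)
{
  return c & -c;  /* 0 yields 0; the code above substitutes BIGGEST_ALIGNMENT */
}
#endif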
818c0c94
RH
6393
6394/* Similar, except that it is known that the expression must be a multiple
6395 of the alignment of TYPE. */
6396
6397static HOST_WIDE_INT
6398highest_pow2_factor_for_type (type, exp)
6399 tree type;
6400 tree exp;
6401{
6402 HOST_WIDE_INT type_align, factor;
6403
6404 factor = highest_pow2_factor (exp);
6405 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6406 return MAX (factor, type_align);
6407}
0d4903b8 6408\f
f47e9b4e
RK
6409/* Return an object on the placeholder list that matches EXP, a
6410 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
738cc472 6411 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
70072ed9
RK
6412 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
 6413	 points to a location holding a starting position in the
738cc472
RK
 6414	 placeholder list (zero means the start of the list); on return, that
 6415	 location points to the placeholder list entry where the object was found.  */
f47e9b4e
RK
6416
6417tree
6418find_placeholder (exp, plist)
6419 tree exp;
6420 tree *plist;
6421{
6422 tree type = TREE_TYPE (exp);
6423 tree placeholder_expr;
6424
738cc472
RK
6425 for (placeholder_expr
6426 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6427 placeholder_expr != 0;
f47e9b4e
RK
6428 placeholder_expr = TREE_CHAIN (placeholder_expr))
6429 {
6430 tree need_type = TYPE_MAIN_VARIANT (type);
6431 tree elt;
6432
6433 /* Find the outermost reference that is of the type we want. If none,
6434 see if any object has a type that is a pointer to the type we
6435 want. */
6436 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6437 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6438 || TREE_CODE (elt) == COND_EXPR)
6439 ? TREE_OPERAND (elt, 1)
6440 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6441 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6442 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6443 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6444 ? TREE_OPERAND (elt, 0) : 0))
6445 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6446 {
6447 if (plist)
6448 *plist = placeholder_expr;
6449 return elt;
6450 }
6451
6452 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6453 elt
6454 = ((TREE_CODE (elt) == COMPOUND_EXPR
6455 || TREE_CODE (elt) == COND_EXPR)
6456 ? TREE_OPERAND (elt, 1)
6457 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6458 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6459 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6460 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6461 ? TREE_OPERAND (elt, 0) : 0))
6462 if (POINTER_TYPE_P (TREE_TYPE (elt))
6463 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6464 == need_type))
6465 {
6466 if (plist)
6467 *plist = placeholder_expr;
6468 return build1 (INDIRECT_REF, need_type, elt);
6469 }
6470 }
6471
70072ed9 6472 return 0;
f47e9b4e
RK
6473}
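/* Illustrative sketch (not part of the original file): the PLIST cursor
   protocol, as get_inner_reference uses it earlier in this file.  A code
   fragment, not a complete function:  */
#if 0
  tree placeholder_ptr = 0;		/* zero means start of the list */
  tree object = find_placeholder (exp, &placeholder_ptr);
  if (object == 0)
    ;	/* no match; caller keeps the PLACEHOLDER_EXPR as-is */
#endif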
6474\f
bbf6f052
RK
6475/* expand_expr: generate code for computing expression EXP.
6476 An rtx for the computed value is returned. The value is never null.
6477 In the case of a void EXP, const0_rtx is returned.
6478
6479 The value may be stored in TARGET if TARGET is nonzero.
6480 TARGET is just a suggestion; callers must assume that
6481 the rtx returned may not be the same as TARGET.
6482
6483 If TARGET is CONST0_RTX, it means that the value will be ignored.
6484
6485 If TMODE is not VOIDmode, it suggests generating the
6486 result in mode TMODE. But this is done only when convenient.
6487 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6488 TMODE is just a suggestion; callers must assume that
6489 the rtx returned may not have mode TMODE.
6490
6491 Note that TARGET may have neither TMODE nor MODE. In that case, it
6492 probably will not be used.
6493
6494 If MODIFIER is EXPAND_SUM then when EXP is an addition
6495 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6496 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6497 products as above, or REG or MEM, or constant.
6498 Ordinarily in such cases we would output mul or add instructions
6499 and then return a pseudo reg containing the sum.
6500
6501 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6502 it also marks a label as absolutely required (it can't be dead).
6503 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6504 This is used for outputting expressions used in initializers.
6505
6506 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6507 with a constant address even if that address is not normally legitimate.
6508 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6509
6510 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6511 a call parameter. Such targets require special care as we haven't yet
6512 marked TARGET so that it's safe from being trashed by libcalls. We
6513 don't want to use TARGET for anything but the final result;
6514 intermediate values must go elsewhere. Additionally, calls to
6515 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
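/* Usage note (editorial addition): the common call is simply

     rtx x = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   which lets expand_expr pick both the target and the mode; passing
   const0_rtx as TARGET is the idiom for "evaluate for side effects only",
   as the `ignore' logic below shows.  */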
6516
6517rtx
6518expand_expr (exp, target, tmode, modifier)
6519 tree exp;
6520 rtx target;
6521 enum machine_mode tmode;
6522 enum expand_modifier modifier;
6523{
6524 rtx op0, op1, temp;
6525 tree type = TREE_TYPE (exp);
6526 int unsignedp = TREE_UNSIGNED (type);
6527 enum machine_mode mode;
6528 enum tree_code code = TREE_CODE (exp);
6529 optab this_optab;
6530 rtx subtarget, original_target;
6531 int ignore;
6532 tree context;
6533
6534 /* Handle ERROR_MARK before anybody tries to access its type. */
6535 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6536 {
6537 op0 = CONST0_RTX (tmode);
6538 if (op0 != 0)
6539 return op0;
6540 return const0_rtx;
6541 }
6542
6543 mode = TYPE_MODE (type);
6544 /* Use subtarget as the target for operand 0 of a binary operation. */
6545 subtarget = get_subtarget (target);
6546 original_target = target;
6547 ignore = (target == const0_rtx
6548 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6549 || code == CONVERT_EXPR || code == REFERENCE_EXPR
6550 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6551 && TREE_CODE (type) == VOID_TYPE));
6552
6553 /* If we are going to ignore this result, we need only do something
6554 if there is a side-effect somewhere in the expression. If there
6555 is, short-circuit the most common cases here. Note that we must
6556 not call expand_expr with anything but const0_rtx in case this
6557 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
6558
6559 if (ignore)
6560 {
6561 if (! TREE_SIDE_EFFECTS (exp))
6562 return const0_rtx;
6563
6564 /* Ensure we reference a volatile object even if value is ignored, but
6565 don't do this if all we are doing is taking its address. */
6566 if (TREE_THIS_VOLATILE (exp)
6567 && TREE_CODE (exp) != FUNCTION_DECL
6568 && mode != VOIDmode && mode != BLKmode
6569 && modifier != EXPAND_CONST_ADDRESS)
6570 {
6571 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6572 if (GET_CODE (temp) == MEM)
6573 temp = copy_to_reg (temp);
6574 return const0_rtx;
6575 }
6576
6577 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6578 || code == INDIRECT_REF || code == BUFFER_REF)
6579 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6580 modifier);
6581
6582 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
6583 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
6584 {
6585 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6586 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6587 return const0_rtx;
6588 }
6589 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6590 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6591 /* If the second operand has no side effects, just evaluate
6592 the first. */
6593 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6594 modifier);
6595 else if (code == BIT_FIELD_REF)
6596 {
6597 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6598 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6599 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6600 return const0_rtx;
6601 }
6602
6603 target = 0;
6604 }
6605
6606#ifdef MAX_INTEGER_COMPUTATION_MODE
6607 /* Only check stuff here if the mode we want is different from the mode
6608 of the expression; if it's the same, check_max_integer_computation_mode
6609 will handle it. Do we really need to check this stuff at all? */
6610
6611 if (target
6612 && GET_MODE (target) != mode
6613 && TREE_CODE (exp) != INTEGER_CST
6614 && TREE_CODE (exp) != PARM_DECL
6615 && TREE_CODE (exp) != ARRAY_REF
6616 && TREE_CODE (exp) != ARRAY_RANGE_REF
6617 && TREE_CODE (exp) != COMPONENT_REF
6618 && TREE_CODE (exp) != BIT_FIELD_REF
6619 && TREE_CODE (exp) != INDIRECT_REF
6620 && TREE_CODE (exp) != CALL_EXPR
6621 && TREE_CODE (exp) != VAR_DECL
6622 && TREE_CODE (exp) != RTL_EXPR)
6623 {
6624 enum machine_mode mode = GET_MODE (target);
6625
6626 if (GET_MODE_CLASS (mode) == MODE_INT
6627 && mode > MAX_INTEGER_COMPUTATION_MODE)
6628 internal_error ("unsupported wide integer operation");
6629 }
6630
6631 if (tmode != mode
6632 && TREE_CODE (exp) != INTEGER_CST
6633 && TREE_CODE (exp) != PARM_DECL
6634 && TREE_CODE (exp) != ARRAY_REF
6635 && TREE_CODE (exp) != ARRAY_RANGE_REF
6636 && TREE_CODE (exp) != COMPONENT_REF
6637 && TREE_CODE (exp) != BIT_FIELD_REF
6638 && TREE_CODE (exp) != INDIRECT_REF
6639 && TREE_CODE (exp) != VAR_DECL
6640 && TREE_CODE (exp) != CALL_EXPR
6641 && TREE_CODE (exp) != RTL_EXPR
6642 && GET_MODE_CLASS (tmode) == MODE_INT
6643 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6644 internal_error ("unsupported wide integer operation");
6645
6646 check_max_integer_computation_mode (exp);
6647#endif
6648
6649 /* If will do cse, generate all results into pseudo registers
6650 since 1) that allows cse to find more things
6651 and 2) otherwise cse could produce an insn the machine
6652 cannot support. An exception is a CONSTRUCTOR into a multi-word
6653 MEM: that's much more likely to be most efficient into the MEM.
6654 Another is a CALL_EXPR which must return in memory. */
6655
6656 if (! cse_not_expected && mode != BLKmode && target
6657 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6658 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6659 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
6660 target = 0;
6661
6662 switch (code)
6663 {
6664 case LABEL_DECL:
6665 {
6666 tree function = decl_function_context (exp);
6667 /* Handle using a label in a containing function. */
6668 if (function != current_function_decl
6669 && function != inline_function_decl && function != 0)
6670 {
6671 struct function *p = find_function_data (function);
6672 p->expr->x_forced_labels
6673 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6674 p->expr->x_forced_labels);
6675 }
6676 else
6677 {
6678 if (modifier == EXPAND_INITIALIZER)
6679 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6680 label_rtx (exp),
6681 forced_labels);
6682 }
6683
6684 temp = gen_rtx_MEM (FUNCTION_MODE,
6685 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6686 if (function != current_function_decl
6687 && function != inline_function_decl && function != 0)
6688 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6689 return temp;
6690 }
6691
6692 case PARM_DECL:
6693 if (!DECL_RTL_SET_P (exp))
6694 {
6695 error_with_decl (exp, "prior parameter's size depends on `%s'");
6696 return CONST0_RTX (mode);
6697 }
6698
6699 /* ... fall through ... */
6700
6701 case VAR_DECL:
6702 /* If a static var's type was incomplete when the decl was written,
6703 but the type is complete now, lay out the decl now. */
6704 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
6705 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6706 {
6707 rtx value = DECL_RTL_IF_SET (exp);
6708
6709 layout_decl (exp, 0);
6710
6711 /* If the RTL was already set, update its mode and memory
6712 attributes. */
6713 if (value != 0)
6714 {
6715 PUT_MODE (value, DECL_MODE (exp));
6716 SET_DECL_RTL (exp, 0);
6717 set_mem_attributes (value, exp, 1);
6718 SET_DECL_RTL (exp, value);
6719 }
6720 }
6721
6722 /* ... fall through ... */
6723
6724 case FUNCTION_DECL:
6725 case RESULT_DECL:
6726 if (DECL_RTL (exp) == 0)
6727 abort ();
6728
6729 /* Ensure the variable is marked as used even if it doesn't go through
6730 a parser. If it hasn't been used yet, write out an external
6731 definition. */
6732 if (! TREE_USED (exp))
6733 {
6734 assemble_external (exp);
6735 TREE_USED (exp) = 1;
6736 }
6737
6738 /* Show we haven't gotten RTL for this yet. */
6739 temp = 0;
6740
6741 /* Handle variables inherited from containing functions. */
6742 context = decl_function_context (exp);
6743
6744 /* We treat inline_function_decl as an alias for the current function
6745 because that is the inline function whose vars, types, etc.
6746 are being merged into the current function.
6747 See expand_inline_function. */
6748
6749 if (context != 0 && context != current_function_decl
6750 && context != inline_function_decl
6751 /* If var is static, we don't need a static chain to access it. */
6752 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6753 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6754 {
6755 rtx addr;
6756
6757 /* Mark as non-local and addressable. */
6758 DECL_NONLOCAL (exp) = 1;
6759 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6760 abort ();
6761 (*lang_hooks.mark_addressable) (exp);
6762 if (GET_CODE (DECL_RTL (exp)) != MEM)
6763 abort ();
6764 addr = XEXP (DECL_RTL (exp), 0);
6765 if (GET_CODE (addr) == MEM)
6766 addr
6767 = replace_equiv_address (addr,
6768 fix_lexical_addr (XEXP (addr, 0), exp));
6769 else
6770 addr = fix_lexical_addr (addr, exp);
6771
6772 temp = replace_equiv_address (DECL_RTL (exp), addr);
6773 }
6774
6775 /* This is the case of an array whose size is to be determined
6776 from its initializer, while the initializer is still being parsed.
6777 See expand_decl. */
6778
6779 else if (GET_CODE (DECL_RTL (exp)) == MEM
6780 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6781 temp = validize_mem (DECL_RTL (exp));
6782
6783 /* If DECL_RTL is memory, we are in the normal case and either
6784 the address is not valid or it is not a register and -fforce-addr
6785 is specified, get the address into a register. */
6786
6787 else if (GET_CODE (DECL_RTL (exp)) == MEM
6788 && modifier != EXPAND_CONST_ADDRESS
6789 && modifier != EXPAND_SUM
6790 && modifier != EXPAND_INITIALIZER
6791 && (! memory_address_p (DECL_MODE (exp),
6792 XEXP (DECL_RTL (exp), 0))
6793 || (flag_force_addr
6794 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6795 temp = replace_equiv_address (DECL_RTL (exp),
6796 copy_rtx (XEXP (DECL_RTL (exp), 0)));
6797
6798 /* If we got something, return it. But first, set the alignment
6799 if the address is a register. */
6800 if (temp != 0)
6801 {
6802 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
6803 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6804
6805 return temp;
6806 }
6807
6808 /* If the mode of DECL_RTL does not match that of the decl, it
6809 must be a promoted value. We return a SUBREG of the wanted mode,
6810 but mark it so that we know that it was already extended. */
6811
6812 if (GET_CODE (DECL_RTL (exp)) == REG
6813 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
6814 {
6815 /* Get the signedness used for this variable. Ensure we get the
6816 same mode we got when the variable was declared. */
6817 if (GET_MODE (DECL_RTL (exp))
6818 != promote_mode (type, DECL_MODE (exp), &unsignedp,
6819 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6820 abort ();
6821
6822 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
6823 SUBREG_PROMOTED_VAR_P (temp) = 1;
6824 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6825 return temp;
6826 }
6827
6828 return DECL_RTL (exp);
6829
6830 case INTEGER_CST:
6831 temp = immed_double_const (TREE_INT_CST_LOW (exp),
6832 TREE_INT_CST_HIGH (exp), mode);
6833
6834 /* ??? If overflow is set, fold will have done an incomplete job,
6835 which can result in (plus xx (const_int 0)), which can get
6836 simplified by validate_replace_rtx during virtual register
6837 instantiation, which can result in unrecognizable insns.
6838 Avoid this by forcing all overflows into registers. */
6839 if (TREE_CONSTANT_OVERFLOW (exp)
6840 && modifier != EXPAND_INITIALIZER)
6841 temp = force_reg (mode, temp);
6842
6843 return temp;
6844
6845 case CONST_DECL:
6846 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6847
6848 case REAL_CST:
6849 /* If optimized, generate immediate CONST_DOUBLE
6850 which will be turned into memory by reload if necessary.
6851
6852 We used to force a register so that loop.c could see it. But
6853 this does not allow gen_* patterns to perform optimizations with
6854 the constants. It also produces two insns in cases like "x = 1.0;".
6855 On most machines, floating-point constants are not permitted in
6856 many insns, so we'd end up copying it to a register in any case.
6857
6858 Now, we do the copying in expand_binop, if appropriate. */
6859 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6860 TYPE_MODE (TREE_TYPE (exp)));
6861
6862 case COMPLEX_CST:
6863 case STRING_CST:
6864 if (! TREE_CST_RTL (exp))
6865 output_constant_def (exp, 1);
6866
6867 /* TREE_CST_RTL probably contains a constant address.
6868 On RISC machines where a constant address isn't valid,
6869 make some insns to get that address into a register. */
6870 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6871 && modifier != EXPAND_CONST_ADDRESS
6872 && modifier != EXPAND_INITIALIZER
6873 && modifier != EXPAND_SUM
6874 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6875 || (flag_force_addr
6876 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6877 return replace_equiv_address (TREE_CST_RTL (exp),
6878 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6879 return TREE_CST_RTL (exp);
6880
6881 case EXPR_WITH_FILE_LOCATION:
6882 {
6883 rtx to_return;
6884 const char *saved_input_filename = input_filename;
6885 int saved_lineno = lineno;
6886 input_filename = EXPR_WFL_FILENAME (exp);
6887 lineno = EXPR_WFL_LINENO (exp);
6888 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6889 emit_line_note (input_filename, lineno);
6890 /* Possibly avoid switching back and forth here. */
6891 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6892 input_filename = saved_input_filename;
6893 lineno = saved_lineno;
6894 return to_return;
6895 }
6896
6897 case SAVE_EXPR:
6898 context = decl_function_context (exp);
6899
6900 /* If this SAVE_EXPR was at global context, assume we are an
6901 initialization function and move it into our context. */
6902 if (context == 0)
6903 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6904
6905 /* We treat inline_function_decl as an alias for the current function
6906 because that is the inline function whose vars, types, etc.
6907 are being merged into the current function.
6908 See expand_inline_function. */
6909 if (context == current_function_decl || context == inline_function_decl)
6910 context = 0;
6911
6912 /* If this is non-local, handle it. */
6913 if (context)
6914 {
6915 /* The following call just exists to abort if the context is
6916 not of a containing function. */
6917 find_function_data (context);
6918
6919 temp = SAVE_EXPR_RTL (exp);
6920 if (temp && GET_CODE (temp) == REG)
6921 {
6922 put_var_into_stack (exp);
6923 temp = SAVE_EXPR_RTL (exp);
6924 }
6925 if (temp == 0 || GET_CODE (temp) != MEM)
6926 abort ();
6927 return
6928 replace_equiv_address (temp,
6929 fix_lexical_addr (XEXP (temp, 0), exp));
6930 }
6931 if (SAVE_EXPR_RTL (exp) == 0)
6932 {
6933 if (mode == VOIDmode)
6934 temp = const0_rtx;
6935 else
6936 temp = assign_temp (build_qualified_type (type,
6937 (TYPE_QUALS (type)
6938 | TYPE_QUAL_CONST)),
6939 3, 0, 0);
6940
6941 SAVE_EXPR_RTL (exp) = temp;
6942 if (!optimize && GET_CODE (temp) == REG)
6943 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6944 save_expr_regs);
6945
6946 /* If the mode of TEMP does not match that of the expression, it
6947 must be a promoted value. We pass store_expr a SUBREG of the
6948 wanted mode but mark it so that we know that it was already
6949 extended. */
6950
6951 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6952 {
6953 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6954 promote_mode (type, mode, &unsignedp, 0);
6955 SUBREG_PROMOTED_VAR_P (temp) = 1;
6956 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6957 }
6958
6959 if (temp == const0_rtx)
6960 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
6961 else
6962 store_expr (TREE_OPERAND (exp, 0), temp,
6963 modifier == EXPAND_STACK_PARM ? 2 : 0);
6964
6965 TREE_USED (exp) = 1;
6966 }
6967
6968 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6969 must be a promoted value. We return a SUBREG of the wanted mode,
6970 but mark it so that we know that it was already extended. */
6971
6972 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6973 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6974 {
6975 /* Compute the signedness and make the proper SUBREG. */
6976 promote_mode (type, mode, &unsignedp, 0);
6977 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
6978 SUBREG_PROMOTED_VAR_P (temp) = 1;
6979 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6980 return temp;
6981 }
6982
6983 return SAVE_EXPR_RTL (exp);
6984
6985 case UNSAVE_EXPR:
6986 {
6987 rtx temp;
6988 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6989 TREE_OPERAND (exp, 0)
6990 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6991 return temp;
6992 }
6993
6994 case PLACEHOLDER_EXPR:
6995 {
6996 tree old_list = placeholder_list;
6997 tree placeholder_expr = 0;
6998
6999 exp = find_placeholder (exp, &placeholder_expr);
7000 if (exp == 0)
7001 abort ();
7002
7003 placeholder_list = TREE_CHAIN (placeholder_expr);
7004 temp = expand_expr (exp, original_target, tmode, modifier);
7005 placeholder_list = old_list;
7006 return temp;
7007 }
7008
7009 case WITH_RECORD_EXPR:
7010 /* Put the object on the placeholder list, expand our first operand,
7011 and pop the list. */
7012 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7013 placeholder_list);
7014 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7015 modifier);
7016 placeholder_list = TREE_CHAIN (placeholder_list);
7017 return target;
7018
7019 case GOTO_EXPR:
7020 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7021 expand_goto (TREE_OPERAND (exp, 0));
7022 else
7023 expand_computed_goto (TREE_OPERAND (exp, 0));
7024 return const0_rtx;
7025
7026 case EXIT_EXPR:
7027 expand_exit_loop_if_false (NULL,
7028 invert_truthvalue (TREE_OPERAND (exp, 0)));
7029 return const0_rtx;
7030
7031 case LABELED_BLOCK_EXPR:
7032 if (LABELED_BLOCK_BODY (exp))
7033 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
7034 /* Should perhaps use expand_label, but this is simpler and safer. */
7035 do_pending_stack_adjust ();
7036 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7037 return const0_rtx;
7038
7039 case EXIT_BLOCK_EXPR:
7040 if (EXIT_BLOCK_RETURN (exp))
7041 sorry ("returned value in block_exit_expr");
7042 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7043 return const0_rtx;
7044
7045 case LOOP_EXPR:
7046 push_temp_slots ();
7047 expand_start_loop (1);
7048 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
7049 expand_end_loop ();
7050 pop_temp_slots ();
7051
7052 return const0_rtx;
7053
7054 case BIND_EXPR:
7055 {
7056 tree vars = TREE_OPERAND (exp, 0);
7057
7058 /* Need to open a binding contour here because
7059 if there are any cleanups they must be contained here. */
7060 expand_start_bindings (2);
7061
7062 /* Mark the corresponding BLOCK for output in its proper place. */
7063 if (TREE_OPERAND (exp, 2) != 0
7064 && ! TREE_USED (TREE_OPERAND (exp, 2)))
7065 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7066
7067 /* If VARS have not yet been expanded, expand them now. */
7068 while (vars)
7069 {
7070 if (!DECL_RTL_SET_P (vars))
7071 expand_decl (vars);
7072 expand_decl_init (vars);
7073 vars = TREE_CHAIN (vars);
7074 }
7075
7076 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7077
7078 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7079
7080 return temp;
7081 }
7082
7083 case RTL_EXPR:
7084 if (RTL_EXPR_SEQUENCE (exp))
7085 {
7086 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7087 abort ();
7088 emit_insn (RTL_EXPR_SEQUENCE (exp));
7089 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7090 }
7091 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7092 free_temps_for_rtl_expr (exp);
7093 return RTL_EXPR_RTL (exp);
7094
7095 case CONSTRUCTOR:
7096 /* If we don't need the result, just ensure we evaluate any
7097 subexpressions. */
7098 if (ignore)
7099 {
7100 tree elt;
7101
7102 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7103 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7104
7105 return const0_rtx;
7106 }
7107
7108 /* All elts simple constants => refer to a constant in memory. But
7109 if this is a non-BLKmode mode, let it store a field at a time
7110 since that should make a CONST_INT or CONST_DOUBLE when we
7111 fold. Likewise, if we have a target we can use, it is best to
7112 store directly into the target unless the type is large enough
7113 that memcpy will be used. If we are making an initializer and
7114 all operands are constant, put it in memory as well.
7115
7116 FIXME: Avoid trying to fill vector constructors piece-meal.
7117 Output them with output_constant_def below unless we're sure
7118 they're zeros. This should go away when vector initializers
7119 are treated like VECTOR_CST instead of arrays.
7120 */
7121 else if ((TREE_STATIC (exp)
7122 && ((mode == BLKmode
7123 && ! (target != 0 && safe_from_p (target, exp, 1)))
7124 || TREE_ADDRESSABLE (exp)
7125 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
7126 && (! MOVE_BY_PIECES_P
7127 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7128 TYPE_ALIGN (type)))
7129 && ((TREE_CODE (type) == VECTOR_TYPE
7130 && !is_zeros_p (exp))
7131 || ! mostly_zeros_p (exp)))))
7132 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
7133 {
7134 rtx constructor = output_constant_def (exp, 1);
7135
7136 if (modifier != EXPAND_CONST_ADDRESS
7137 && modifier != EXPAND_INITIALIZER
7138 && modifier != EXPAND_SUM)
7139 constructor = validize_mem (constructor);
7140
7141 return constructor;
7142 }
7143 else
7144 {
7145 /* Handle calls that pass values in multiple non-contiguous
7146 locations. The Irix 6 ABI has examples of this. */
7147 if (target == 0 || ! safe_from_p (target, exp, 1)
7148 || GET_CODE (target) == PARALLEL
7149 || modifier == EXPAND_STACK_PARM)
7150 target
7151 = assign_temp (build_qualified_type (type,
7152 (TYPE_QUALS (type)
7153 | (TREE_READONLY (exp)
7154 * TYPE_QUAL_CONST))),
7155 0, TREE_ADDRESSABLE (exp), 1);
7156
7157 store_constructor (exp, target, 0, int_expr_size (exp));
7158 return target;
7159 }
7160
7161 case INDIRECT_REF:
7162 {
7163 tree exp1 = TREE_OPERAND (exp, 0);
7164 tree index;
7165 tree string = string_constant (exp1, &index);
7166
7167 /* Try to optimize reads from const strings. */
7168 if (string
7169 && TREE_CODE (string) == STRING_CST
7170 && TREE_CODE (index) == INTEGER_CST
7171 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7172 && GET_MODE_CLASS (mode) == MODE_INT
7173 && GET_MODE_SIZE (mode) == 1
7174 && modifier != EXPAND_WRITE)
7175 return gen_int_mode (TREE_STRING_POINTER (string)
7176 [TREE_INT_CST_LOW (index)], mode);
7177
7178 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7179 op0 = memory_address (mode, op0);
7180 temp = gen_rtx_MEM (mode, op0);
7181 set_mem_attributes (temp, exp, 0);
7182
7183 /* If we are writing to this object and its type is a record with
7184 readonly fields, we must mark it as readonly so it will
7185 conflict with readonly references to those fields. */
7186 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7187 RTX_UNCHANGING_P (temp) = 1;
7188
7189 return temp;
7190 }
7191
7192 case ARRAY_REF:
7193 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7194 abort ();
7195
7196 {
7197 tree array = TREE_OPERAND (exp, 0);
7198 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7199 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
7200 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
7201 HOST_WIDE_INT i;
7202
7203 /* Optimize the special-case of a zero lower bound.
7204
7205 We convert the low_bound to sizetype to avoid some problems
7206 with constant folding. (E.g. suppose the lower bound is 1,
7207 and its mode is QI. Without the conversion, (ARRAY
7208 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
7209 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
7210
7211 if (! integer_zerop (low_bound))
7212 index = size_diffop (index, convert (sizetype, low_bound));
7213
7214 /* Fold an expression like: "foo"[2].
7215 This is not done in fold so it won't happen inside &.
7216 Don't fold if this is for wide characters since it's too
7217 difficult to do correctly and this is a very rare case. */
7218
7219 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7220 && TREE_CODE (array) == STRING_CST
7221 && TREE_CODE (index) == INTEGER_CST
7222 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7223 && GET_MODE_CLASS (mode) == MODE_INT
7224 && GET_MODE_SIZE (mode) == 1)
7225 return gen_int_mode (TREE_STRING_POINTER (array)
7226 [TREE_INT_CST_LOW (index)], mode);
7227
7228 /* If this is a constant index into a constant array,
7229 just get the value from the array. Handle both the cases when
7230 we have an explicit constructor and when our operand is a variable
7231 that was declared const. */
7232
7233 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7234 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
7235 && TREE_CODE (index) == INTEGER_CST
7236 && 0 > compare_tree_int (index,
7237 list_length (CONSTRUCTOR_ELTS
7238 (TREE_OPERAND (exp, 0)))))
7239 {
7240 tree elem;
7241
7242 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7243 i = TREE_INT_CST_LOW (index);
7244 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7245 ;
7246
7247 if (elem)
7248 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7249 modifier);
7250 }
7251
7252 else if (optimize >= 1
7253 && modifier != EXPAND_CONST_ADDRESS
7254 && modifier != EXPAND_INITIALIZER
7255 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7256 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7257 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7258 {
7259 if (TREE_CODE (index) == INTEGER_CST)
7260 {
7261 tree init = DECL_INITIAL (array);
7262
742920c7
RK
7263 if (TREE_CODE (init) == CONSTRUCTOR)
7264 {
665f2503 7265 tree elem;
742920c7 7266
05bccae2 7267 for (elem = CONSTRUCTOR_ELTS (init);
5cb1bea4
JM
7268 (elem
7269 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
05bccae2
RK
7270 elem = TREE_CHAIN (elem))
7271 ;
7272
c54b0a5e 7273 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
742920c7 7274 return expand_expr (fold (TREE_VALUE (elem)), target,
37a08a29 7275 tmode, modifier);
742920c7
RK
7276 }
7277 else if (TREE_CODE (init) == STRING_CST
7278 && 0 > compare_tree_int (index,
7279 TREE_STRING_LENGTH (init)))
7280 {
7281 tree type = TREE_TYPE (TREE_TYPE (init));
7282 enum machine_mode mode = TYPE_MODE (type);
7283
7284 if (GET_MODE_CLASS (mode) == MODE_INT
7285 && GET_MODE_SIZE (mode) == 1)
7286 return gen_int_mode (TREE_STRING_POINTER (init)
7287 [TREE_INT_CST_LOW (index)], mode);
7288 }
7289 }
7290 }
7291 }
7292 /* Fall through. */
7293
7294 case COMPONENT_REF:
7295 case BIT_FIELD_REF:
7296 case ARRAY_RANGE_REF:
7297 /* If the operand is a CONSTRUCTOR, we can just extract the
7298 appropriate field if it is present. Don't do this if we have
7299 already written the data since we want to refer to that copy
7300 and varasm.c assumes that's what we'll do. */
7301 if (code == COMPONENT_REF
7302 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7303 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7304 {
7305 tree elt;
7306
7307 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7308 elt = TREE_CHAIN (elt))
7309 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7310 /* We can normally use the value of the field in the
7311 CONSTRUCTOR. However, if this is a bitfield in
7312 an integral mode that we can fit in a HOST_WIDE_INT,
7313 we must mask only the number of bits in the bitfield,
7314 since this is done implicitly by the constructor. If
7315 the bitfield does not meet either of those conditions,
7316 we can't do this optimization. */
7317 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7318 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7319 == MODE_INT)
7320 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7321 <= HOST_BITS_PER_WIDE_INT))))
7322 {
7323 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7324 && modifier == EXPAND_STACK_PARM)
7325 target = 0;
7326 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7327 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7328 {
7329 HOST_WIDE_INT bitsize
7330 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7331 enum machine_mode imode
7332 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7333
7334 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7335 {
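 /* Editorial note (not in the original): the next line builds the
    mask (1 << bitsize) - 1, e.g. 0x7 for a 3-bit field, so the AND
    keeps only the bits that belong to the bitfield.  */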
7336 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
7337 op0 = expand_and (imode, op0, op1, target);
7338 }
7339 else
7340 {
7341 tree count
7342 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7343 0);
7344
7345 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7346 target, 0);
7347 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7348 target, 0);
7349 }
7350 }
7351
7352 return op0;
7353 }
7354 }
7355
7356 {
7357 enum machine_mode mode1;
7358 HOST_WIDE_INT bitsize, bitpos;
7359 tree offset;
7360 int volatilep = 0;
7361 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
7362 &mode1, &unsignedp, &volatilep);
7363 rtx orig_op0;
7364
7365 /* If we got back the original object, something is wrong. Perhaps
7366 we are evaluating an expression too early. In any event, don't
7367 infinitely recurse. */
7368 if (tem == exp)
7369 abort ();
7370
3d27140a 7371 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
7372 computation, since it will need a temporary and TARGET is known
7373 to have to do. This occurs in unchecked conversion in Ada. */
3a94c984 7374
f47e9b4e
RK
7375 orig_op0 = op0
7376 = expand_expr (tem,
7377 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7378 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7379 != INTEGER_CST)
8403445a 7380 && modifier != EXPAND_STACK_PARM
f47e9b4e
RK
7381 ? target : NULL_RTX),
7382 VOIDmode,
7383 (modifier == EXPAND_INITIALIZER
7384 || modifier == EXPAND_CONST_ADDRESS
7385 || modifier == EXPAND_STACK_PARM)
f47e9b4e 7386 ? modifier : EXPAND_NORMAL);
bbf6f052 7387
8c8a8e34 7388 /* If this is a constant, put it into a register if it is a
14a774a9 7389 legitimate constant and OFFSET is 0 and memory if it isn't. */
8c8a8e34
JW
7390 if (CONSTANT_P (op0))
7391 {
7392 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
14a774a9
RK
7393 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7394 && offset == 0)
8c8a8e34
JW
7395 op0 = force_reg (mode, op0);
7396 else
7397 op0 = validize_mem (force_const_mem (mode, op0));
7398 }
7399
7bb0943f
RS
7400 if (offset != 0)
7401 {
8403445a
AM
7402 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7403 EXPAND_SUM);
7bb0943f 7404
a2725049 7405 /* If this object is in a register, put it into memory.
14a774a9
RK
7406 This case can't occur in C, but can in Ada if we have
7407 unchecked conversion of an expression from a scalar type to
7408 an array or record type. */
7409 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7410 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7411 {
d04218c0
RK
7412 /* If the operand is a SAVE_EXPR, we can deal with this by
7413 forcing the SAVE_EXPR into memory. */
7414 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
eeb35b45
RK
7415 {
7416 put_var_into_stack (TREE_OPERAND (exp, 0));
7417 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7418 }
d04218c0
RK
7419 else
7420 {
7421 tree nt
7422 = build_qualified_type (TREE_TYPE (tem),
7423 (TYPE_QUALS (TREE_TYPE (tem))
7424 | TYPE_QUAL_CONST));
7425 rtx memloc = assign_temp (nt, 1, 1, 1);
7426
d04218c0
RK
7427 emit_move_insn (memloc, op0);
7428 op0 = memloc;
7429 }
14a774a9
RK
7430 }
7431
7bb0943f
RS
7432 if (GET_CODE (op0) != MEM)
7433 abort ();
2d48c13d 7434
2d48c13d 7435#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672
RK
7436 if (GET_MODE (offset_rtx) != Pmode)
7437 offset_rtx = convert_memory_address (Pmode, offset_rtx);
fa06ab5c
RK
7438#else
7439 if (GET_MODE (offset_rtx) != ptr_mode)
7440 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d
JL
7441#endif
7442
7443 /* A constant address in OP0 can have VOIDmode, we must not try
7444 to call force_reg for that case. Avoid that case. */
7445 if (GET_CODE (op0) == MEM
7446 && GET_MODE (op0) == BLKmode
7447 && GET_MODE (XEXP (op0, 0)) != VOIDmode
7448 && bitsize != 0
7449 && (bitpos % bitsize) == 0
7450 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
7451 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
7452 {
7453 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7454 bitpos = 0;
7455 }
7456
7457 op0 = offset_address (op0, offset_rtx,
7458 highest_pow2_factor (offset));
7459 }
7460
7461 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7462 record its alignment as BIGGEST_ALIGNMENT. */
7463 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7464 && is_aligning_offset (offset, tem))
7465 set_mem_align (op0, BIGGEST_ALIGNMENT);
7466
7467 /* Don't forget about volatility even if this is a bitfield. */
7468 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7469 {
7470 if (op0 == orig_op0)
7471 op0 = copy_rtx (op0);
7472
7473 MEM_VOLATILE_P (op0) = 1;
7474 }
7475
7476 /* The following code doesn't handle CONCAT.
7477 Assume only bitpos == 0 can be used for CONCAT, due to
7478 one element arrays having the same mode as its element. */
7479 if (GET_CODE (op0) == CONCAT)
7480 {
7481 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7482 abort ();
7483 return op0;
7484 }
7485
7486 /* In cases where an aligned union has an unaligned object
7487 as a field, we might be extracting a BLKmode value from
7488 an integer-mode (e.g., SImode) object. Handle this case
7489 by doing the extract into an object as wide as the field
7490 (which we know to be the width of a basic mode), then
7491 storing into memory, and changing the mode to BLKmode. */
7492 if (mode1 == VOIDmode
7493 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7494 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7495 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7496 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7497 && modifier != EXPAND_CONST_ADDRESS
7498 && modifier != EXPAND_INITIALIZER)
7499 /* If the field isn't aligned enough to fetch as a memref,
7500 fetch it as a bit field. */
7501 || (mode1 != BLKmode
7502 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0))
7503 && ((TYPE_ALIGN (TREE_TYPE (tem))
7504 < GET_MODE_ALIGNMENT (mode))
7505 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7506 /* If the type and the field are a constant size and the
7507 size of the type isn't the same size as the bitfield,
7508 we must use bitfield operations. */
7509 || (bitsize >= 0
7510 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7511 == INTEGER_CST)
7512 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7513 bitsize)))
7514 {
7515 enum machine_mode ext_mode = mode;
7516
7517 if (ext_mode == BLKmode
7518 && ! (target != 0 && GET_CODE (op0) == MEM
7519 && GET_CODE (target) == MEM
7520 && bitpos % BITS_PER_UNIT == 0))
7521 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7522
7523 if (ext_mode == BLKmode)
7524 {
7525 /* In this case, BITPOS must start at a byte boundary and
7526 TARGET, if specified, must be a MEM. */
7527 if (GET_CODE (op0) != MEM
7528 || (target != 0 && GET_CODE (target) != MEM)
7529 || bitpos % BITS_PER_UNIT != 0)
7530 abort ();
7531
7532 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7533 if (target == 0)
7534 target = assign_temp (type, 0, 1, 1);
7535
7536 emit_block_move (target, op0,
7537 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7538 / BITS_PER_UNIT),
7539 (modifier == EXPAND_STACK_PARM
7540 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
7541
7542 return target;
7543 }
7544
7545 op0 = validize_mem (op0);
7546
7547 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
7548 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7549
7550 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7551 (modifier == EXPAND_STACK_PARM
7552 ? NULL_RTX : target),
7553 ext_mode, ext_mode,
7554 int_size_in_bytes (TREE_TYPE (tem)));
7555
7556 /* If the result is a record type and BITSIZE is narrower than
7557 the mode of OP0, an integral mode, and this is a big endian
7558 machine, we must put the field into the high-order bits. */
7559 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7560 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7561 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7562 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7563 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7564 - bitsize),
7565 op0, 1);
7566
bbf6f052
RK
7567 if (mode == BLKmode)
7568 {
7569 rtx new = assign_temp (build_qualified_type
7570 ((*lang_hooks.types.type_for_mode)
7571 (ext_mode, 0),
7572 TYPE_QUAL_CONST), 0, 1, 1);
7573
7574 emit_move_insn (new, op0);
7575 op0 = copy_rtx (new);
7576 PUT_MODE (op0, BLKmode);
7577 set_mem_attributes (op0, exp, 1);
7578 }
7579
7580 return op0;
7581 }
7582
7583 /* If the result is BLKmode, use that to access the object
7584 now as well. */
7585 if (mode == BLKmode)
7586 mode1 = BLKmode;
7587
7588 /* Get a reference to just this component. */
7589 if (modifier == EXPAND_CONST_ADDRESS
7590 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7591 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
7592 else
7593 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7594
7595 if (op0 == orig_op0)
7596 op0 = copy_rtx (op0);
7597
7598 set_mem_attributes (op0, exp, 0);
7599 if (GET_CODE (XEXP (op0, 0)) == REG)
7600 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
7601
7602 MEM_VOLATILE_P (op0) |= volatilep;
7603 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
7604 || modifier == EXPAND_CONST_ADDRESS
7605 || modifier == EXPAND_INITIALIZER)
7606 return op0;
7607 else if (target == 0)
7608 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7609
7610 convert_move (target, op0, unsignedp);
7611 return target;
7612 }
7613
7614 case VTABLE_REF:
7615 {
7616 rtx insn, before = get_last_insn (), vtbl_ref;
7617
7618 /* Evaluate the interior expression. */
7619 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7620 tmode, modifier);
7621
7622 /* Get or create an instruction off which to hang a note. */
7623 if (REG_P (subtarget))
7624 {
7625 target = subtarget;
7626 insn = get_last_insn ();
7627 if (insn == before)
7628 abort ();
7629 if (! INSN_P (insn))
7630 insn = prev_nonnote_insn (insn);
7631 }
7632 else
7633 {
7634 target = gen_reg_rtx (GET_MODE (subtarget));
7635 insn = emit_move_insn (target, subtarget);
7636 }
7637
7638 /* Collect the data for the note. */
7639 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7640 vtbl_ref = plus_constant (vtbl_ref,
7641 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7642 /* Discard the initial CONST that was added. */
7643 vtbl_ref = XEXP (vtbl_ref, 0);
7644
7645 REG_NOTES (insn)
7646 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7647
7648 return target;
7649 }
7650
7651 /* Intended for a reference to a buffer of a file-object in Pascal.
7652 But it's not certain that a special tree code will really be
7653 necessary for these. INDIRECT_REF might work for them. */
7654 case BUFFER_REF:
7655 abort ();
7656
7657 case IN_EXPR:
7658 {
7659 /* Pascal set IN expression.
7660
7661 Algorithm:
7662 rlo = set_low - (set_low%bits_per_word);
7663 the_word = set [ (index - rlo)/bits_per_word ];
7664 bit_index = index % bits_per_word;
7665 bitmask = 1 << bit_index;
7666 return !!(the_word & bitmask); */
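 /* Editorial walk-through of the algorithm above, with
    bits_per_word == 8, set_low == 3, index == 13:
    rlo = 3 - (3 % 8) = 0; the_word = set[(13 - 0) / 8] = set[1];
    bit_index = 13 % 8 = 5; bitmask = 1 << 5 = 32.  */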
7667
7668 tree set = TREE_OPERAND (exp, 0);
7669 tree index = TREE_OPERAND (exp, 1);
7670 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7671 tree set_type = TREE_TYPE (set);
7672 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7673 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7674 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7675 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7676 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7677 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7678 rtx setaddr = XEXP (setval, 0);
7679 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7680 rtx rlow;
7681 rtx diff, quo, rem, addr, bit, result;
7682
7683 /* If domain is empty, answer is no. Likewise if index is constant
7684 and out of bounds. */
7685 if (((TREE_CODE (set_high_bound) == INTEGER_CST
7686 && TREE_CODE (set_low_bound) == INTEGER_CST
7687 && tree_int_cst_lt (set_high_bound, set_low_bound))
7688 || (TREE_CODE (index) == INTEGER_CST
7689 && TREE_CODE (set_low_bound) == INTEGER_CST
7690 && tree_int_cst_lt (index, set_low_bound))
7691 || (TREE_CODE (set_high_bound) == INTEGER_CST
7692 && TREE_CODE (index) == INTEGER_CST
7693 && tree_int_cst_lt (set_high_bound, index))))
7694 return const0_rtx;
7695
7696 if (target == 0)
7697 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7698
7699 /* If we get here, we have to generate the code for both cases
7700 (in range and out of range). */
7701
7702 op0 = gen_label_rtx ();
7703 op1 = gen_label_rtx ();
7704
7705 if (! (GET_CODE (index_val) == CONST_INT
7706 && GET_CODE (lo_r) == CONST_INT))
7707 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7708 GET_MODE (index_val), iunsignedp, op1);
7709
7710 if (! (GET_CODE (index_val) == CONST_INT
7711 && GET_CODE (hi_r) == CONST_INT))
7712 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7713 GET_MODE (index_val), iunsignedp, op1);
7714
7715 /* Calculate the element number of bit zero in the first word
7716 of the set. */
7717 if (GET_CODE (lo_r) == CONST_INT)
7718 rlow = GEN_INT (INTVAL (lo_r)
7719 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7720 else
7721 rlow = expand_binop (index_mode, and_optab, lo_r,
7722 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
7723 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7724
7725 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7726 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7727
7728 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
7729 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7730 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7731 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7732
7308a047 7733 addr = memory_address (byte_mode,
d6a5ac33
RK
7734 expand_binop (index_mode, add_optab, diff,
7735 setaddr, NULL_RTX, iunsignedp,
17938e57 7736 OPTAB_LIB_WIDEN));
d6a5ac33 7737
3a94c984 7738 /* Extract the bit we want to examine. */
7308a047 7739 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 7740 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
7741 make_tree (TREE_TYPE (index), rem),
7742 NULL_RTX, 1);
7743 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7744 GET_MODE (target) == byte_mode ? target : 0,
7308a047 7745 1, OPTAB_LIB_WIDEN);
17938e57
RK
7746
7747 if (result != target)
7748 convert_move (target, result, 1);
7749
7750 /* Output the code to handle the out-of-range case. */
7751 emit_jump (op0);
7752 emit_label (op1);
7753 emit_move_insn (target, const0_rtx);
7754 emit_label (op0);
7755 return target;
7756 }
7757
bbf6f052 7758 case WITH_CLEANUP_EXPR:
6ad7895a 7759 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
bbf6f052 7760 {
6ad7895a 7761 WITH_CLEANUP_EXPR_RTL (exp)
37a08a29 7762 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
659e5a7a
JM
7763 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7764 CLEANUP_EH_ONLY (exp));
e976b8b2 7765
bbf6f052 7766 /* That's it for this cleanup. */
6ad7895a 7767 TREE_OPERAND (exp, 1) = 0;
bbf6f052 7768 }
6ad7895a 7769 return WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052 7770
5dab5552
MS
7771 case CLEANUP_POINT_EXPR:
7772 {
e976b8b2
MS
7773 /* Start a new binding layer that will keep track of all cleanup
7774 actions to be performed. */
8e91754e 7775 expand_start_bindings (2);
e976b8b2 7776
d93d4205 7777 target_temp_slot_level = temp_slot_level;
e976b8b2 7778
37a08a29 7779 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
f283f66b
JM
7780 /* If we're going to use this value, load it up now. */
7781 if (! ignore)
7782 op0 = force_not_mem (op0);
d93d4205 7783 preserve_temp_slots (op0);
e976b8b2 7784 expand_end_bindings (NULL_TREE, 0, 0);
5dab5552
MS
7785 }
7786 return op0;
7787
bbf6f052
RK
7788 case CALL_EXPR:
7789 /* Check for a built-in function. */
7790 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
7791 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7792 == FUNCTION_DECL)
bbf6f052 7793 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
0fb7aeda 7794 {
c70eaeaf
KG
7795 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7796 == BUILT_IN_FRONTEND)
8403445a
AM
7797 return (*lang_hooks.expand_expr) (exp, original_target,
7798 tmode, modifier);
c70eaeaf
KG
7799 else
7800 return expand_builtin (exp, target, subtarget, tmode, ignore);
7801 }
d6a5ac33 7802
8129842c 7803 return expand_call (exp, target, ignore);
bbf6f052
RK
7804
7805 case NON_LVALUE_EXPR:
7806 case NOP_EXPR:
7807 case CONVERT_EXPR:
7808 case REFERENCE_EXPR:
4a53008b 7809 if (TREE_OPERAND (exp, 0) == error_mark_node)
a592f288 7810 return const0_rtx;
4a53008b 7811
bbf6f052
RK
7812 if (TREE_CODE (type) == UNION_TYPE)
7813 {
7814 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9 7815
c3d32120
RK
7816 /* If both input and output are BLKmode, this conversion isn't doing
7817 anything except possibly changing memory attribute. */
7818 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7819 {
7820 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7821 modifier);
7822
7823 result = copy_rtx (result);
7824 set_mem_attributes (result, exp, 0);
7825 return result;
7826 }
14a774a9 7827
bbf6f052 7828 if (target == 0)
1da68f56 7829 target = assign_temp (type, 0, 1, 1);
d6a5ac33 7830
bbf6f052
RK
7831 if (GET_CODE (target) == MEM)
7832 /* Store data into beginning of memory target. */
7833 store_expr (TREE_OPERAND (exp, 0),
8403445a
AM
7834 adjust_address (target, TYPE_MODE (valtype), 0),
7835 modifier == EXPAND_STACK_PARM ? 2 : 0);
1499e0a8 7836
bbf6f052
RK
7837 else if (GET_CODE (target) == REG)
7838 /* Store this field into a union of the proper type. */
14a774a9
RK
7839 store_field (target,
7840 MIN ((int_size_in_bytes (TREE_TYPE
7841 (TREE_OPERAND (exp, 0)))
7842 * BITS_PER_UNIT),
8752c357 7843 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
14a774a9 7844 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
a06ef755 7845 VOIDmode, 0, type, 0);
bbf6f052
RK
7846 else
7847 abort ();
7848
7849 /* Return the entire union. */
7850 return target;
7851 }
d6a5ac33 7852
7f62854a
RK
7853 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7854 {
7855 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
37a08a29 7856 modifier);
7f62854a
RK
7857
7858 /* If the signedness of the conversion differs and OP0 is
7859 a promoted SUBREG, clear that indication since we now
7860 have to do the proper extension. */
7861 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7862 && GET_CODE (op0) == SUBREG)
7863 SUBREG_PROMOTED_VAR_P (op0) = 0;
7864
7865 return op0;
7866 }
7867
fdf473ae 7868 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
12342f90
RS
7869 if (GET_MODE (op0) == mode)
7870 return op0;
12342f90 7871
d6a5ac33
RK
7872 /* If OP0 is a constant, just convert it into the proper mode. */
7873 if (CONSTANT_P (op0))
fdf473ae
RH
7874 {
7875 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7876 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7877
0fb7aeda 7878 if (modifier == EXPAND_INITIALIZER)
fdf473ae
RH
7879 return simplify_gen_subreg (mode, op0, inner_mode,
7880 subreg_lowpart_offset (mode,
7881 inner_mode));
7882 else
7883 return convert_modes (mode, inner_mode, op0,
7884 TREE_UNSIGNED (inner_type));
7885 }
12342f90 7886
26fcb35a 7887 if (modifier == EXPAND_INITIALIZER)
38a448ca 7888 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 7889
bbf6f052 7890 if (target == 0)
d6a5ac33
RK
7891 return
7892 convert_to_mode (mode, op0,
7893 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 7894 else
d6a5ac33
RK
7895 convert_move (target, op0,
7896 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
7897 return target;
7898
ed239f5a 7899 case VIEW_CONVERT_EXPR:
37a08a29 7900 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
ed239f5a
RK
7901
7902 /* If the input and output modes are both the same, we are done.
7903 Otherwise, if neither mode is BLKmode and both are within a word, we
c11c10d8
RK
7904 can use gen_lowpart. If neither is true, make sure the operand is
7905 in memory and convert the MEM to the new mode. */
ed239f5a
RK
7906 if (TYPE_MODE (type) == GET_MODE (op0))
7907 ;
7908 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7909 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7910 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7911 op0 = gen_lowpart (TYPE_MODE (type), op0);
c11c10d8 7912 else if (GET_CODE (op0) != MEM)
ed239f5a 7913 {
c11c10d8
RK
7914 /* If the operand is not a MEM, force it into memory. Since we
7915 are going to be be changing the mode of the MEM, don't call
7916 force_const_mem for constants because we don't allow pool
7917 constants to change mode. */
ed239f5a 7918 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
ed239f5a 7919
c11c10d8
RK
7920 if (TREE_ADDRESSABLE (exp))
7921 abort ();
ed239f5a 7922
c11c10d8
RK
7923 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7924 target
7925 = assign_stack_temp_for_type
7926 (TYPE_MODE (inner_type),
7927 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
ed239f5a 7928
c11c10d8
RK
7929 emit_move_insn (target, op0);
7930 op0 = target;
ed239f5a
RK
7931 }
7932
c11c10d8
RK
7933 /* At this point, OP0 is in the correct mode. If the output type is such
7934 that the operand is known to be aligned, indicate that it is.
7935 Otherwise, we need only be concerned about alignment for non-BLKmode
7936 results. */
ed239f5a
RK
7937 if (GET_CODE (op0) == MEM)
7938 {
7939 op0 = copy_rtx (op0);
7940
ed239f5a
RK
7941 if (TYPE_ALIGN_OK (type))
7942 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7943 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7944 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7945 {
7946 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
65a07688
RK
7947 HOST_WIDE_INT temp_size
7948 = MAX (int_size_in_bytes (inner_type),
7949 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
ed239f5a
RK
7950 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7951 temp_size, 0, type);
c4e59f51 7952 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
ed239f5a 7953
c11c10d8
RK
7954 if (TREE_ADDRESSABLE (exp))
7955 abort ();
7956
ed239f5a
RK
7957 if (GET_MODE (op0) == BLKmode)
7958 emit_block_move (new_with_op0_mode, op0,
44bb111a 7959 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8403445a
AM
7960 (modifier == EXPAND_STACK_PARM
7961 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
ed239f5a
RK
7962 else
7963 emit_move_insn (new_with_op0_mode, op0);
7964
7965 op0 = new;
7966 }
0fb7aeda 7967
c4e59f51 7968 op0 = adjust_address (op0, TYPE_MODE (type), 0);
ed239f5a
RK
7969 }
7970
7971 return op0;
7972
bbf6f052 7973 case PLUS_EXPR:
91ce572a 7974 this_optab = ! unsignedp && flag_trapv
a9785c70 7975 && (GET_MODE_CLASS (mode) == MODE_INT)
91ce572a 7976 ? addv_optab : add_optab;
bbf6f052
RK
7977
7978 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7979 something else, make sure we add the register to the constant and
7980 then to the other thing. This case can occur during strength
7981 reduction and doing it this way will produce better code if the
7982 frame pointer or argument pointer is eliminated.
7983
7984 fold-const.c will ensure that the constant is always in the inner
7985 PLUS_EXPR, so the only case we need to do anything about is if
7986 sp, ap, or fp is our second argument, in which case we must swap
7987 the innermost first argument and our second argument. */
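	 /* E.g. (X + C) + FP is rewritten as (FP + C) + X, so that C can
	    later be folded into the offset produced by frame-pointer
	    elimination.  */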
7988
7989 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7990 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7991 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7992 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7993 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7994 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7995 {
7996 tree t = TREE_OPERAND (exp, 1);
7997
7998 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7999 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
8000 }
8001
88f63c77 8002 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
8003 something, we might be forming a constant. So try to use
8004 plus_constant. If it produces a sum and we can't accept it,
8005 use force_operand. This allows P = &ARR[const] to generate
8006 efficient code on machines where a SYMBOL_REF is not a valid
8007 address.
8008
8009 If this is an EXPAND_SUM call, always return the sum. */
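	 /* For example, for the (hypothetical) source
	      int arr[100];  int *p = &arr[10];
	    with 4-byte ints, plus_constant can fold the address to roughly
	      (const (plus (symbol_ref "arr") (const_int 40)))
	    even on machines where a bare SYMBOL_REF is not a valid
	    address.  */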
c980ac49 8010 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
0fb7aeda 8011 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
bbf6f052 8012 {
8403445a
AM
8013 if (modifier == EXPAND_STACK_PARM)
8014 target = 0;
c980ac49
RS
8015 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
8016 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
8017 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
8018 {
cbbc503e
JL
8019 rtx constant_part;
8020
c980ac49
RS
8021 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
8022 EXPAND_SUM);
cbbc503e
JL
8023 /* Use immed_double_const to ensure that the constant is
8024 truncated according to the mode of OP1, then sign extended
8025 to a HOST_WIDE_INT. Using the constant directly can result
8026 in non-canonical RTL in a 64x32 cross compile. */
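		 /* Concretely: on a 64-bit host targeting a 32-bit machine,
		    the SImode constant 0xffffffff must be represented as
		    (const_int -1); using the raw host value would produce
		    non-canonical RTL.  */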
8027 constant_part
8028 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
8029 (HOST_WIDE_INT) 0,
a5efcd63 8030 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 8031 op1 = plus_constant (op1, INTVAL (constant_part));
c980ac49
RS
8032 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8033 op1 = force_operand (op1, target);
8034 return op1;
8035 }
bbf6f052 8036
c980ac49
RS
8037 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8038 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
8039 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
8040 {
cbbc503e
JL
8041 rtx constant_part;
8042
c980ac49 8043 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
70d95bac
RH
8044 (modifier == EXPAND_INITIALIZER
8045 ? EXPAND_INITIALIZER : EXPAND_SUM));
c980ac49
RS
8046 if (! CONSTANT_P (op0))
8047 {
8048 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8049 VOIDmode, modifier);
709f5be1
RS
8050 /* Don't go to both_summands if modifier
8051 says it's not right to return a PLUS. */
8052 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8053 goto binop2;
c980ac49
RS
8054 goto both_summands;
8055 }
cbbc503e
JL
8056 /* Use immed_double_const to ensure that the constant is
8057 truncated according to the mode of OP1, then sign extended
8058 to a HOST_WIDE_INT. Using the constant directly can result
8059 in non-canonical RTL in a 64x32 cross compile. */
8060 constant_part
8061 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
8062 (HOST_WIDE_INT) 0,
2a94e396 8063 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 8064 op0 = plus_constant (op0, INTVAL (constant_part));
c980ac49
RS
8065 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8066 op0 = force_operand (op0, target);
8067 return op0;
8068 }
bbf6f052
RK
8069 }
8070
4ef7870a
EB
8071 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8072 subtarget = 0;
8073
bbf6f052
RK
8074 /* No sense saving up arithmetic to be done
8075 if it's all in the wrong mode to form part of an address.
8076 And force_operand won't know whether to sign-extend or
8077 zero-extend. */
8078 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 8079 || mode != ptr_mode)
4ef7870a
EB
8080 {
8081 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8082 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
6e7727eb
EB
8083 if (op0 == const0_rtx)
8084 return op1;
8085 if (op1 == const0_rtx)
8086 return op0;
4ef7870a
EB
8087 goto binop2;
8088 }
bbf6f052 8089
37a08a29
RK
8090 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8091 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
bbf6f052 8092
1717e19e
UW
8093 /* We come here from MINUS_EXPR when the second operand is a
8094 constant. */
c980ac49 8095 both_summands:
bbf6f052
RK
8096 /* Make sure any term that's a sum with a constant comes last. */
8097 if (GET_CODE (op0) == PLUS
8098 && CONSTANT_P (XEXP (op0, 1)))
8099 {
8100 temp = op0;
8101 op0 = op1;
8102 op1 = temp;
8103 }
8104 /* If adding to a sum including a constant,
8105 associate it to put the constant outside. */
8106 if (GET_CODE (op1) == PLUS
8107 && CONSTANT_P (XEXP (op1, 1)))
8108 {
8109 rtx constant_term = const0_rtx;
8110
8111 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
8112 if (temp != 0)
8113 op0 = temp;
6f90e075
JW
8114 /* Ensure that MULT comes first if there is one. */
8115 else if (GET_CODE (op0) == MULT)
38a448ca 8116 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
bbf6f052 8117 else
38a448ca 8118 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
bbf6f052
RK
8119
8120 /* Let's also eliminate constants from op0 if possible. */
8121 op0 = eliminate_constant_term (op0, &constant_term);
8122
8123 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3a94c984 8124 their sum should be a constant. Form it into OP1, since the
bbf6f052
RK
8125 result we want will then be OP0 + OP1. */
8126
8127 temp = simplify_binary_operation (PLUS, mode, constant_term,
8128 XEXP (op1, 1));
8129 if (temp != 0)
8130 op1 = temp;
8131 else
38a448ca 8132 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
bbf6f052
RK
8133 }
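	 /* E.g. OP0 = (reg X) and OP1 = (plus (reg Y) (const_int 4)) are
	    reassociated here so the constant ends up outermost:
	    (plus (plus X Y) (const_int 4)).  */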
8134
8135 /* Put a constant term last and put a multiplication first. */
8136 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
8137 temp = op1, op1 = op0, op0 = temp;
8138
8139 temp = simplify_binary_operation (PLUS, mode, op0, op1);
38a448ca 8140 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
bbf6f052
RK
8141
8142 case MINUS_EXPR:
ea87523e
RK
8143 /* For initializers, we are allowed to return a MINUS of two
8144 symbolic constants. Here we handle all cases when both operands
8145 are constant. */
bbf6f052
RK
8146 /* Handle difference of two symbolic constants,
8147 for the sake of an initializer. */
8148 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8149 && really_constant_p (TREE_OPERAND (exp, 0))
8150 && really_constant_p (TREE_OPERAND (exp, 1)))
8151 {
37a08a29
RK
8152 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
8153 modifier);
8154 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8155 modifier);
ea87523e 8156
ea87523e
RK
8157 /* If the last operand is a CONST_INT, use plus_constant of
8158 the negated constant. Else make the MINUS. */
8159 if (GET_CODE (op1) == CONST_INT)
8160 return plus_constant (op0, - INTVAL (op1));
8161 else
38a448ca 8162 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052 8163 }
ae431183 8164
91ce572a
CC
8165 this_optab = ! unsignedp && flag_trapv
8166 && (GET_MODE_CLASS(mode) == MODE_INT)
8167 ? subv_optab : sub_optab;
1717e19e
UW
8168
8169 /* No sense saving up arithmetic to be done
8170 if it's all in the wrong mode to form part of an address.
8171 And force_operand won't know whether to sign-extend or
8172 zero-extend. */
8173 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
8174 || mode != ptr_mode)
8175 goto binop;
8176
8177 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8178 subtarget = 0;
8179
8180 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
8181 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);
8182
8183 /* Convert A - const to A + (-const). */
8184 if (GET_CODE (op1) == CONST_INT)
8185 {
8186 op1 = negate_rtx (mode, op1);
8187 goto both_summands;
8188 }
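	 /* E.g. "p - 4" is handled as "p + (-4)" and reassociated by the
	    PLUS_EXPR code above.  */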
8189
8190 goto binop2;
bbf6f052
RK
8191
8192 case MULT_EXPR:
bbf6f052
RK
8193 /* If first operand is constant, swap them.
8194 Thus the following special case checks need only
8195 check the second operand. */
8196 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
8197 {
b3694847 8198 tree t1 = TREE_OPERAND (exp, 0);
bbf6f052
RK
8199 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
8200 TREE_OPERAND (exp, 1) = t1;
8201 }
8202
8203 /* Attempt to return something suitable for generating an
8204 indexed address, for machines that support that. */
8205
88f63c77 8206 if (modifier == EXPAND_SUM && mode == ptr_mode
3b40e71b 8207 && host_integerp (TREE_OPERAND (exp, 1), 0))
bbf6f052 8208 {
48a5f2fa
DJ
8209 tree exp1 = TREE_OPERAND (exp, 1);
8210
921b3427
RK
8211 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
8212 EXPAND_SUM);
bbf6f052 8213
3b40e71b
RH
8214 /* If we knew for certain that this is arithmetic for an array
8215 reference, and we knew the bounds of the array, then we could
8216 apply the distributive law across (PLUS X C) for constant C.
8217 Without such knowledge, we risk overflowing the computation
8218 when both X and C are large, but X+C isn't. */
8219 /* ??? Could perhaps special-case EXP being unsigned and C being
8220 positive. In that case we are certain that X+C is no smaller
8221 than X and so the transformed expression will overflow iff the
8222 original would have. */
bbf6f052
RK
8223
8224 if (GET_CODE (op0) != REG)
906c4e36 8225 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
8226 if (GET_CODE (op0) != REG)
8227 op0 = copy_to_mode_reg (mode, op0);
8228
48a5f2fa
DJ
8229 return gen_rtx_MULT (mode, op0,
8230 gen_int_mode (tree_low_cst (exp1, 0),
8231 TYPE_MODE (TREE_TYPE (exp1))));
bbf6f052
RK
8232 }
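	 /* E.g. under EXPAND_SUM, an index computation like "i * 4" can be
	    returned as (mult (reg i) (const_int 4)), ready to be folded
	    into a base + index*scale address by our caller.  */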
8233
e5e809f4 8234 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
8235 subtarget = 0;
8236
8403445a
AM
8237 if (modifier == EXPAND_STACK_PARM)
8238 target = 0;
8239
bbf6f052
RK
8240 /* Check for multiplying things that have been extended
8241 from a narrower type. If this machine supports multiplying
8242 in that narrower type with a result in the desired type,
8243 do it that way, and avoid the explicit type-conversion. */
8244 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
8245 && TREE_CODE (type) == INTEGER_TYPE
8246 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8247 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
8248 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8249 && int_fits_type_p (TREE_OPERAND (exp, 1),
8250 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8251 /* Don't use a widening multiply if a shift will do. */
8252 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 8253 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
8254 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
8255 ||
8256 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8257 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8258 ==
8259 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8260 /* If both operands are extended, they must either both
8261 be zero-extended or both be sign-extended. */
8262 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
8263 ==
8264 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
8265 {
8266 enum machine_mode innermode
8267 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
8268 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8269 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
8270 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
8271 ? umul_widen_optab : smul_widen_optab);
b10af0c8 8272 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 8273 {
b10af0c8
TG
8274 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
8275 {
8276 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8277 NULL_RTX, VOIDmode, 0);
8278 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8279 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
8280 VOIDmode, 0);
8281 else
8282 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8283 NULL_RTX, VOIDmode, 0);
8284 goto binop2;
8285 }
8286 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
8287 && innermode == word_mode)
8288 {
8289 rtx htem;
8290 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8291 NULL_RTX, VOIDmode, 0);
8292 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8c118062
GK
8293 op1 = convert_modes (innermode, mode,
8294 expand_expr (TREE_OPERAND (exp, 1),
8295 NULL_RTX, VOIDmode, 0),
8296 unsignedp);
b10af0c8
TG
8297 else
8298 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
8299 NULL_RTX, VOIDmode, 0);
8300 temp = expand_binop (mode, other_optab, op0, op1, target,
8301 unsignedp, OPTAB_LIB_WIDEN);
8302 htem = expand_mult_highpart_adjust (innermode,
8303 gen_highpart (innermode, temp),
8304 op0, op1,
8305 gen_highpart (innermode, temp),
8306 unsignedp);
8307 emit_move_insn (gen_highpart (innermode, temp), htem);
8308 return temp;
8309 }
bbf6f052
RK
8310 }
8311 }
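	 /* E.g. "(int) a * (int) b", with A and B of type short, can use a
	    single HImode x HImode -> SImode widening multiply insn if the
	    target has one, skipping the two explicit extensions.  */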
8312 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 8313 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8314 return expand_mult (mode, op0, op1, target, unsignedp);
8315
8316 case TRUNC_DIV_EXPR:
8317 case FLOOR_DIV_EXPR:
8318 case CEIL_DIV_EXPR:
8319 case ROUND_DIV_EXPR:
8320 case EXACT_DIV_EXPR:
e5e809f4 8321 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052 8322 subtarget = 0;
8403445a
AM
8323 if (modifier == EXPAND_STACK_PARM)
8324 target = 0;
bbf6f052
RK
8325 /* Possible optimization: compute the dividend with EXPAND_SUM
8326 then if the divisor is constant can optimize the case
8327 where some terms of the dividend have coeffs divisible by it. */
8328 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 8329 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8330 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8331
8332 case RDIV_EXPR:
b7e9703c
JH
8333 /* Emit a/b as a*(1/b). Later we may be able to CSE the reciprocal,
8334 saving an expensive divide. If not, combine will rebuild the
8335 original computation. */
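      /* E.g. "a / d; b / d;" can then share a single CSEd "1.0 / d",
	 trading two divides for one divide plus two multiplies (an
	 illustrative sketch; only valid under -funsafe-math-optimizations,
	 as tested below).  */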
8336 if (flag_unsafe_math_optimizations && optimize && !optimize_size
ed7d44bc 8337 && TREE_CODE (type) == REAL_TYPE
b7e9703c
JH
8338 && !real_onep (TREE_OPERAND (exp, 0)))
8339 return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
8340 build (RDIV_EXPR, type,
8341 build_real (type, dconst1),
8342 TREE_OPERAND (exp, 1))),
8e37cba8 8343 target, tmode, modifier);
ef89d648 8344 this_optab = sdiv_optab;
bbf6f052
RK
8345 goto binop;
8346
8347 case TRUNC_MOD_EXPR:
8348 case FLOOR_MOD_EXPR:
8349 case CEIL_MOD_EXPR:
8350 case ROUND_MOD_EXPR:
e5e809f4 8351 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052 8352 subtarget = 0;
8403445a
AM
8353 if (modifier == EXPAND_STACK_PARM)
8354 target = 0;
bbf6f052 8355 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 8356 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8357 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
8358
8359 case FIX_ROUND_EXPR:
8360 case FIX_FLOOR_EXPR:
8361 case FIX_CEIL_EXPR:
8362 abort (); /* Not used for C. */
8363
8364 case FIX_TRUNC_EXPR:
906c4e36 8365 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 8366 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
8367 target = gen_reg_rtx (mode);
8368 expand_fix (target, op0, unsignedp);
8369 return target;
8370
8371 case FLOAT_EXPR:
906c4e36 8372 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
8403445a 8373 if (target == 0 || modifier == EXPAND_STACK_PARM)
bbf6f052
RK
8374 target = gen_reg_rtx (mode);
8375 /* expand_float can't figure out what to do if FROM has VOIDmode.
8376 So give it the correct mode. With -O, cse will optimize this. */
8377 if (GET_MODE (op0) == VOIDmode)
8378 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8379 op0);
8380 expand_float (target, op0,
8381 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
8382 return target;
8383
8384 case NEGATE_EXPR:
5b22bee8 8385 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8386 if (modifier == EXPAND_STACK_PARM)
8387 target = 0;
91ce572a 8388 temp = expand_unop (mode,
0fb7aeda
KH
8389 ! unsignedp && flag_trapv
8390 && (GET_MODE_CLASS(mode) == MODE_INT)
8391 ? negv_optab : neg_optab, op0, target, 0);
bbf6f052
RK
8392 if (temp == 0)
8393 abort ();
8394 return temp;
8395
8396 case ABS_EXPR:
8397 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8398 if (modifier == EXPAND_STACK_PARM)
8399 target = 0;
bbf6f052 8400
2d7050fd 8401 /* Handle complex values specially. */
d6a5ac33
RK
8402 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
8403 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
8404 return expand_complex_abs (mode, op0, target, unsignedp);
2d7050fd 8405
bbf6f052
RK
8406 /* Unsigned abs is simply the operand. Testing here means we don't
8407 risk generating incorrect code below. */
8408 if (TREE_UNSIGNED (type))
8409 return op0;
8410
91ce572a 8411 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 8412 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
8413
8414 case MAX_EXPR:
8415 case MIN_EXPR:
8416 target = original_target;
8403445a
AM
8417 if (target == 0
8418 || modifier == EXPAND_STACK_PARM
8419 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
fc155707 8420 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 8421 || GET_MODE (target) != mode
bbf6f052
RK
8422 || (GET_CODE (target) == REG
8423 && REGNO (target) < FIRST_PSEUDO_REGISTER))
8424 target = gen_reg_rtx (mode);
906c4e36 8425 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8426 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8427
8428 /* First try to do it with a special MIN or MAX instruction.
8429 If that does not win, use a conditional jump to select the proper
8430 value. */
8431 this_optab = (TREE_UNSIGNED (type)
8432 ? (code == MIN_EXPR ? umin_optab : umax_optab)
8433 : (code == MIN_EXPR ? smin_optab : smax_optab));
8434
8435 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
8436 OPTAB_WIDEN);
8437 if (temp != 0)
8438 return temp;
8439
fa2981d8
JW
8440 /* At this point, a MEM target is no longer useful; we will get better
8441 code without it. */
3a94c984 8442
fa2981d8
JW
8443 if (GET_CODE (target) == MEM)
8444 target = gen_reg_rtx (mode);
8445
ee456b1c
RK
8446 if (target != op0)
8447 emit_move_insn (target, op0);
d6a5ac33 8448
bbf6f052 8449 op0 = gen_label_rtx ();
d6a5ac33 8450
f81497d9
RS
8451 /* If this mode is an integer too wide to compare properly,
8452 compare word by word. Rely on cse to optimize constant cases. */
1eb8759b
RH
8453 if (GET_MODE_CLASS (mode) == MODE_INT
8454 && ! can_compare_p (GE, mode, ccp_jump))
bbf6f052 8455 {
f81497d9 8456 if (code == MAX_EXPR)
d6a5ac33
RK
8457 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8458 target, op1, NULL_RTX, op0);
bbf6f052 8459 else
d6a5ac33
RK
8460 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
8461 op1, target, NULL_RTX, op0);
bbf6f052 8462 }
f81497d9
RS
8463 else
8464 {
b30f05db
BS
8465 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
8466 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
a06ef755 8467 unsignedp, mode, NULL_RTX, NULL_RTX,
b30f05db 8468 op0);
f81497d9 8469 }
b30f05db 8470 emit_move_insn (target, op1);
bbf6f052
RK
8471 emit_label (op0);
8472 return target;
8473
bbf6f052
RK
8474 case BIT_NOT_EXPR:
8475 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8476 if (modifier == EXPAND_STACK_PARM)
8477 target = 0;
bbf6f052
RK
8478 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
8479 if (temp == 0)
8480 abort ();
8481 return temp;
8482
8483 case FFS_EXPR:
8484 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8403445a
AM
8485 if (modifier == EXPAND_STACK_PARM)
8486 target = 0;
bbf6f052
RK
8487 temp = expand_unop (mode, ffs_optab, op0, target, 1);
8488 if (temp == 0)
8489 abort ();
8490 return temp;
8491
2928cd7a
RH
8492 case CLZ_EXPR:
8493 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8494 temp = expand_unop (mode, clz_optab, op0, target, 1);
8495 if (temp == 0)
8496 abort ();
8497 return temp;
8498
8499 case CTZ_EXPR:
8500 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8501 temp = expand_unop (mode, ctz_optab, op0, target, 1);
8502 if (temp == 0)
8503 abort ();
8504 return temp;
8505
8506 case POPCOUNT_EXPR:
8507 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8508 temp = expand_unop (mode, popcount_optab, op0, target, 1);
8509 if (temp == 0)
8510 abort ();
8511 return temp;
8512
8513 case PARITY_EXPR:
8514 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8515 temp = expand_unop (mode, parity_optab, op0, target, 1);
8516 if (temp == 0)
8517 abort ();
8518 return temp;
8519
d6a5ac33
RK
8520 /* ??? Can optimize bitwise operations with one arg constant.
8521 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
8522 and (a bitwise1 b) bitwise2 b (etc)
8523 but that is probably not worthwhile. */
8524
8525 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
8526 boolean values when we want in all cases to compute both of them. In
8527 general it is fastest to do TRUTH_AND_EXPR by computing both operands
8528 as actual zero-or-1 values and then bitwise anding. In cases where
8529 there cannot be any side effects, better code would be made by
8530 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
8531 how to recognize those cases. */
8532
bbf6f052
RK
8533 case TRUTH_AND_EXPR:
8534 case BIT_AND_EXPR:
8535 this_optab = and_optab;
8536 goto binop;
8537
bbf6f052
RK
8538 case TRUTH_OR_EXPR:
8539 case BIT_IOR_EXPR:
8540 this_optab = ior_optab;
8541 goto binop;
8542
874726a8 8543 case TRUTH_XOR_EXPR:
bbf6f052
RK
8544 case BIT_XOR_EXPR:
8545 this_optab = xor_optab;
8546 goto binop;
8547
8548 case LSHIFT_EXPR:
8549 case RSHIFT_EXPR:
8550 case LROTATE_EXPR:
8551 case RROTATE_EXPR:
e5e809f4 8552 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052 8553 subtarget = 0;
8403445a
AM
8554 if (modifier == EXPAND_STACK_PARM)
8555 target = 0;
bbf6f052
RK
8556 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
8557 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
8558 unsignedp);
8559
d6a5ac33
RK
8560 /* Could determine the answer when only additive constants differ. Also,
8561 the addition of one can be handled by changing the condition. */
bbf6f052
RK
8562 case LT_EXPR:
8563 case LE_EXPR:
8564 case GT_EXPR:
8565 case GE_EXPR:
8566 case EQ_EXPR:
8567 case NE_EXPR:
1eb8759b
RH
8568 case UNORDERED_EXPR:
8569 case ORDERED_EXPR:
8570 case UNLT_EXPR:
8571 case UNLE_EXPR:
8572 case UNGT_EXPR:
8573 case UNGE_EXPR:
8574 case UNEQ_EXPR:
8403445a
AM
8575 temp = do_store_flag (exp,
8576 modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
8577 tmode != VOIDmode ? tmode : mode, 0);
bbf6f052
RK
8578 if (temp != 0)
8579 return temp;
d6a5ac33 8580
0f41302f 8581 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
8582 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
8583 && original_target
8584 && GET_CODE (original_target) == REG
8585 && (GET_MODE (original_target)
8586 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8587 {
d6a5ac33
RK
8588 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
8589 VOIDmode, 0);
8590
c0a3eeac
UW
8591 /* If temp is constant, we can just compute the result. */
8592 if (GET_CODE (temp) == CONST_INT)
8593 {
8594 if (INTVAL (temp) != 0)
8595 emit_move_insn (target, const1_rtx);
8596 else
8597 emit_move_insn (target, const0_rtx);
8598
8599 return target;
8600 }
8601
bbf6f052 8602 if (temp != original_target)
c0a3eeac
UW
8603 {
8604 enum machine_mode mode1 = GET_MODE (temp);
8605 if (mode1 == VOIDmode)
8606 mode1 = tmode != VOIDmode ? tmode : mode;
0fb7aeda 8607
c0a3eeac
UW
8608 temp = copy_to_mode_reg (mode1, temp);
8609 }
d6a5ac33 8610
bbf6f052 8611 op1 = gen_label_rtx ();
c5d5d461 8612 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
a06ef755 8613 GET_MODE (temp), unsignedp, op1);
bbf6f052
RK
8614 emit_move_insn (temp, const1_rtx);
8615 emit_label (op1);
8616 return temp;
8617 }
d6a5ac33 8618
bbf6f052
RK
8619 /* If no set-flag instruction, must generate a conditional
8620 store into a temporary variable. Drop through
8621 and handle this like && and ||. */
8622
8623 case TRUTH_ANDIF_EXPR:
8624 case TRUTH_ORIF_EXPR:
e44842fe 8625 if (! ignore
8403445a
AM
8626 && (target == 0
8627 || modifier == EXPAND_STACK_PARM
8628 || ! safe_from_p (target, exp, 1)
e44842fe
RK
8629 /* Make sure we don't have a hard reg (such as function's return
8630 value) live across basic blocks, if not optimizing. */
8631 || (!optimize && GET_CODE (target) == REG
8632 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 8633 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
8634
8635 if (target)
8636 emit_clr_insn (target);
8637
bbf6f052
RK
8638 op1 = gen_label_rtx ();
8639 jumpifnot (exp, op1);
e44842fe
RK
8640
8641 if (target)
8642 emit_0_to_1_insn (target);
8643
bbf6f052 8644 emit_label (op1);
e44842fe 8645 return ignore ? const0_rtx : target;
bbf6f052
RK
8646
8647 case TRUTH_NOT_EXPR:
8403445a
AM
8648 if (modifier == EXPAND_STACK_PARM)
8649 target = 0;
bbf6f052
RK
8650 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
8651 /* The parser is careful to generate TRUTH_NOT_EXPR
8652 only with operands that are always zero or one. */
906c4e36 8653 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
8654 target, 1, OPTAB_LIB_WIDEN);
8655 if (temp == 0)
8656 abort ();
8657 return temp;
8658
8659 case COMPOUND_EXPR:
8660 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8661 emit_queue ();
8662 return expand_expr (TREE_OPERAND (exp, 1),
8663 (ignore ? const0_rtx : target),
8403445a 8664 VOIDmode, modifier);
bbf6f052
RK
8665
8666 case COND_EXPR:
ac01eace
RK
8667 /* If we would have a "singleton" (see below) were it not for a
8668 conversion in each arm, bring that conversion back out. */
8669 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
8670 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
8671 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
8672 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
8673 {
d6edb99e
ZW
8674 tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
8675 tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
8676
8677 if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
8678 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8679 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
8680 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
8681 || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
8682 && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
8683 || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
8684 && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
ac01eace 8685 return expand_expr (build1 (NOP_EXPR, type,
d6edb99e 8686 build (COND_EXPR, TREE_TYPE (iftrue),
ac01eace 8687 TREE_OPERAND (exp, 0),
d6edb99e 8688 iftrue, iffalse)),
ac01eace
RK
8689 target, tmode, modifier);
8690 }
8691
bbf6f052
RK
8692 {
8693 /* Note that COND_EXPRs whose type is a structure or union
8694 are required to be constructed to contain assignments to
8695 a temporary variable, so that we can evaluate them here
8696 for side effect only. If type is void, we must do likewise. */
8697
8698 /* If an arm of the branch requires a cleanup,
8699 only that cleanup is performed. */
8700
8701 tree singleton = 0;
8702 tree binary_op = 0, unary_op = 0;
bbf6f052
RK
8703
8704 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
8705 convert it to our mode, if necessary. */
8706 if (integer_onep (TREE_OPERAND (exp, 1))
8707 && integer_zerop (TREE_OPERAND (exp, 2))
8708 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8709 {
dd27116b
RK
8710 if (ignore)
8711 {
8712 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
37a08a29 8713 modifier);
dd27116b
RK
8714 return const0_rtx;
8715 }
8716
8403445a
AM
8717 if (modifier == EXPAND_STACK_PARM)
8718 target = 0;
37a08a29 8719 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
bbf6f052
RK
8720 if (GET_MODE (op0) == mode)
8721 return op0;
d6a5ac33 8722
bbf6f052
RK
8723 if (target == 0)
8724 target = gen_reg_rtx (mode);
8725 convert_move (target, op0, unsignedp);
8726 return target;
8727 }
8728
ac01eace
RK
8729 /* Check for X ? A + B : A. If we have this, we can copy A to the
8730 output and conditionally add B. Similarly for unary operations.
8731 Don't do this if X has side-effects because those side effects
8732 might affect A or B and the "?" operation is a sequence point in
8733 ANSI. (operand_equal_p tests for side effects.) */
bbf6f052
RK
8734
8735 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8736 && operand_equal_p (TREE_OPERAND (exp, 2),
8737 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8738 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8739 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8740 && operand_equal_p (TREE_OPERAND (exp, 1),
8741 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8742 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8743 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8744 && operand_equal_p (TREE_OPERAND (exp, 2),
8745 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8746 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8747 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8748 && operand_equal_p (TREE_OPERAND (exp, 1),
8749 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8750 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8751
01c8a7c8
RK
8752 /* If we are not to produce a result, we have no target. Otherwise,
8753 if a target was specified use it; it will not be used as an
3a94c984 8754 intermediate target unless it is safe. If no target, use a
01c8a7c8
RK
8755 temporary. */
8756
8757 if (ignore)
8758 temp = 0;
8403445a
AM
8759 else if (modifier == EXPAND_STACK_PARM)
8760 temp = assign_temp (type, 0, 0, 1);
01c8a7c8 8761 else if (original_target
e5e809f4 8762 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
01c8a7c8
RK
8763 || (singleton && GET_CODE (original_target) == REG
8764 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8765 && original_target == var_rtx (singleton)))
8766 && GET_MODE (original_target) == mode
7c00d1fe
RK
8767#ifdef HAVE_conditional_move
8768 && (! can_conditionally_move_p (mode)
8769 || GET_CODE (original_target) == REG
8770 || TREE_ADDRESSABLE (type))
8771#endif
8125d7e9
BS
8772 && (GET_CODE (original_target) != MEM
8773 || TREE_ADDRESSABLE (type)))
01c8a7c8
RK
8774 temp = original_target;
8775 else if (TREE_ADDRESSABLE (type))
8776 abort ();
8777 else
8778 temp = assign_temp (type, 0, 0, 1);
8779
ac01eace
RK
8780 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8781 do the test of X as a store-flag operation, do this as
8782 A + ((X != 0) << log C). Similarly for other simple binary
8783 operators. Only do for C == 1 if BRANCH_COST is low. */
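	/* E.g. "x ? a + 4 : a" can become "a + ((x != 0) << 2)", trading
	   the branch for a store-flag insn and a shift.  */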
dd27116b 8784 if (temp && singleton && binary_op
bbf6f052
RK
8785 && (TREE_CODE (binary_op) == PLUS_EXPR
8786 || TREE_CODE (binary_op) == MINUS_EXPR
8787 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 8788 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
ac01eace
RK
8789 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8790 : integer_onep (TREE_OPERAND (binary_op, 1)))
bbf6f052
RK
8791 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8792 {
8793 rtx result;
61f6c84f 8794 tree cond;
91ce572a 8795 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
0fb7aeda
KH
8796 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8797 ? addv_optab : add_optab)
8798 : TREE_CODE (binary_op) == MINUS_EXPR
8799 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8800 ? subv_optab : sub_optab)
8801 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8802 : xor_optab);
bbf6f052 8803
61f6c84f 8804 /* If we had X ? A : A + 1, do this as A + (X == 0). */
bbf6f052 8805 if (singleton == TREE_OPERAND (exp, 1))
61f6c84f
JJ
8806 cond = invert_truthvalue (TREE_OPERAND (exp, 0));
8807 else
8808 cond = TREE_OPERAND (exp, 0);
bbf6f052 8809
61f6c84f
JJ
8810 result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
8811 ? temp : NULL_RTX),
bbf6f052
RK
8812 mode, BRANCH_COST <= 1);
8813
ac01eace
RK
8814 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8815 result = expand_shift (LSHIFT_EXPR, mode, result,
8816 build_int_2 (tree_log2
8817 (TREE_OPERAND
8818 (binary_op, 1)),
8819 0),
e5e809f4 8820 (safe_from_p (temp, singleton, 1)
ac01eace
RK
8821 ? temp : NULL_RTX), 0);
8822
bbf6f052
RK
8823 if (result)
8824 {
906c4e36 8825 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8826 return expand_binop (mode, boptab, op1, result, temp,
8827 unsignedp, OPTAB_LIB_WIDEN);
8828 }
bbf6f052 8829 }
3a94c984 8830
dabf8373 8831 do_pending_stack_adjust ();
bbf6f052
RK
8832 NO_DEFER_POP;
8833 op0 = gen_label_rtx ();
8834
8835 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8836 {
8837 if (temp != 0)
8838 {
8839 /* If the target conflicts with the other operand of the
8840 binary op, we can't use it. Also, we can't use the target
8841 if it is a hard register, because evaluating the condition
8842 might clobber it. */
8843 if ((binary_op
e5e809f4 8844 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
bbf6f052
RK
8845 || (GET_CODE (temp) == REG
8846 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8847 temp = gen_reg_rtx (mode);
8403445a
AM
8848 store_expr (singleton, temp,
8849 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052
RK
8850 }
8851 else
906c4e36 8852 expand_expr (singleton,
2937cf87 8853 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8854 if (singleton == TREE_OPERAND (exp, 1))
8855 jumpif (TREE_OPERAND (exp, 0), op0);
8856 else
8857 jumpifnot (TREE_OPERAND (exp, 0), op0);
8858
956d6950 8859 start_cleanup_deferral ();
bbf6f052
RK
8860 if (binary_op && temp == 0)
8861 /* Just touch the other operand. */
8862 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 8863 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8864 else if (binary_op)
8865 store_expr (build (TREE_CODE (binary_op), type,
8866 make_tree (type, temp),
8867 TREE_OPERAND (binary_op, 1)),
8403445a 8868 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052
RK
8869 else
8870 store_expr (build1 (TREE_CODE (unary_op), type,
8871 make_tree (type, temp)),
8403445a 8872 temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8873 op1 = op0;
bbf6f052 8874 }
bbf6f052
RK
8875 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8876 comparison operator. If we have one of these cases, set the
8877 output to A, branch on A (cse will merge these two references),
8878 then set the output to FOO. */
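	/* E.g. for "x != 0 ? x : y": store X into the target, branch on
	   the comparison, and store Y in the fall-through arm.  */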
8879 else if (temp
8880 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8881 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8882 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8883 TREE_OPERAND (exp, 1), 0)
e9a25f70
JL
8884 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8885 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 8886 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
bbf6f052 8887 {
3a94c984
KH
8888 if (GET_CODE (temp) == REG
8889 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052 8890 temp = gen_reg_rtx (mode);
8403445a
AM
8891 store_expr (TREE_OPERAND (exp, 1), temp,
8892 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8893 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 8894
956d6950 8895 start_cleanup_deferral ();
8403445a
AM
8896 store_expr (TREE_OPERAND (exp, 2), temp,
8897 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052
RK
8898 op1 = op0;
8899 }
8900 else if (temp
8901 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8902 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8903 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8904 TREE_OPERAND (exp, 2), 0)
e9a25f70
JL
8905 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8906 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 8907 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
bbf6f052 8908 {
3a94c984
KH
8909 if (GET_CODE (temp) == REG
8910 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052 8911 temp = gen_reg_rtx (mode);
8403445a
AM
8912 store_expr (TREE_OPERAND (exp, 2), temp,
8913 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8914 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8915
956d6950 8916 start_cleanup_deferral ();
8403445a
AM
8917 store_expr (TREE_OPERAND (exp, 1), temp,
8918 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052
RK
8919 op1 = op0;
8920 }
8921 else
8922 {
8923 op1 = gen_label_rtx ();
8924 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8925
956d6950 8926 start_cleanup_deferral ();
3a94c984 8927
2ac84cfe 8928 /* One branch of the cond can be void, if it never returns. For
3a94c984 8929 example A ? throw : E. */
2ac84cfe 8930 if (temp != 0
3a94c984 8931 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
8403445a
AM
8932 store_expr (TREE_OPERAND (exp, 1), temp,
8933 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8934 else
906c4e36
RK
8935 expand_expr (TREE_OPERAND (exp, 1),
8936 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 8937 end_cleanup_deferral ();
bbf6f052
RK
8938 emit_queue ();
8939 emit_jump_insn (gen_jump (op1));
8940 emit_barrier ();
8941 emit_label (op0);
956d6950 8942 start_cleanup_deferral ();
2ac84cfe 8943 if (temp != 0
3a94c984 8944 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
8403445a
AM
8945 store_expr (TREE_OPERAND (exp, 2), temp,
8946 modifier == EXPAND_STACK_PARM ? 2 : 0);
bbf6f052 8947 else
906c4e36
RK
8948 expand_expr (TREE_OPERAND (exp, 2),
8949 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8950 }
8951
956d6950 8952 end_cleanup_deferral ();
bbf6f052
RK
8953
8954 emit_queue ();
8955 emit_label (op1);
8956 OK_DEFER_POP;
5dab5552 8957
bbf6f052
RK
8958 return temp;
8959 }
8960
8961 case TARGET_EXPR:
8962 {
8963 /* Something needs to be initialized, but we didn't know
8964 where that thing was when building the tree. For example,
8965 it could be the return value of a function, or a parameter
8966 to a function which is laid down on the stack, or a temporary
8967 variable which must be passed by reference.
8968
8969 We guarantee that the expression will either be constructed
8970 or copied into our original target. */
8971
8972 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 8973 tree cleanups = NULL_TREE;
5c062816 8974 tree exp1;
bbf6f052
RK
8975
8976 if (TREE_CODE (slot) != VAR_DECL)
8977 abort ();
8978
9c51f375
RK
8979 if (! ignore)
8980 target = original_target;
8981
6fbfac92
JM
8982 /* Set this here so that if we get a target that refers to a
8983 register variable that's already been used, put_reg_into_stack
3a94c984 8984 knows that it should fix up those uses. */
6fbfac92
JM
8985 TREE_USED (slot) = 1;
8986
bbf6f052
RK
8987 if (target == 0)
8988 {
19e7881c 8989 if (DECL_RTL_SET_P (slot))
ac993f4f
MS
8990 {
8991 target = DECL_RTL (slot);
5c062816 8992 /* If we have already expanded the slot, don't do
ac993f4f 8993 it again. (mrs) */
5c062816
MS
8994 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8995 return target;
ac993f4f 8996 }
bbf6f052
RK
8997 else
8998 {
e9a25f70 8999 target = assign_temp (type, 2, 0, 1);
bbf6f052
RK
9000 /* All temp slots at this level must not conflict. */
9001 preserve_temp_slots (target);
19e7881c 9002 SET_DECL_RTL (slot, target);
e9a25f70 9003 if (TREE_ADDRESSABLE (slot))
4361b41d 9004 put_var_into_stack (slot);
bbf6f052 9005
e287fd6e
RK
9006 /* Since SLOT is not known to the called function
9007 to belong to its stack frame, we must build an explicit
9008 cleanup. This case occurs when we must build up a reference
9009 to pass the reference as an argument. In this case,
9010 it is very likely that such a reference need not be
9011 built here. */
9012
9013 if (TREE_OPERAND (exp, 2) == 0)
c88770e9
NB
9014 TREE_OPERAND (exp, 2)
9015 = (*lang_hooks.maybe_build_cleanup) (slot);
2a888d4c 9016 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 9017 }
bbf6f052
RK
9018 }
9019 else
9020 {
9021 /* This case does occur, when expanding a parameter which
9022 needs to be constructed on the stack. The target
9023 is the actual stack address that we want to initialize.
9024 The function we call will perform the cleanup in this case. */
9025
8c042b47
RS
9026 /* If we have already assigned it space, use that space,
9027 not the target that we were passed in, as our target
9028 parameter is only a hint. */
19e7881c 9029 if (DECL_RTL_SET_P (slot))
3a94c984
KH
9030 {
9031 target = DECL_RTL (slot);
9032 /* If we have already expanded the slot, don't do
8c042b47 9033 it again. (mrs) */
3a94c984
KH
9034 if (TREE_OPERAND (exp, 1) == NULL_TREE)
9035 return target;
8c042b47 9036 }
21002281
JW
9037 else
9038 {
19e7881c 9039 SET_DECL_RTL (slot, target);
21002281
JW
9040 /* If we must have an addressable slot, then make sure that
9041 the RTL that we just stored in slot is OK. */
9042 if (TREE_ADDRESSABLE (slot))
4361b41d 9043 put_var_into_stack (slot);
21002281 9044 }
bbf6f052
RK
9045 }
9046
4847c938 9047 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
9048 /* Mark it as expanded. */
9049 TREE_OPERAND (exp, 1) = NULL_TREE;
9050
8403445a 9051 store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);
61d6b1cc 9052
659e5a7a 9053 expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));
3a94c984 9054
41531e5b 9055 return target;
bbf6f052
RK
9056 }
9057
9058 case INIT_EXPR:
9059 {
9060 tree lhs = TREE_OPERAND (exp, 0);
9061 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052
RK
9062
9063 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
bbf6f052
RK
9064 return temp;
9065 }
9066
9067 case MODIFY_EXPR:
9068 {
9069 /* If lhs is complex, expand calls in rhs before computing it.
6d0a3f67
NS
9070 That's so we don't compute a pointer and save it over a
9071 call. If lhs is simple, compute it first so we can give it
9072 as a target if the rhs is just a call. This avoids an
9073 extra temp and copy and that prevents a partial-subsumption
9074 which makes bad code. Actually we could treat
9075 component_ref's of vars like vars. */
bbf6f052
RK
9076
9077 tree lhs = TREE_OPERAND (exp, 0);
9078 tree rhs = TREE_OPERAND (exp, 1);
bbf6f052
RK
9079
9080 temp = 0;
9081
bbf6f052
RK
9082 /* Check for |= or &= of a bitfield of size one into another bitfield
9083 of size 1. In this case, (unless we need the result of the
9084 assignment) we can do this more efficiently with a
9085 test followed by an assignment, if necessary.
9086
9087 ??? At this point, we can't get a BIT_FIELD_REF here. But if
9088 things change so we do, this code should be enhanced to
9089 support it. */
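	/* E.g. with one-bit fields, "s.a |= s.b" is expanded as: jump
	   past the store when S.B is zero, otherwise store a constant 1
	   into S.A -- no read-modify-write of S.A is needed.  */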
9090 if (ignore
9091 && TREE_CODE (lhs) == COMPONENT_REF
9092 && (TREE_CODE (rhs) == BIT_IOR_EXPR
9093 || TREE_CODE (rhs) == BIT_AND_EXPR)
9094 && TREE_OPERAND (rhs, 0) == lhs
9095 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
05bccae2
RK
9096 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
9097 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
bbf6f052
RK
9098 {
9099 rtx label = gen_label_rtx ();
9100
9101 do_jump (TREE_OPERAND (rhs, 1),
9102 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
9103 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
9104 expand_assignment (lhs, convert (TREE_TYPE (rhs),
9105 (TREE_CODE (rhs) == BIT_IOR_EXPR
9106 ? integer_one_node
9107 : integer_zero_node)),
9108 0, 0);
e7c33f54 9109 do_pending_stack_adjust ();
bbf6f052
RK
9110 emit_label (label);
9111 return const0_rtx;
9112 }
9113
bbf6f052 9114 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
0fb7aeda 9115
bbf6f052
RK
9116 return temp;
9117 }
9118
6e7f84a7
APB
9119 case RETURN_EXPR:
9120 if (!TREE_OPERAND (exp, 0))
9121 expand_null_return ();
9122 else
9123 expand_return (TREE_OPERAND (exp, 0));
9124 return const0_rtx;
9125
bbf6f052
RK
9126 case PREINCREMENT_EXPR:
9127 case PREDECREMENT_EXPR:
7b8b9722 9128 return expand_increment (exp, 0, ignore);
bbf6f052
RK
9129
9130 case POSTINCREMENT_EXPR:
9131 case POSTDECREMENT_EXPR:
9132 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 9133 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
9134
9135 case ADDR_EXPR:
8403445a
AM
9136 if (modifier == EXPAND_STACK_PARM)
9137 target = 0;
bbf6f052
RK
9138 /* Are we taking the address of a nested function? */
9139 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 9140 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
9141 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
9142 && ! TREE_STATIC (exp))
bbf6f052
RK
9143 {
9144 op0 = trampoline_address (TREE_OPERAND (exp, 0));
9145 op0 = force_operand (op0, target);
9146 }
682ba3a6
RK
9147 /* If we are taking the address of something erroneous, just
9148 return a zero. */
9149 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9150 return const0_rtx;
d6b6783b
RK
9151 /* If we are taking the address of a constant and are at the
9152 top level, we have to use output_constant_def since we can't
9153 call force_const_mem at top level. */
9154 else if (cfun == 0
9155 && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
9156 || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
9157 == 'c')))
9158 op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
bbf6f052
RK
9159 else
9160 {
e287fd6e
RK
9161 /* We make sure to pass const0_rtx down if we came in with
9162 ignore set, to avoid doing the cleanups twice. */
9163 op0 = expand_expr (TREE_OPERAND (exp, 0),
9164 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
9165 (modifier == EXPAND_INITIALIZER
9166 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 9167
119af78a
RK
9168 /* If we are going to ignore the result, OP0 will have been set
9169 to const0_rtx, so just return it. Don't get confused and
9170 think we are taking the address of the constant. */
9171 if (ignore)
9172 return op0;
9173
73b7f58c
BS
9174 /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
9175 clever and returns a REG when given a MEM. */
9176 op0 = protect_from_queue (op0, 1);
3539e816 9177
c5c76735
JL
9178 /* We would like the object in memory. If it is a constant, we can
9179 have it be statically allocated into memory. For a non-constant,
9180 we need to allocate some memory and store the value into it. */
896102d0
RK
9181
9182 if (CONSTANT_P (op0))
9183 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
9184 op0);
682ba3a6 9185 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
df6018fd
JJ
9186 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
9187 || GET_CODE (op0) == PARALLEL)
896102d0 9188 {
6c7d86ec
RK
9189 /* If the operand is a SAVE_EXPR, we can deal with this by
9190 forcing the SAVE_EXPR into memory. */
9191 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
9192 {
9193 put_var_into_stack (TREE_OPERAND (exp, 0));
9194 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
9195 }
df6018fd 9196 else
6c7d86ec
RK
9197 {
9198 /* If this object is in a register, it can't be BLKmode. */
9199 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
19f90fad 9200 rtx memloc = assign_temp (inner_type, 1, 1, 1);
6c7d86ec
RK
9201
9202 if (GET_CODE (op0) == PARALLEL)
9203 /* Handle calls that pass values in multiple
9204 non-contiguous locations. The Irix 6 ABI has examples
9205 of this. */
0fb7aeda 9206 emit_group_store (memloc, op0,
6c7d86ec
RK
9207 int_size_in_bytes (inner_type));
9208 else
9209 emit_move_insn (memloc, op0);
0fb7aeda 9210
6c7d86ec
RK
9211 op0 = memloc;
9212 }
896102d0
RK
9213 }
9214
bbf6f052
RK
9215 if (GET_CODE (op0) != MEM)
9216 abort ();
3a94c984 9217
34e81b5a 9218 mark_temp_addr_taken (op0);
bbf6f052 9219 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77 9220 {
34e81b5a 9221 op0 = XEXP (op0, 0);
88f63c77 9222#ifdef POINTERS_EXTEND_UNSIGNED
34e81b5a 9223 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
88f63c77 9224 && mode == ptr_mode)
34e81b5a 9225 op0 = convert_memory_address (ptr_mode, op0);
88f63c77 9226#endif
34e81b5a 9227 return op0;
88f63c77 9228 }
987c71d9 9229
c952ff4b
RK
9230 /* If OP0 is not aligned at least as much as the type requires, we
9231 need to make a temporary, copy OP0 to it, and take the address of
9232 the temporary. We want to use the alignment of the type, not of
9233 the operand. Note that this is incorrect for FUNCTION_TYPE, but
9234 the test for BLKmode means that can't happen. The test for
9235 BLKmode is because we never make mis-aligned MEMs with
9236 non-BLKmode.
9237
9238 We don't need to do this at all if the machine doesn't have
9239 strict alignment. */
9240 if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
9241 && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
ed239f5a
RK
9242 > MEM_ALIGN (op0))
9243 && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
a06ef755
RK
9244 {
9245 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
9246 rtx new
9247 = assign_stack_temp_for_type
9248 (TYPE_MODE (inner_type),
9249 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
e27cc84b 9250 : int_size_in_bytes (inner_type),
a06ef755
RK
9251 1, build_qualified_type (inner_type,
9252 (TYPE_QUALS (inner_type)
9253 | TYPE_QUAL_CONST)));
9254
c3d32120
RK
9255 if (TYPE_ALIGN_OK (inner_type))
9256 abort ();
9257
44bb111a 9258 emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
8403445a
AM
9259 (modifier == EXPAND_STACK_PARM
9260 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
a06ef755
RK
9261 op0 = new;
9262 }
9263
bbf6f052
RK
9264 op0 = force_operand (XEXP (op0, 0), target);
9265 }
987c71d9 9266
05c8e58b
HPN
9267 if (flag_force_addr
9268 && GET_CODE (op0) != REG
9269 && modifier != EXPAND_CONST_ADDRESS
9270 && modifier != EXPAND_INITIALIZER
9271 && modifier != EXPAND_SUM)
987c71d9
RK
9272 op0 = force_reg (Pmode, op0);
9273
dc6d66b3
RK
9274 if (GET_CODE (op0) == REG
9275 && ! REG_USERVAR_P (op0))
bdb429a5 9276 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
987c71d9 9277
88f63c77
RK
9278#ifdef POINTERS_EXTEND_UNSIGNED
9279 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
9280 && mode == ptr_mode)
9fcfcce7 9281 op0 = convert_memory_address (ptr_mode, op0);
88f63c77
RK
9282#endif
9283
bbf6f052
RK
9284 return op0;
9285
9286 case ENTRY_VALUE_EXPR:
9287 abort ();
9288
7308a047
RS
9289 /* COMPLEX type for Extended Pascal & Fortran */
9290 case COMPLEX_EXPR:
9291 {
9292 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 9293 rtx insns;
7308a047
RS
9294
9295 /* Get the rtx code of the operands. */
9296 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9297 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9298
9299 if (! target)
9300 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9301
6551fa4d 9302 start_sequence ();
7308a047
RS
9303
9304 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
9305 emit_move_insn (gen_realpart (mode, target), op0);
9306 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 9307
6551fa4d
JW
9308 insns = get_insns ();
9309 end_sequence ();
9310
7308a047 9311 /* Complex construction should appear as a single unit. */
6551fa4d
JW
9312 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9313 each with a separate pseudo as destination.
9314 It's not correct for flow to treat them as a unit. */
6d6e61ce 9315 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
9316 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9317 else
2f937369 9318 emit_insn (insns);
7308a047
RS
9319
9320 return target;
9321 }
9322
9323 case REALPART_EXPR:
2d7050fd
RS
9324 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9325 return gen_realpart (mode, op0);
3a94c984 9326
7308a047 9327 case IMAGPART_EXPR:
2d7050fd
RS
9328 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9329 return gen_imagpart (mode, op0);
7308a047
RS
9330
9331 case CONJ_EXPR:
9332 {
62acb978 9333 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 9334 rtx imag_t;
6551fa4d 9335 rtx insns;
3a94c984
KH
9336
9337 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7308a047
RS
9338
9339 if (! target)
d6a5ac33 9340 target = gen_reg_rtx (mode);
3a94c984 9341
6551fa4d 9342 start_sequence ();
7308a047
RS
9343
9344 /* Store the realpart and the negated imagpart to target. */
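	/* I.e. for Z = A + B*i this computes CONJ (Z) = A - B*i.  */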
62acb978
RK
9345 emit_move_insn (gen_realpart (partmode, target),
9346 gen_realpart (partmode, op0));
7308a047 9347
62acb978 9348 imag_t = gen_imagpart (partmode, target);
91ce572a 9349 temp = expand_unop (partmode,
0fb7aeda
KH
9350 ! unsignedp && flag_trapv
9351 && (GET_MODE_CLASS(partmode) == MODE_INT)
9352 ? negv_optab : neg_optab,
3a94c984 9353 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
9354 if (temp != imag_t)
9355 emit_move_insn (imag_t, temp);
9356
6551fa4d
JW
9357 insns = get_insns ();
9358 end_sequence ();
9359
3a94c984 9360 /* Conjugate should appear as a single unit
d6a5ac33 9361 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
9362 each with a separate pseudo as destination.
9363 It's not correct for flow to treat them as a unit. */
6d6e61ce 9364 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
9365 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9366 else
2f937369 9367 emit_insn (insns);
7308a047
RS
9368
9369 return target;
9370 }
9371
e976b8b2
MS
9372 case TRY_CATCH_EXPR:
9373 {
9374 tree handler = TREE_OPERAND (exp, 1);
9375
9376 expand_eh_region_start ();
9377
9378 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9379
52a11cbf 9380 expand_eh_region_end_cleanup (handler);
e976b8b2
MS
9381
9382 return op0;
9383 }
9384
b335b813
PB
9385 case TRY_FINALLY_EXPR:
9386 {
9387 tree try_block = TREE_OPERAND (exp, 0);
9388 tree finally_block = TREE_OPERAND (exp, 1);
b335b813 9389
8ad8135a 9390 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8943a0b4
RH
9391 {
9392 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9393 is not sufficient, so we cannot expand the block twice.
 9394	       Instead we play games with GOTO_SUBROUTINE_EXPR so that
 9395	       the block is expanded only once.  */
8ad8135a
RH
9396 /* When not optimizing, we go ahead with this form since
9397 (1) user breakpoints operate more predictably without
9398 code duplication, and
9399 (2) we're not running any of the global optimizers
9400 that would explode in time/space with the highly
9401 connected CFG created by the indirect branching. */
8943a0b4
RH
9402
9403 rtx finally_label = gen_label_rtx ();
9404 rtx done_label = gen_label_rtx ();
9405 rtx return_link = gen_reg_rtx (Pmode);
9406 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9407 (tree) finally_label, (tree) return_link);
9408 TREE_SIDE_EFFECTS (cleanup) = 1;
9409
9410 /* Start a new binding layer that will keep track of all cleanup
9411 actions to be performed. */
9412 expand_start_bindings (2);
9413 target_temp_slot_level = temp_slot_level;
9414
9415 expand_decl_cleanup (NULL_TREE, cleanup);
9416 op0 = expand_expr (try_block, target, tmode, modifier);
9417
9418 preserve_temp_slots (op0);
9419 expand_end_bindings (NULL_TREE, 0, 0);
9420 emit_jump (done_label);
9421 emit_label (finally_label);
9422 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9423 emit_indirect_jump (return_link);
9424 emit_label (done_label);
9425 }
9426 else
9427 {
9428 expand_start_bindings (2);
9429 target_temp_slot_level = temp_slot_level;
b335b813 9430
8943a0b4
RH
9431 expand_decl_cleanup (NULL_TREE, finally_block);
9432 op0 = expand_expr (try_block, target, tmode, modifier);
b335b813 9433
8943a0b4
RH
9434 preserve_temp_slots (op0);
9435 expand_end_bindings (NULL_TREE, 0, 0);
9436 }
b335b813 9437
b335b813
PB
9438 return op0;
9439 }
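/* Editorial sketch (not part of expr.c): the one-copy expansion above,
   written with the GNU computed-goto extension.  return_link plays the
   role of the Pmode pseudo; the cleanup records a resume point and
   jumps to the finally code, which returns through the link, so the
   finally block is emitted only once.  */
#if 0
static void
try_finally_sketch (void)
{
  void *return_link;
  /* ... try block ... */
  return_link = &&resume;   /* the GOTO_SUBROUTINE_EXPR cleanup      */
  goto finally;
 resume:
  goto done;                /* emit_jump (done_label)                */
 finally:
  /* ... finally block ... */
  goto *return_link;        /* emit_indirect_jump (return_link)      */
 done:;
}
#endif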
9440
3a94c984 9441 case GOTO_SUBROUTINE_EXPR:
b335b813
PB
9442 {
9443 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9444 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9445 rtx return_address = gen_label_rtx ();
3a94c984
KH
9446 emit_move_insn (return_link,
9447 gen_rtx_LABEL_REF (Pmode, return_address));
b335b813
PB
9448 emit_jump (subr);
9449 emit_label (return_address);
9450 return const0_rtx;
9451 }
9452
d3707adb
RH
9453 case VA_ARG_EXPR:
9454 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9455
52a11cbf 9456 case EXC_PTR_EXPR:
86c99549 9457 return get_exception_pointer (cfun);
52a11cbf 9458
67231816
RH
9459 case FDESC_EXPR:
9460 /* Function descriptors are not valid except for as
9461 initialization constants, and should not be expanded. */
9462 abort ();
9463
bbf6f052 9464 default:
c9d892a8 9465 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
9466 }
9467
9468 /* Here to do an ordinary binary operator, generating an instruction
9469 from the optab already placed in `this_optab'. */
9470 binop:
e5e809f4 9471 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
9472 subtarget = 0;
9473 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 9474 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052 9475 binop2:
8403445a
AM
9476 if (modifier == EXPAND_STACK_PARM)
9477 target = 0;
bbf6f052
RK
9478 temp = expand_binop (mode, this_optab, op0, op1, target,
9479 unsignedp, OPTAB_LIB_WIDEN);
9480 if (temp == 0)
9481 abort ();
9482 return temp;
9483}
b93a436e 9484\f
1ce7f3c2
RK
9485/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9486 when applied to the address of EXP produces an address known to be
9487 aligned more than BIGGEST_ALIGNMENT. */
9488
9489static int
9490is_aligning_offset (offset, exp)
9491 tree offset;
9492 tree exp;
9493{
9494 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9495 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9496 || TREE_CODE (offset) == NOP_EXPR
9497 || TREE_CODE (offset) == CONVERT_EXPR
9498 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9499 offset = TREE_OPERAND (offset, 0);
9500
9501 /* We must now have a BIT_AND_EXPR with a constant that is one less than
 9502	     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
9503 if (TREE_CODE (offset) != BIT_AND_EXPR
9504 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9505 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
 9506	      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0
9507 return 0;
9508
9509 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9510 It must be NEGATE_EXPR. Then strip any more conversions. */
9511 offset = TREE_OPERAND (offset, 0);
9512 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9513 || TREE_CODE (offset) == NOP_EXPR
9514 || TREE_CODE (offset) == CONVERT_EXPR)
9515 offset = TREE_OPERAND (offset, 0);
9516
9517 if (TREE_CODE (offset) != NEGATE_EXPR)
9518 return 0;
9519
9520 offset = TREE_OPERAND (offset, 0);
9521 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9522 || TREE_CODE (offset) == NOP_EXPR
9523 || TREE_CODE (offset) == CONVERT_EXPR)
9524 offset = TREE_OPERAND (offset, 0);
9525
9526 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9527 whose type is the same as EXP. */
9528 return (TREE_CODE (offset) == ADDR_EXPR
9529 && (TREE_OPERAND (offset, 0) == exp
9530 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9531 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9532 == TREE_TYPE (exp)))));
9533}
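/* Editorial sketch (not part of expr.c): the offset shape recognized
   above is the usual align-upward computation.  ALIGN is assumed to be
   a power of 2 larger than BIGGEST_ALIGNMENT; the function name is
   hypothetical.  */
#if 0
static char *
align_up_sketch (char *exp_addr, unsigned long align)
{
  /* offset = (-&exp) & (align - 1); adding it to &exp yields an
     address that is a multiple of ALIGN.  */
  return exp_addr + ((- (unsigned long) exp_addr) & (align - 1));
}
#endif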
9534\f
e0a2f705	 9535/* Return the tree node if ARG corresponds to a string constant, or zero
cc2902df 9536 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
fed3cef0
RK
9537 in bytes within the string that ARG is accessing. The type of the
9538 offset will be `sizetype'. */
b93a436e 9539
28f4ec01 9540tree
b93a436e
JL
9541string_constant (arg, ptr_offset)
9542 tree arg;
9543 tree *ptr_offset;
9544{
9545 STRIP_NOPS (arg);
9546
9547 if (TREE_CODE (arg) == ADDR_EXPR
9548 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9549 {
fed3cef0 9550 *ptr_offset = size_zero_node;
b93a436e
JL
9551 return TREE_OPERAND (arg, 0);
9552 }
9553 else if (TREE_CODE (arg) == PLUS_EXPR)
9554 {
9555 tree arg0 = TREE_OPERAND (arg, 0);
9556 tree arg1 = TREE_OPERAND (arg, 1);
9557
9558 STRIP_NOPS (arg0);
9559 STRIP_NOPS (arg1);
9560
9561 if (TREE_CODE (arg0) == ADDR_EXPR
9562 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 9563 {
fed3cef0 9564 *ptr_offset = convert (sizetype, arg1);
b93a436e 9565 return TREE_OPERAND (arg0, 0);
bbf6f052 9566 }
b93a436e
JL
9567 else if (TREE_CODE (arg1) == ADDR_EXPR
9568 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 9569 {
fed3cef0 9570 *ptr_offset = convert (sizetype, arg0);
b93a436e 9571 return TREE_OPERAND (arg1, 0);
bbf6f052 9572 }
b93a436e 9573 }
ca695ac9 9574
b93a436e
JL
9575 return 0;
9576}
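/* Editorial sketch (not part of expr.c): source-level forms that the
   function above recognizes.  Each yields the STRING_CST "hi" plus the
   byte offset noted in the comment.  */
#if 0
const char *p0 = "hi";       /* ADDR_EXPR of STRING_CST; offset 0  */
const char *p1 = "hi" + 1;   /* PLUS_EXPR, string first; offset 1  */
const char *p2 = 1 + "hi";   /* PLUS_EXPR, string second; offset 1 */
#endif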
ca695ac9 9577\f
b93a436e
JL
9578/* Expand code for a post- or pre- increment or decrement
9579 and return the RTX for the result.
9580 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
1499e0a8 9581
b93a436e
JL
9582static rtx
9583expand_increment (exp, post, ignore)
b3694847 9584 tree exp;
b93a436e 9585 int post, ignore;
ca695ac9 9586{
b3694847
SS
9587 rtx op0, op1;
9588 rtx temp, value;
9589 tree incremented = TREE_OPERAND (exp, 0);
b93a436e
JL
9590 optab this_optab = add_optab;
9591 int icode;
9592 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9593 int op0_is_copy = 0;
9594 int single_insn = 0;
9595 /* 1 means we can't store into OP0 directly,
9596 because it is a subreg narrower than a word,
9597 and we don't dare clobber the rest of the word. */
9598 int bad_subreg = 0;
1499e0a8 9599
b93a436e
JL
9600 /* Stabilize any component ref that might need to be
9601 evaluated more than once below. */
9602 if (!post
9603 || TREE_CODE (incremented) == BIT_FIELD_REF
9604 || (TREE_CODE (incremented) == COMPONENT_REF
9605 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9606 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9607 incremented = stabilize_reference (incremented);
9608 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9609 ones into save exprs so that they don't accidentally get evaluated
9610 more than once by the code below. */
9611 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9612 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9613 incremented = save_expr (incremented);
e9a25f70 9614
b93a436e
JL
9615 /* Compute the operands as RTX.
9616 Note whether OP0 is the actual lvalue or a copy of it:
9617 I believe it is a copy iff it is a register or subreg
6d2f8887 9618 and insns were generated in computing it. */
e9a25f70 9619
b93a436e 9620 temp = get_last_insn ();
37a08a29 9621 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
e9a25f70 9622
b93a436e
JL
9623 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9624 in place but instead must do sign- or zero-extension during assignment,
9625 so we copy it into a new register and let the code below use it as
9626 a copy.
e9a25f70 9627
b93a436e
JL
 9628	     Note that we can safely modify this SUBREG since it is known not to be
9629 shared (it was made by the expand_expr call above). */
9630
9631 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9632 {
9633 if (post)
9634 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9635 else
9636 bad_subreg = 1;
9637 }
9638 else if (GET_CODE (op0) == SUBREG
9639 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9640 {
9641 /* We cannot increment this SUBREG in place. If we are
9642 post-incrementing, get a copy of the old value. Otherwise,
9643 just mark that we cannot increment in place. */
9644 if (post)
9645 op0 = copy_to_reg (op0);
9646 else
9647 bad_subreg = 1;
e9a25f70
JL
9648 }
9649
b93a436e
JL
9650 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9651 && temp != get_last_insn ());
37a08a29 9652 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
1499e0a8 9653
b93a436e
JL
9654 /* Decide whether incrementing or decrementing. */
9655 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9656 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9657 this_optab = sub_optab;
9658
9659 /* Convert decrement by a constant into a negative increment. */
9660 if (this_optab == sub_optab
9661 && GET_CODE (op1) == CONST_INT)
ca695ac9 9662 {
3a94c984 9663 op1 = GEN_INT (-INTVAL (op1));
b93a436e 9664 this_optab = add_optab;
ca695ac9 9665 }
1499e0a8 9666
91ce572a 9667 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
505ddab6 9668 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
91ce572a 9669
b93a436e
JL
9670 /* For a preincrement, see if we can do this with a single instruction. */
9671 if (!post)
9672 {
9673 icode = (int) this_optab->handlers[(int) mode].insn_code;
9674 if (icode != (int) CODE_FOR_nothing
9675 /* Make sure that OP0 is valid for operands 0 and 1
9676 of the insn we want to queue. */
a995e389
RH
9677 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9678 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9679 && (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e
JL
9680 single_insn = 1;
9681 }
bbf6f052 9682
b93a436e
JL
9683 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9684 then we cannot just increment OP0. We must therefore contrive to
9685 increment the original value. Then, for postincrement, we can return
9686 OP0 since it is a copy of the old value. For preincrement, expand here
9687 unless we can do it with a single insn.
bbf6f052 9688
b93a436e
JL
9689 Likewise if storing directly into OP0 would clobber high bits
9690 we need to preserve (bad_subreg). */
9691 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 9692 {
b93a436e
JL
9693 /* This is the easiest way to increment the value wherever it is.
9694 Problems with multiple evaluation of INCREMENTED are prevented
9695 because either (1) it is a component_ref or preincrement,
9696 in which case it was stabilized above, or (2) it is an array_ref
9697 with constant index in an array in a register, which is
9698 safe to reevaluate. */
9699 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9700 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9701 ? MINUS_EXPR : PLUS_EXPR),
9702 TREE_TYPE (exp),
9703 incremented,
9704 TREE_OPERAND (exp, 1));
a358cee0 9705
b93a436e
JL
9706 while (TREE_CODE (incremented) == NOP_EXPR
9707 || TREE_CODE (incremented) == CONVERT_EXPR)
9708 {
9709 newexp = convert (TREE_TYPE (incremented), newexp);
9710 incremented = TREE_OPERAND (incremented, 0);
9711 }
bbf6f052 9712
b93a436e
JL
 9713	      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9714 return post ? op0 : temp;
9715 }
bbf6f052 9716
b93a436e
JL
9717 if (post)
9718 {
9719 /* We have a true reference to the value in OP0.
9720 If there is an insn to add or subtract in this mode, queue it.
9721 Queueing the increment insn avoids the register shuffling
9722 that often results if we must increment now and first save
9723 the old value for subsequent use. */
bbf6f052 9724
b93a436e
JL
9725#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9726 op0 = stabilize (op0);
9727#endif
41dfd40c 9728
b93a436e
JL
9729 icode = (int) this_optab->handlers[(int) mode].insn_code;
9730 if (icode != (int) CODE_FOR_nothing
9731 /* Make sure that OP0 is valid for operands 0 and 1
9732 of the insn we want to queue. */
a995e389
RH
9733 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9734 && (*insn_data[icode].operand[1].predicate) (op0, mode))
b93a436e 9735 {
a995e389 9736 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9737 op1 = force_reg (mode, op1);
bbf6f052 9738
b93a436e
JL
9739 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9740 }
9741 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9742 {
9743 rtx addr = (general_operand (XEXP (op0, 0), mode)
9744 ? force_reg (Pmode, XEXP (op0, 0))
9745 : copy_to_reg (XEXP (op0, 0)));
9746 rtx temp, result;
ca695ac9 9747
792760b9 9748 op0 = replace_equiv_address (op0, addr);
b93a436e 9749 temp = force_reg (GET_MODE (op0), op0);
a995e389 9750 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9751 op1 = force_reg (mode, op1);
ca695ac9 9752
b93a436e
JL
9753 /* The increment queue is LIFO, thus we have to `queue'
9754 the instructions in reverse order. */
9755 enqueue_insn (op0, gen_move_insn (op0, temp));
9756 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9757 return result;
bbf6f052
RK
9758 }
9759 }
ca695ac9 9760
b93a436e
JL
9761 /* Preincrement, or we can't increment with one simple insn. */
9762 if (post)
9763 /* Save a copy of the value before inc or dec, to return it later. */
9764 temp = value = copy_to_reg (op0);
9765 else
9766 /* Arrange to return the incremented value. */
9767 /* Copy the rtx because expand_binop will protect from the queue,
9768 and the results of that would be invalid for us to return
9769 if our caller does emit_queue before using our result. */
9770 temp = copy_rtx (value = op0);
bbf6f052 9771
b93a436e 9772 /* Increment however we can. */
37a08a29 9773 op1 = expand_binop (mode, this_optab, value, op1, op0,
b93a436e 9774 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
37a08a29 9775
b93a436e
JL
9776 /* Make sure the value is stored into OP0. */
9777 if (op1 != op0)
9778 emit_move_insn (op0, op1);
5718612f 9779
b93a436e
JL
9780 return temp;
9781}
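/* Editorial sketch (not part of expr.c): the post/pre distinction
   handled above, for a hypothetical int lvalue.  A postincrement must
   copy the old value before the add and return the copy; a
   preincrement returns the updated value.  */
#if 0
static int
post_inc_sketch (int *v)
{
  int old = *v;   /* temp = value = copy_to_reg (op0)  */
  *v = *v + 1;    /* expand_binop + emit_move_insn     */
  return old;
}

static int
pre_inc_sketch (int *v)
{
  *v = *v + 1;
  return *v;      /* arrange to return the incremented value */
}
#endif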
9782\f
b93a436e
JL
9783/* Generate code to calculate EXP using a store-flag instruction
9784 and return an rtx for the result. EXP is either a comparison
9785 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 9786
b93a436e 9787 If TARGET is nonzero, store the result there if convenient.
ca695ac9 9788
cc2902df 9789 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
b93a436e 9790 cheap.
ca695ac9 9791
b93a436e
JL
9792 Return zero if there is no suitable set-flag instruction
9793 available on this machine.
ca695ac9 9794
b93a436e
JL
9795 Once expand_expr has been called on the arguments of the comparison,
9796 we are committed to doing the store flag, since it is not safe to
9797 re-evaluate the expression. We emit the store-flag insn by calling
9798 emit_store_flag, but only expand the arguments if we have a reason
9799 to believe that emit_store_flag will be successful. If we think that
9800 it will, but it isn't, we have to simulate the store-flag with a
9801 set/jump/set sequence. */
ca695ac9 9802
b93a436e
JL
9803static rtx
9804do_store_flag (exp, target, mode, only_cheap)
9805 tree exp;
9806 rtx target;
9807 enum machine_mode mode;
9808 int only_cheap;
9809{
9810 enum rtx_code code;
9811 tree arg0, arg1, type;
9812 tree tem;
9813 enum machine_mode operand_mode;
9814 int invert = 0;
9815 int unsignedp;
9816 rtx op0, op1;
9817 enum insn_code icode;
9818 rtx subtarget = target;
381127e8 9819 rtx result, label;
ca695ac9 9820
b93a436e
JL
9821 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9822 result at the end. We can't simply invert the test since it would
9823 have already been inverted if it were valid. This case occurs for
9824 some floating-point comparisons. */
ca695ac9 9825
b93a436e
JL
9826 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9827 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 9828
b93a436e
JL
9829 arg0 = TREE_OPERAND (exp, 0);
9830 arg1 = TREE_OPERAND (exp, 1);
5129d2ce
AH
9831
9832 /* Don't crash if the comparison was erroneous. */
9833 if (arg0 == error_mark_node || arg1 == error_mark_node)
9834 return const0_rtx;
9835
b93a436e
JL
9836 type = TREE_TYPE (arg0);
9837 operand_mode = TYPE_MODE (type);
9838 unsignedp = TREE_UNSIGNED (type);
ca695ac9 9839
b93a436e
JL
9840 /* We won't bother with BLKmode store-flag operations because it would mean
9841 passing a lot of information to emit_store_flag. */
9842 if (operand_mode == BLKmode)
9843 return 0;
ca695ac9 9844
b93a436e
JL
9845 /* We won't bother with store-flag operations involving function pointers
9846 when function pointers must be canonicalized before comparisons. */
9847#ifdef HAVE_canonicalize_funcptr_for_compare
9848 if (HAVE_canonicalize_funcptr_for_compare
9849 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9850 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9851 == FUNCTION_TYPE))
9852 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9853 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9854 == FUNCTION_TYPE))))
9855 return 0;
ca695ac9
JB
9856#endif
9857
b93a436e
JL
9858 STRIP_NOPS (arg0);
9859 STRIP_NOPS (arg1);
ca695ac9 9860
b93a436e
JL
9861 /* Get the rtx comparison code to use. We know that EXP is a comparison
9862 operation of some type. Some comparisons against 1 and -1 can be
9863 converted to comparisons with zero. Do so here so that the tests
9864 below will be aware that we have a comparison with zero. These
9865 tests will not catch constants in the first operand, but constants
9866 are rarely passed as the first operand. */
ca695ac9 9867
b93a436e
JL
9868 switch (TREE_CODE (exp))
9869 {
9870 case EQ_EXPR:
9871 code = EQ;
bbf6f052 9872 break;
b93a436e
JL
9873 case NE_EXPR:
9874 code = NE;
bbf6f052 9875 break;
b93a436e
JL
9876 case LT_EXPR:
9877 if (integer_onep (arg1))
9878 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9879 else
9880 code = unsignedp ? LTU : LT;
ca695ac9 9881 break;
b93a436e
JL
9882 case LE_EXPR:
9883 if (! unsignedp && integer_all_onesp (arg1))
9884 arg1 = integer_zero_node, code = LT;
9885 else
9886 code = unsignedp ? LEU : LE;
ca695ac9 9887 break;
b93a436e
JL
9888 case GT_EXPR:
9889 if (! unsignedp && integer_all_onesp (arg1))
9890 arg1 = integer_zero_node, code = GE;
9891 else
9892 code = unsignedp ? GTU : GT;
9893 break;
9894 case GE_EXPR:
9895 if (integer_onep (arg1))
9896 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9897 else
9898 code = unsignedp ? GEU : GE;
ca695ac9 9899 break;
1eb8759b
RH
9900
9901 case UNORDERED_EXPR:
9902 code = UNORDERED;
9903 break;
9904 case ORDERED_EXPR:
9905 code = ORDERED;
9906 break;
9907 case UNLT_EXPR:
9908 code = UNLT;
9909 break;
9910 case UNLE_EXPR:
9911 code = UNLE;
9912 break;
9913 case UNGT_EXPR:
9914 code = UNGT;
9915 break;
9916 case UNGE_EXPR:
9917 code = UNGE;
9918 break;
9919 case UNEQ_EXPR:
9920 code = UNEQ;
9921 break;
1eb8759b 9922
ca695ac9 9923 default:
b93a436e 9924 abort ();
bbf6f052 9925 }
bbf6f052 9926
b93a436e
JL
9927 /* Put a constant second. */
9928 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9929 {
9930 tem = arg0; arg0 = arg1; arg1 = tem;
9931 code = swap_condition (code);
ca695ac9 9932 }
bbf6f052 9933
b93a436e
JL
9934 /* If this is an equality or inequality test of a single bit, we can
9935 do this by shifting the bit being tested to the low-order bit and
9936 masking the result with the constant 1. If the condition was EQ,
9937 we xor it with 1. This does not require an scc insn and is faster
9938 than an scc insn even if we have it. */
d39985fa 9939
b93a436e
JL
9940 if ((code == NE || code == EQ)
9941 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9942 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9943 {
9944 tree inner = TREE_OPERAND (arg0, 0);
9945 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
9946 int ops_unsignedp;
bbf6f052 9947
b93a436e
JL
9948 /* If INNER is a right shift of a constant and it plus BITNUM does
9949 not overflow, adjust BITNUM and INNER. */
ca695ac9 9950
b93a436e
JL
9951 if (TREE_CODE (inner) == RSHIFT_EXPR
9952 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9953 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
05bccae2
RK
9954 && bitnum < TYPE_PRECISION (type)
9955 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
9956 bitnum - TYPE_PRECISION (type)))
ca695ac9 9957 {
b93a436e
JL
9958 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9959 inner = TREE_OPERAND (inner, 0);
ca695ac9 9960 }
ca695ac9 9961
b93a436e
JL
9962 /* If we are going to be able to omit the AND below, we must do our
9963 operations as unsigned. If we must use the AND, we have a choice.
9964 Normally unsigned is faster, but for some machines signed is. */
9965 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9966#ifdef LOAD_EXTEND_OP
9967 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9968#else
9969 : 1
9970#endif
9971 );
bbf6f052 9972
296b4ed9 9973 if (! get_subtarget (subtarget)
a47fed55 9974 || GET_MODE (subtarget) != operand_mode
e5e809f4 9975 || ! safe_from_p (subtarget, inner, 1))
b93a436e 9976 subtarget = 0;
bbf6f052 9977
b93a436e 9978 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
bbf6f052 9979
b93a436e 9980 if (bitnum != 0)
681cb233 9981 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
b93a436e 9982 size_int (bitnum), subtarget, ops_unsignedp);
bbf6f052 9983
b93a436e
JL
9984 if (GET_MODE (op0) != mode)
9985 op0 = convert_to_mode (mode, op0, ops_unsignedp);
bbf6f052 9986
b93a436e
JL
9987 if ((code == EQ && ! invert) || (code == NE && invert))
9988 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
9989 ops_unsignedp, OPTAB_LIB_WIDEN);
bbf6f052 9990
b93a436e
JL
9991 /* Put the AND last so it can combine with more things. */
9992 if (bitnum != TYPE_PRECISION (type) - 1)
22273300 9993 op0 = expand_and (mode, op0, const1_rtx, subtarget);
bbf6f052 9994
b93a436e
JL
9995 return op0;
9996 }
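/* Editorial sketch (not part of expr.c): the shift/mask rewrite above,
   for a hypothetical operand X and the single-bit test (X & 8) != 0.
   The EQ form simply XORs the NE result with 1.  */
#if 0
static int bit_test_ne_sketch (int x) { return (x >> 3) & 1; }
static int bit_test_eq_sketch (int x) { return ((x >> 3) & 1) ^ 1; }
#endif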
bbf6f052 9997
b93a436e 9998 /* Now see if we are likely to be able to do this. Return if not. */
1eb8759b 9999 if (! can_compare_p (code, operand_mode, ccp_store_flag))
b93a436e 10000 return 0;
1eb8759b 10001
b93a436e
JL
10002 icode = setcc_gen_code[(int) code];
10003 if (icode == CODE_FOR_nothing
a995e389 10004 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
ca695ac9 10005 {
b93a436e
JL
10006 /* We can only do this if it is one of the special cases that
10007 can be handled without an scc insn. */
10008 if ((code == LT && integer_zerop (arg1))
10009 || (! only_cheap && code == GE && integer_zerop (arg1)))
10010 ;
10011 else if (BRANCH_COST >= 0
10012 && ! only_cheap && (code == NE || code == EQ)
10013 && TREE_CODE (type) != REAL_TYPE
10014 && ((abs_optab->handlers[(int) operand_mode].insn_code
10015 != CODE_FOR_nothing)
10016 || (ffs_optab->handlers[(int) operand_mode].insn_code
10017 != CODE_FOR_nothing)))
10018 ;
10019 else
10020 return 0;
ca695ac9 10021 }
3a94c984 10022
296b4ed9 10023 if (! get_subtarget (target)
a47fed55 10024 || GET_MODE (subtarget) != operand_mode
e5e809f4 10025 || ! safe_from_p (subtarget, arg1, 1))
b93a436e
JL
10026 subtarget = 0;
10027
10028 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10029 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10030
10031 if (target == 0)
10032 target = gen_reg_rtx (mode);
10033
10034 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10035 because, if the emit_store_flag does anything it will succeed and
10036 OP0 and OP1 will not be used subsequently. */
ca695ac9 10037
b93a436e
JL
10038 result = emit_store_flag (target, code,
10039 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10040 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10041 operand_mode, unsignedp, 1);
ca695ac9 10042
b93a436e
JL
10043 if (result)
10044 {
10045 if (invert)
10046 result = expand_binop (mode, xor_optab, result, const1_rtx,
10047 result, 0, OPTAB_LIB_WIDEN);
10048 return result;
ca695ac9 10049 }
bbf6f052 10050
b93a436e
JL
10051 /* If this failed, we have to do this with set/compare/jump/set code. */
10052 if (GET_CODE (target) != REG
10053 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10054 target = gen_reg_rtx (GET_MODE (target));
10055
10056 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10057 result = compare_from_rtx (op0, op1, code, unsignedp,
a06ef755 10058 operand_mode, NULL_RTX);
b93a436e
JL
10059 if (GET_CODE (result) == CONST_INT)
10060 return (((result == const0_rtx && ! invert)
10061 || (result != const0_rtx && invert))
10062 ? const0_rtx : const1_rtx);
ca695ac9 10063
8f08e8c0
JL
10064 /* The code of RESULT may not match CODE if compare_from_rtx
10065 decided to swap its operands and reverse the original code.
10066
10067 We know that compare_from_rtx returns either a CONST_INT or
10068 a new comparison code, so it is safe to just extract the
10069 code from RESULT. */
10070 code = GET_CODE (result);
10071
b93a436e
JL
10072 label = gen_label_rtx ();
10073 if (bcc_gen_fctn[(int) code] == 0)
10074 abort ();
0f41302f 10075
b93a436e
JL
10076 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10077 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10078 emit_label (label);
bbf6f052 10079
b93a436e 10080 return target;
ca695ac9 10081}
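/* Editorial sketch (not part of expr.c): the set/compare/jump/set
   fallback emitted above, shown for a hypothetical signed comparison
   A < B with INVERT clear.  */
#if 0
static int
store_flag_fallback_sketch (int a, int b)
{
  int r = 1;       /* emit_move_insn (target, const1_rtx) */
  if (a < b)
    goto done;     /* the bcc_gen_fctn conditional jump   */
  r = 0;           /* emit_move_insn (target, const0_rtx) */
 done:
  return r;
}
#endif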
b93a436e 10082\f
b93a436e 10083
ad82abb8
ZW
10084/* Stubs in case we haven't got a casesi insn. */
10085#ifndef HAVE_casesi
10086# define HAVE_casesi 0
10087# define gen_casesi(a, b, c, d, e) (0)
10088# define CODE_FOR_casesi CODE_FOR_nothing
10089#endif
10090
10091/* If the machine does not have a case insn that compares the bounds,
10092 this means extra overhead for dispatch tables, which raises the
10093 threshold for using them. */
10094#ifndef CASE_VALUES_THRESHOLD
10095#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10096#endif /* CASE_VALUES_THRESHOLD */
10097
10098unsigned int
10099case_values_threshold ()
10100{
10101 return CASE_VALUES_THRESHOLD;
10102}
10103
10104/* Attempt to generate a casesi instruction. Returns 1 if successful,
10105 0 otherwise (i.e. if there is no casesi instruction). */
10106int
10107try_casesi (index_type, index_expr, minval, range,
10108 table_label, default_label)
10109 tree index_type, index_expr, minval, range;
10110 rtx table_label ATTRIBUTE_UNUSED;
10111 rtx default_label;
10112{
10113 enum machine_mode index_mode = SImode;
10114 int index_bits = GET_MODE_BITSIZE (index_mode);
10115 rtx op1, op2, index;
10116 enum machine_mode op_mode;
10117
10118 if (! HAVE_casesi)
10119 return 0;
10120
10121 /* Convert the index to SImode. */
10122 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10123 {
10124 enum machine_mode omode = TYPE_MODE (index_type);
10125 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10126
10127 /* We must handle the endpoints in the original mode. */
10128 index_expr = build (MINUS_EXPR, index_type,
10129 index_expr, minval);
10130 minval = integer_zero_node;
10131 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10132 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
a06ef755 10133 omode, 1, default_label);
ad82abb8
ZW
10134 /* Now we can safely truncate. */
10135 index = convert_to_mode (index_mode, index, 0);
10136 }
10137 else
10138 {
10139 if (TYPE_MODE (index_type) != index_mode)
10140 {
b0c48229
NB
10141 index_expr = convert ((*lang_hooks.types.type_for_size)
10142 (index_bits, 0), index_expr);
ad82abb8
ZW
10143 index_type = TREE_TYPE (index_expr);
10144 }
10145
10146 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10147 }
10148 emit_queue ();
10149 index = protect_from_queue (index, 0);
10150 do_pending_stack_adjust ();
10151
10152 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10153 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10154 (index, op_mode))
10155 index = copy_to_mode_reg (op_mode, index);
e87b4f3f 10156
ad82abb8
ZW
10157 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10158
10159 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10160 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10161 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10162 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10163 (op1, op_mode))
10164 op1 = copy_to_mode_reg (op_mode, op1);
10165
10166 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10167
10168 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10169 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10170 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10171 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10172 (op2, op_mode))
10173 op2 = copy_to_mode_reg (op_mode, op2);
10174
10175 emit_jump_insn (gen_casesi (index, op1, op2,
10176 table_label, default_label));
10177 return 1;
10178}
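/* Editorial sketch (not part of expr.c): why the endpoints are handled
   in the original mode above.  The subtraction and bounds check happen
   before the index is truncated to SImode, so a wide out-of-range
   value cannot alias a valid narrow one.  Names are hypothetical.  */
#if 0
static int
narrow_index_sketch (long idx, long minval, long range,
		     void (*default_case) (void))
{
  unsigned long rel = (unsigned long) (idx - minval);  /* original mode */
  if (rel > (unsigned long) range)
    default_case ();	/* emit_cmp_and_jump_insns (..., LTU, ...) */
  return (int) rel;	/* now we can safely truncate to SImode    */
}
#endif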
10179
10180/* Attempt to generate a tablejump instruction; same concept. */
10181#ifndef HAVE_tablejump
10182#define HAVE_tablejump 0
10183#define gen_tablejump(x, y) (0)
10184#endif
10185
10186/* Subroutine of the next function.
10187
10188 INDEX is the value being switched on, with the lowest value
b93a436e
JL
10189 in the table already subtracted.
10190 MODE is its expected mode (needed if INDEX is constant).
10191 RANGE is the length of the jump table.
10192 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
88d3b7f0 10193
b93a436e
JL
10194 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10195 index value is out of range. */
0f41302f 10196
ad82abb8 10197static void
b93a436e
JL
10198do_tablejump (index, mode, range, table_label, default_label)
10199 rtx index, range, table_label, default_label;
10200 enum machine_mode mode;
ca695ac9 10201{
b3694847 10202 rtx temp, vector;
88d3b7f0 10203
74f6d071
JH
10204 if (INTVAL (range) > cfun->max_jumptable_ents)
10205 cfun->max_jumptable_ents = INTVAL (range);
1877be45 10206
b93a436e
JL
10207 /* Do an unsigned comparison (in the proper mode) between the index
10208 expression and the value which represents the length of the range.
10209 Since we just finished subtracting the lower bound of the range
10210 from the index expression, this comparison allows us to simultaneously
10211 check that the original index expression value is both greater than
10212 or equal to the minimum value of the range and less than or equal to
10213 the maximum value of the range. */
709f5be1 10214
c5d5d461 10215 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
a06ef755 10216 default_label);
bbf6f052 10217
b93a436e
JL
10218 /* If index is in range, it must fit in Pmode.
10219 Convert to Pmode so we can index with it. */
10220 if (mode != Pmode)
10221 index = convert_to_mode (Pmode, index, 1);
bbf6f052 10222
b93a436e
JL
 10223	  /* Don't let a MEM slip through, because then INDEX that comes
10224 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10225 and break_out_memory_refs will go to work on it and mess it up. */
10226#ifdef PIC_CASE_VECTOR_ADDRESS
10227 if (flag_pic && GET_CODE (index) != REG)
10228 index = copy_to_mode_reg (Pmode, index);
10229#endif
ca695ac9 10230
b93a436e
JL
10231 /* If flag_force_addr were to affect this address
10232 it could interfere with the tricky assumptions made
10233 about addresses that contain label-refs,
10234 which may be valid only very near the tablejump itself. */
10235 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10236 GET_MODE_SIZE, because this indicates how large insns are. The other
10237 uses should all be Pmode, because they are addresses. This code
10238 could fail if addresses and insns are not the same size. */
10239 index = gen_rtx_PLUS (Pmode,
10240 gen_rtx_MULT (Pmode, index,
10241 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10242 gen_rtx_LABEL_REF (Pmode, table_label));
10243#ifdef PIC_CASE_VECTOR_ADDRESS
10244 if (flag_pic)
10245 index = PIC_CASE_VECTOR_ADDRESS (index);
10246 else
bbf6f052 10247#endif
b93a436e
JL
10248 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10249 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10250 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10251 RTX_UNCHANGING_P (vector) = 1;
10252 convert_move (temp, vector, 0);
10253
10254 emit_jump_insn (gen_tablejump (temp, table_label));
10255
10256 /* If we are generating PIC code or if the table is PC-relative, the
10257 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10258 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10259 emit_barrier ();
bbf6f052 10260}
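/* Editorial sketch (not part of expr.c): the single unsigned compare
   above.  With the lower bound already subtracted, an index below
   MINVAL wraps around to a large unsigned value, so one GTU test
   rejects both ends of the range at once.  */
#if 0
static int
in_range_sketch (unsigned long index_minus_min, unsigned long range)
{
  return index_minus_min <= range;   /* also fails for index < minval */
}
#endif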
b93a436e 10261
ad82abb8
ZW
10262int
10263try_tablejump (index_type, index_expr, minval, range,
10264 table_label, default_label)
10265 tree index_type, index_expr, minval, range;
10266 rtx table_label, default_label;
10267{
10268 rtx index;
10269
10270 if (! HAVE_tablejump)
10271 return 0;
10272
10273 index_expr = fold (build (MINUS_EXPR, index_type,
10274 convert (index_type, index_expr),
10275 convert (index_type, minval)));
10276 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10277 emit_queue ();
10278 index = protect_from_queue (index, 0);
10279 do_pending_stack_adjust ();
10280
10281 do_tablejump (index, TYPE_MODE (index_type),
10282 convert_modes (TYPE_MODE (index_type),
10283 TYPE_MODE (TREE_TYPE (range)),
10284 expand_expr (range, NULL_RTX,
10285 VOIDmode, 0),
10286 TREE_UNSIGNED (TREE_TYPE (range))),
10287 table_label, default_label);
10288 return 1;
10289}
e2500fed 10290
cb2a532e
AH
10291/* Nonzero if the mode is a valid vector mode for this architecture.
10292 This returns nonzero even if there is no hardware support for the
10293 vector mode, but we can emulate with narrower modes. */
10294
10295int
10296vector_mode_valid_p (mode)
10297 enum machine_mode mode;
10298{
10299 enum mode_class class = GET_MODE_CLASS (mode);
10300 enum machine_mode innermode;
10301
10302 /* Doh! What's going on? */
10303 if (class != MODE_VECTOR_INT
10304 && class != MODE_VECTOR_FLOAT)
10305 return 0;
10306
10307 /* Hardware support. Woo hoo! */
10308 if (VECTOR_MODE_SUPPORTED_P (mode))
10309 return 1;
10310
10311 innermode = GET_MODE_INNER (mode);
10312
 10313	  /* We should probably return 1 if requesting V4DI and we have no DI
 10314	     but do have V2DI; however, that case is very unlikely.  */
10315
10316 /* If we have support for the inner mode, we can safely emulate it.
 10317	     We may not have V2DI, but we can emulate with a pair of DIs.  */
10318 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10319}
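/* Editorial sketch (not part of expr.c): emulating a V2DI move with a
   pair of DImode moves, as the comment above describes.  The typedef
   and struct are hypothetical stand-ins for the machine modes.  */
#if 0
typedef long long di_sketch;
struct v2di_sketch { di_sketch lo, hi; };

static void
move_v2di_sketch (struct v2di_sketch *dst, const struct v2di_sketch *src)
{
  dst->lo = src->lo;   /* one inner-mode (DImode) move */
  dst->hi = src->hi;   /* and the second half          */
}
#endif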
10320
e2500fed 10321#include "gt-expr.h"