/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif


/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the store to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};

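/* Illustrative sketch (an addition, not in the original source): the
   `constfun' callback supplies, for each chunk, the constant rtx to be
   stored at OFFSET in mode MODE.  A minimal zero-filling callback,
   with hypothetical names, might look like:

     static rtx
     zero_constfun (data, offset, mode)
          PTR data ATTRIBUTE_UNUSED;
          HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
          enum machine_mode mode;
     {
       return CONST0_RTX (mode);
     }

   clear_by_pieces_1, declared below, plays this role for block clears.  */
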
static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
static tree emit_block_move_libcall_fn PARAMS ((int));
static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
static tree clear_storage_libcall_fn PARAMS ((int));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int, tree,
					 int));
static rtx var_rtx		PARAMS ((tree));

static unsigned HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree,
								    tree));

static int is_aligning_offset	PARAMS ((tree, tree));
static rtx expand_increment	PARAMS ((tree, int, int));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
static rtx const_vector_from_tree PARAMS ((tree));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

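/* Worked example (an illustration, not in the original source): on a
   hypothetical 32-bit target whose widest move is 4 bytes and with the
   default MOVE_RATIO of 15, a 16-byte word-aligned copy needs
   move_by_pieces_ninsns (16, 32) == 4 move insns, so MOVE_BY_PIECES_P
   accepts it; a 256-byte copy would need 64 insns and is left to a
   movstr pattern or a libcall.  CLEAR_BY_PIECES_P applies the same
   test with CLEAR_RATIO for block clears.  */
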
/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
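
/* Usage sketch (an addition, not in the original source): the expected
   calling pattern for the increment queue, with hypothetical names:

     rtx q = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
     ...
     rtx safe = protect_from_queue (q, 0);    -- pre-increment value,
                                                 safe to put in an insn
     ...
     emit_queue ();                           -- flush the increments

   Any rtx that might be QUEUED must go through protect_from_queue
   immediately before being placed in an instruction, as described
   above.  */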

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	case CODE_LABEL:
	case BARRIER:
	case NOTE:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
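
/* Usage sketch (an addition, not in the original source): widening a
   QImode value SRC into a fresh SImode register with zero-extension
   (unsignedp == 1 selects ZERO_EXTEND):

     rtx dst = gen_reg_rtx (SImode);    -- hypothetical destination
     convert_move (dst, src, 1);

   Depending on the target this emits a direct extend insn, converts
   through an intermediate mode, or falls back to a pair of shifts.  */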

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
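
/* Worked example (an illustration, not in the original source):
   converting (const_int -1) from QImode to SImode.  With UNSIGNEDP
   nonzero, the CONST_INT branch above zero-extends: -1 masked with
   ((HOST_WIDE_INT) 1 << 8) - 1 yields 255, giving (const_int 255).
   With UNSIGNEDP zero, bit 7 of 255 is set, so the value is
   sign-extended back and the result stays (const_int -1).  */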
\f
/* This macro determines the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

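/* Worked example (an illustration, not in the original source): on a
   host with a 32-bit HOST_WIDE_INT and a target with MOVE_MAX_PIECES
   of 16, STORE_MAX_PIECES is MIN (16, 2 * 4) = 8, because an
   immediate constant wider than two HOST_WIDE_INTs cannot be built.  */
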
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
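
/* Illustrative walk-through (an addition, not in the original source):
   copying a 7-byte block on a hypothetical 32-bit target with 4-byte
   alignment.  The largest-first loop emits one SImode move for bytes
   0-3, one HImode move for bytes 4-5, and one QImode move for byte 6,
   leaving data.len == 0.  */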

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
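
/* Worked example (an illustration, not in the original source): with
   MOVE_MAX of 4 and 32-bit alignment, move_by_pieces_ninsns (7, 32)
   counts 7/4 = 1 SImode insn (leaving l == 3), then 3/2 = 1 HImode
   insn (leaving l == 1), then 1 QImode insn, and returns 3.  */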

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
1688\f
4ca79136
RH
1689/* Emit code to move a block Y to a block X. This may be done with
1690 string-move instructions, with multiple scalar move instructions,
1691 or with a library call.
bbf6f052 1692
4ca79136 1693 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
bbf6f052 1694 SIZE is an rtx that says how long they are.
19caa751 1695 ALIGN is the maximum alignment we can assume they have.
44bb111a 1696 METHOD describes what kind of copy this is, and what mechanisms may be used.
bbf6f052 1697
e9a25f70
JL
1698 Return the address of the new block, if memcpy is called and returns it,
1699 0 otherwise. */
1700
1701rtx
44bb111a 1702emit_block_move (x, y, size, method)
4ca79136 1703 rtx x, y, size;
44bb111a 1704 enum block_op_methods method;
bbf6f052 1705{
44bb111a 1706 bool may_use_call;
e9a25f70 1707 rtx retval = 0;
44bb111a
RH
1708 unsigned int align;
1709
1710 switch (method)
1711 {
1712 case BLOCK_OP_NORMAL:
1713 may_use_call = true;
1714 break;
1715
1716 case BLOCK_OP_CALL_PARM:
1717 may_use_call = block_move_libcall_safe_for_call_parm ();
1718
1719 /* Make inhibit_defer_pop nonzero around the library call
1720 to force it to pop the arguments right away. */
1721 NO_DEFER_POP;
1722 break;
1723
1724 case BLOCK_OP_NO_LIBCALL:
1725 may_use_call = false;
1726 break;
1727
1728 default:
1729 abort ();
1730 }
1731
1732 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
e9a25f70 1733
bbf6f052
RK
1734 if (GET_MODE (x) != BLKmode)
1735 abort ();
bbf6f052
RK
1736 if (GET_MODE (y) != BLKmode)
1737 abort ();
1738
1739 x = protect_from_queue (x, 1);
1740 y = protect_from_queue (y, 0);
5d901c31 1741 size = protect_from_queue (size, 0);
bbf6f052
RK
1742
1743 if (GET_CODE (x) != MEM)
1744 abort ();
1745 if (GET_CODE (y) != MEM)
1746 abort ();
1747 if (size == 0)
1748 abort ();
1749
cb38fd88
RH
1750 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1751 can be incorrect is coming from __builtin_memcpy. */
1752 if (GET_CODE (size) == CONST_INT)
1753 {
1754 x = shallow_copy_rtx (x);
1755 y = shallow_copy_rtx (y);
1756 set_mem_size (x, size);
1757 set_mem_size (y, size);
1758 }
1759
fbe1758d 1760 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
bbf6f052 1761 move_by_pieces (x, y, INTVAL (size), align);
4ca79136
RH
1762 else if (emit_block_move_via_movstr (x, y, size, align))
1763 ;
44bb111a 1764 else if (may_use_call)
4ca79136 1765 retval = emit_block_move_via_libcall (x, y, size);
44bb111a
RH
1766 else
1767 emit_block_move_via_loop (x, y, size, align);
1768
1769 if (method == BLOCK_OP_CALL_PARM)
1770 OK_DEFER_POP;
266007a7 1771
4ca79136
RH
1772 return retval;
1773}
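
/* A minimal usage sketch, not part of expr.c: EXAMPLE_COPY_16_BYTES is a
   hypothetical caller, assuming the usual GCC internal environment. DST
   and SRC must be BLKmode MEMs; BLOCK_OP_NORMAL permits a memcpy libcall,
   while BLOCK_OP_NO_LIBCALL forces the by-pieces or loop fallbacks. */

static void
example_copy_16_bytes (dst, src)
     rtx dst, src;
{
  rtx retval;

  /* SIZE is an rtx; a CONST_INT enables the by-pieces strategy. */
  retval = emit_block_move (dst, src, GEN_INT (16), BLOCK_OP_NORMAL);

  /* RETVAL is the address returned by memcpy, or 0 if no call was made. */
  if (retval != 0)
    emit_move_insn (gen_reg_rtx (Pmode), retval);
}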
266007a7 1774
44bb111a
RH
1775/* A subroutine of emit_block_move. Returns true if calling the
1776 block move libcall will not clobber any parameters which may have
1777 already been placed on the stack. */
1778
1779static bool
1780block_move_libcall_safe_for_call_parm ()
1781{
1782 if (PUSH_ARGS)
1783 return true;
1784 else
1785 {
1786 /* Check to see whether memcpy takes all register arguments. */
1787 static enum {
1788 takes_regs_uninit, takes_regs_no, takes_regs_yes
1789 } takes_regs = takes_regs_uninit;
1790
1791 switch (takes_regs)
1792 {
1793 case takes_regs_uninit:
1794 {
1795 CUMULATIVE_ARGS args_so_far;
1796 tree fn, arg;
1797
1798 fn = emit_block_move_libcall_fn (false);
1799 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1800
1801 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1802 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1803 {
98c0d8d1 1804 enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
44bb111a
RH
1805 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1806 if (!tmp || !REG_P (tmp))
1807 goto fail_takes_regs;
1808#ifdef FUNCTION_ARG_PARTIAL_NREGS
1809 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1810 NULL_TREE, 1))
1811 goto fail_takes_regs;
1812#endif
1813 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1814 }
1815 }
1816 takes_regs = takes_regs_yes;
1817 /* FALLTHRU */
1818
1819 case takes_regs_yes:
1820 return true;
1821
1822 fail_takes_regs:
1823 takes_regs = takes_regs_no;
1824 /* FALLTHRU */
1825 case takes_regs_no:
1826 return false;
1827
1828 default:
1829 abort ();
1830 }
1831 }
1832}
1833
4ca79136
RH
1834/* A subroutine of emit_block_move. Expand a movstr pattern;
1835 return true if successful. */
3ef1eef4 1836
4ca79136
RH
1837static bool
1838emit_block_move_via_movstr (x, y, size, align)
1839 rtx x, y, size;
1840 unsigned int align;
1841{
1842 /* Try the most limited insn first, because there's no point
1843 including more than one in the machine description unless
1844 the more limited one has some advantage. */
266007a7 1845
4ca79136
RH
1846 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1847 enum machine_mode mode;
266007a7 1848
4ca79136
RH
1849 /* Since this is a move insn, we don't care about volatility. */
1850 volatile_ok = 1;
1851
1852 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1853 mode = GET_MODE_WIDER_MODE (mode))
1854 {
1855 enum insn_code code = movstr_optab[(int) mode];
1856 insn_operand_predicate_fn pred;
1857
1858 if (code != CODE_FOR_nothing
1859 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1860 here because if SIZE is less than the mode mask, as it is
1861 returned by the macro, it will definitely be less than the
1862 actual mode mask. */
1863 && ((GET_CODE (size) == CONST_INT
1864 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1865 <= (GET_MODE_MASK (mode) >> 1)))
1866 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1867 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1868 || (*pred) (x, BLKmode))
1869 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1870 || (*pred) (y, BLKmode))
1871 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1872 || (*pred) (opalign, VOIDmode)))
1873 {
1874 rtx op2;
1875 rtx last = get_last_insn ();
1876 rtx pat;
1877
1878 op2 = convert_to_mode (mode, size, 1);
1879 pred = insn_data[(int) code].operand[2].predicate;
1880 if (pred != 0 && ! (*pred) (op2, mode))
1881 op2 = copy_to_mode_reg (mode, op2);
1882
1883 /* ??? When called via emit_block_move_for_call, it'd be
1884 nice if there were some way to inform the backend, so
1885 that it doesn't fail the expansion because it thinks
1886 emitting the libcall would be more efficient. */
1887
1888 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1889 if (pat)
1890 {
1891 emit_insn (pat);
1892 volatile_ok = 0;
1893 return true;
bbf6f052 1894 }
4ca79136
RH
1895 else
1896 delete_insns_since (last);
bbf6f052 1897 }
4ca79136 1898 }
bbf6f052 1899
4ca79136
RH
1900 volatile_ok = 0;
1901 return false;
1902}
3ef1eef4 1903
4ca79136
RH
1904/* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1905 Return the return value from memcpy, 0 otherwise. */
4bc973ae 1906
4ca79136
RH
1907static rtx
1908emit_block_move_via_libcall (dst, src, size)
1909 rtx dst, src, size;
1910{
1911 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1912 enum machine_mode size_mode;
1913 rtx retval;
4bc973ae 1914
4ca79136 1915 /* DST, SRC, or SIZE may have been passed through protect_from_queue.
4bc973ae 1916
4ca79136
RH
1917 It is unsafe to save the value generated by protect_from_queue
1918 and reuse it later. Consider what happens if emit_queue is
1919 called before the return value from protect_from_queue is used.
4bc973ae 1920
4ca79136
RH
1921 Expansion of the CALL_EXPR below will call emit_queue before
1922 we are finished emitting RTL for argument setup. So if we are
1923 not careful we could get the wrong value for an argument.
4bc973ae 1924
4ca79136
RH
1925 To avoid this problem we go ahead and emit code to copy X, Y &
1926 SIZE into new pseudos. We can then place those new pseudos
1927 into an RTL_EXPR and use them later, even after a call to
1928 emit_queue.
4bc973ae 1929
4ca79136
RH
1930 Note this is not strictly needed for library calls since they
1931 do not call emit_queue before loading their arguments. However,
1932 we may need to have library calls call emit_queue in the future
1933 since failing to do so could cause problems for targets which
1934 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1935
1936 dst = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1937 src = copy_to_mode_reg (Pmode, XEXP (src, 0));
1938
1939 if (TARGET_MEM_FUNCTIONS)
1940 size_mode = TYPE_MODE (sizetype);
1941 else
1942 size_mode = TYPE_MODE (unsigned_type_node);
1943 size = convert_to_mode (size_mode, size, 1);
1944 size = copy_to_mode_reg (size_mode, size);
1945
1946 /* It is incorrect to use the libcall calling conventions to call
1947 memcpy in this context. This could be a user call to memcpy and
1948 the user may wish to examine the return value from memcpy. For
1949 targets where libcalls and normal calls have different conventions
1950 for returning pointers, we could end up generating incorrect code.
1951
1952 For convenience, we generate the call to bcopy this way as well. */
1953
1954 dst_tree = make_tree (ptr_type_node, dst);
1955 src_tree = make_tree (ptr_type_node, src);
1956 if (TARGET_MEM_FUNCTIONS)
1957 size_tree = make_tree (sizetype, size);
1958 else
1959 size_tree = make_tree (unsigned_type_node, size);
1960
1961 fn = emit_block_move_libcall_fn (true);
1962 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
1963 if (TARGET_MEM_FUNCTIONS)
1964 {
1965 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1966 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1967 }
1968 else
1969 {
1970 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
1971 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
1972 }
1973
1974 /* Now we have to build up the CALL_EXPR itself. */
1975 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1976 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1977 call_expr, arg_list, NULL_TREE);
1978 TREE_SIDE_EFFECTS (call_expr) = 1;
1979
1980 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
1981
1982 /* If we are initializing a readonly value, show the above call
1983 clobbered it. Otherwise, a load from it may erroneously be
1984 hoisted from a loop. */
1985 if (RTX_UNCHANGING_P (dst))
1986 emit_insn (gen_rtx_CLOBBER (VOIDmode, dst));
1987
1988 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
1989}
52cf7115 1990
4ca79136
RH
1991/* A subroutine of emit_block_move_via_libcall. Create the tree node
1992 for the function we use for block copies. The first time FOR_CALL
1993 is true, we call assemble_external. */
52cf7115 1994
4ca79136
RH
1995static GTY(()) tree block_move_fn;
1996
1997static tree
1998emit_block_move_libcall_fn (for_call)
1999 int for_call;
2000{
2001 static bool emitted_extern;
2002 tree fn = block_move_fn, args;
52cf7115 2003
4ca79136
RH
2004 if (!fn)
2005 {
2006 if (TARGET_MEM_FUNCTIONS)
52cf7115 2007 {
4ca79136
RH
2008 fn = get_identifier ("memcpy");
2009 args = build_function_type_list (ptr_type_node, ptr_type_node,
2010 const_ptr_type_node, sizetype,
2011 NULL_TREE);
2012 }
2013 else
2014 {
2015 fn = get_identifier ("bcopy");
2016 args = build_function_type_list (void_type_node, const_ptr_type_node,
2017 ptr_type_node, unsigned_type_node,
2018 NULL_TREE);
52cf7115
JL
2019 }
2020
4ca79136
RH
2021 fn = build_decl (FUNCTION_DECL, fn, args);
2022 DECL_EXTERNAL (fn) = 1;
2023 TREE_PUBLIC (fn) = 1;
2024 DECL_ARTIFICIAL (fn) = 1;
2025 TREE_NOTHROW (fn) = 1;
66c60e67 2026
4ca79136 2027 block_move_fn = fn;
bbf6f052 2028 }
e9a25f70 2029
4ca79136
RH
2030 if (for_call && !emitted_extern)
2031 {
2032 emitted_extern = true;
2033 make_decl_rtl (fn, NULL);
2034 assemble_external (fn);
2035 }
2036
2037 return fn;
bbf6f052 2038}
44bb111a
RH
2039
2040/* A subroutine of emit_block_move. Copy the data via an explicit
2041 loop. This is used only when libcalls are forbidden. */
2042/* ??? It'd be nice to copy in hunks larger than QImode. */
2043
2044static void
2045emit_block_move_via_loop (x, y, size, align)
2046 rtx x, y, size;
2047 unsigned int align ATTRIBUTE_UNUSED;
2048{
2049 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2050 enum machine_mode iter_mode;
2051
2052 iter_mode = GET_MODE (size);
2053 if (iter_mode == VOIDmode)
2054 iter_mode = word_mode;
2055
2056 top_label = gen_label_rtx ();
2057 cmp_label = gen_label_rtx ();
2058 iter = gen_reg_rtx (iter_mode);
2059
2060 emit_move_insn (iter, const0_rtx);
2061
2062 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2063 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2064 do_pending_stack_adjust ();
2065
2066 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2067
2068 emit_jump (cmp_label);
2069 emit_label (top_label);
2070
2071 tmp = convert_modes (Pmode, iter_mode, iter, true);
2072 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2073 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2074 x = change_address (x, QImode, x_addr);
2075 y = change_address (y, QImode, y_addr);
2076
2077 emit_move_insn (x, y);
2078
2079 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2080 true, OPTAB_LIB_WIDEN);
2081 if (tmp != iter)
2082 emit_move_insn (iter, tmp);
2083
2084 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2085 emit_label (cmp_label);
2086
2087 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2088 true, top_label);
2089
2090 emit_note (NULL, NOTE_INSN_LOOP_END);
2091}
bbf6f052
RK
2092\f
2093/* Copy all or part of a value X into registers starting at REGNO.
2094 The number of registers to be filled is NREGS. */
2095
2096void
2097move_block_to_reg (regno, x, nregs, mode)
2098 int regno;
2099 rtx x;
2100 int nregs;
2101 enum machine_mode mode;
2102{
2103 int i;
381127e8 2104#ifdef HAVE_load_multiple
3a94c984 2105 rtx pat;
381127e8
RL
2106 rtx last;
2107#endif
bbf6f052 2108
72bb9717
RK
2109 if (nregs == 0)
2110 return;
2111
bbf6f052
RK
2112 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2113 x = validize_mem (force_const_mem (mode, x));
2114
2115 /* See if the machine can do this with a load multiple insn. */
2116#ifdef HAVE_load_multiple
c3a02afe 2117 if (HAVE_load_multiple)
bbf6f052 2118 {
c3a02afe 2119 last = get_last_insn ();
38a448ca 2120 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
c3a02afe
RK
2121 GEN_INT (nregs));
2122 if (pat)
2123 {
2124 emit_insn (pat);
2125 return;
2126 }
2127 else
2128 delete_insns_since (last);
bbf6f052 2129 }
bbf6f052
RK
2130#endif
2131
2132 for (i = 0; i < nregs; i++)
38a448ca 2133 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
bbf6f052
RK
2134 operand_subword_force (x, i, mode));
2135}
2136
2137/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
0040593d
JW
2138 The number of registers to be filled is NREGS. SIZE indicates the number
2139 of bytes in the object X. */
2140
bbf6f052 2141void
0040593d 2142move_block_from_reg (regno, x, nregs, size)
bbf6f052
RK
2143 int regno;
2144 rtx x;
2145 int nregs;
0040593d 2146 int size;
bbf6f052
RK
2147{
2148 int i;
381127e8 2149#ifdef HAVE_store_multiple
3a94c984 2150 rtx pat;
381127e8
RL
2151 rtx last;
2152#endif
58a32c5c 2153 enum machine_mode mode;
bbf6f052 2154
2954d7db
RK
2155 if (nregs == 0)
2156 return;
2157
58a32c5c
DE
2158 /* If SIZE is that of a mode no bigger than a word, just use that
2159 mode's store operation. */
2160 if (size <= UNITS_PER_WORD
9ac3e73b 2161 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
58a32c5c 2162 {
792760b9 2163 emit_move_insn (adjust_address (x, mode, 0), gen_rtx_REG (mode, regno));
58a32c5c
DE
2164 return;
2165 }
3a94c984 2166
0040593d 2167 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
58a32c5c
DE
2168 to the left before storing to memory. Note that the previous test
2169 doesn't handle all cases (e.g. SIZE == 3). */
9ac3e73b 2170 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
0040593d
JW
2171 {
2172 rtx tem = operand_subword (x, 0, 1, BLKmode);
2173 rtx shift;
2174
2175 if (tem == 0)
2176 abort ();
2177
2178 shift = expand_shift (LSHIFT_EXPR, word_mode,
38a448ca 2179 gen_rtx_REG (word_mode, regno),
0040593d
JW
2180 build_int_2 ((UNITS_PER_WORD - size)
2181 * BITS_PER_UNIT, 0), NULL_RTX, 0);
2182 emit_move_insn (tem, shift);
2183 return;
2184 }
2185
bbf6f052
RK
2186 /* See if the machine can do this with a store multiple insn. */
2187#ifdef HAVE_store_multiple
c3a02afe 2188 if (HAVE_store_multiple)
bbf6f052 2189 {
c3a02afe 2190 last = get_last_insn ();
38a448ca 2191 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
c3a02afe
RK
2192 GEN_INT (nregs));
2193 if (pat)
2194 {
2195 emit_insn (pat);
2196 return;
2197 }
2198 else
2199 delete_insns_since (last);
bbf6f052 2200 }
bbf6f052
RK
2201#endif
2202
2203 for (i = 0; i < nregs; i++)
2204 {
2205 rtx tem = operand_subword (x, i, 1, BLKmode);
2206
2207 if (tem == 0)
2208 abort ();
2209
38a448ca 2210 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
2211 }
2212}
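
/* Worked example for the BYTES_BIG_ENDIAN shift above, assuming
   UNITS_PER_WORD == 4 and BITS_PER_UNIT == 8: for SIZE == 3 the value
   sits in the low-order bytes of the register, so it is shifted left by
   (4 - 3) * 8 = 8 bits; the subsequent word store then leaves the three
   meaningful bytes at the lowest memory addresses, left-justified as
   required. */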
2213
084a1106
JDA
2214/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2215 ORIG, where ORIG is a non-consecutive group of registers represented by
2216 a PARALLEL. The clone is identical to the original except in that the
2217 original set of registers is replaced by a new set of pseudo registers.
2218 The new set has the same modes as the original set. */
2219
2220rtx
2221gen_group_rtx (orig)
2222 rtx orig;
2223{
2224 int i, length;
2225 rtx *tmps;
2226
2227 if (GET_CODE (orig) != PARALLEL)
2228 abort ();
2229
2230 length = XVECLEN (orig, 0);
2231 tmps = (rtx *) alloca (sizeof (rtx) * length);
2232
2233 /* Skip a NULL entry in the first slot. */
2234 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2235
2236 if (i)
2237 tmps[0] = 0;
2238
2239 for (; i < length; i++)
2240 {
2241 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2242 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2243
2244 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2245 }
2246
2247 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2248}
2249
aac5cc16
RH
2250/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2251 registers represented by a PARALLEL. SSIZE represents the total size of
04050c69 2252 block SRC in bytes, or -1 if not known. */
d6a7951f 2253/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
aac5cc16
RH
2254 the balance will be in what would be the low-order memory addresses, i.e.
2255 left justified for big endian, right justified for little endian. This
2256 happens to be true for the targets currently using this support. If this
2257 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2258 would be needed. */
fffa9c1d
JW
2259
2260void
04050c69 2261emit_group_load (dst, orig_src, ssize)
aac5cc16 2262 rtx dst, orig_src;
729a2125 2263 int ssize;
fffa9c1d 2264{
aac5cc16
RH
2265 rtx *tmps, src;
2266 int start, i;
fffa9c1d 2267
aac5cc16 2268 if (GET_CODE (dst) != PARALLEL)
fffa9c1d
JW
2269 abort ();
2270
2271 /* Check for a NULL entry, used to indicate that the parameter goes
2272 both on the stack and in registers. */
aac5cc16
RH
2273 if (XEXP (XVECEXP (dst, 0, 0), 0))
2274 start = 0;
fffa9c1d 2275 else
aac5cc16
RH
2276 start = 1;
2277
3a94c984 2278 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
aac5cc16 2279
aac5cc16
RH
2280 /* Process the pieces. */
2281 for (i = start; i < XVECLEN (dst, 0); i++)
2282 {
2283 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
770ae6cc
RK
2284 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2285 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
2286 int shift = 0;
2287
2288 /* Handle trailing fragments that run over the size of the struct. */
8752c357 2289 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
aac5cc16
RH
2290 {
2291 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2292 bytelen = ssize - bytepos;
2293 if (bytelen <= 0)
729a2125 2294 abort ();
aac5cc16
RH
2295 }
2296
f3ce87a9
DE
2297 /* If we won't be loading directly from memory, protect the real source
2298 from strange tricks we might play; but make sure that the source can
2299 be loaded directly into the destination. */
2300 src = orig_src;
2301 if (GET_CODE (orig_src) != MEM
2302 && (!CONSTANT_P (orig_src)
2303 || (GET_MODE (orig_src) != mode
2304 && GET_MODE (orig_src) != VOIDmode)))
2305 {
2306 if (GET_MODE (orig_src) == VOIDmode)
2307 src = gen_reg_rtx (mode);
2308 else
2309 src = gen_reg_rtx (GET_MODE (orig_src));
04050c69 2310
f3ce87a9
DE
2311 emit_move_insn (src, orig_src);
2312 }
2313
aac5cc16
RH
2314 /* Optimize the access just a bit. */
2315 if (GET_CODE (src) == MEM
04050c69 2316 && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
729a2125 2317 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16
RH
2318 && bytelen == GET_MODE_SIZE (mode))
2319 {
2320 tmps[i] = gen_reg_rtx (mode);
f4ef873c 2321 emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
fffa9c1d 2322 }
7c4a6db0
JW
2323 else if (GET_CODE (src) == CONCAT)
2324 {
015b1ad1
JDA
2325 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2326 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2327
2328 if ((bytepos == 0 && bytelen == slen0)
2329 || (bytepos != 0 && bytepos + bytelen <= slen))
cbb92744 2330 {
015b1ad1
JDA
2331 /* The following assumes that the concatenated objects all
2332 have the same size. In this case, a simple calculation
2333 can be used to determine the object and the bit field
2334 to be extracted. */
2335 tmps[i] = XEXP (src, bytepos / slen0);
cbb92744
JJ
2336 if (! CONSTANT_P (tmps[i])
2337 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2338 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
015b1ad1
JDA
2339 (bytepos % slen0) * BITS_PER_UNIT,
2340 1, NULL_RTX, mode, mode, ssize);
cbb92744 2341 }
58f69841
JH
2342 else if (bytepos == 0)
2343 {
015b1ad1 2344 rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
58f69841 2345 emit_move_insn (mem, src);
04050c69 2346 tmps[i] = adjust_address (mem, mode, 0);
58f69841 2347 }
7c4a6db0
JW
2348 else
2349 abort ();
2350 }
f3ce87a9 2351 else if (CONSTANT_P (src)
2ee5437b
RH
2352 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2353 tmps[i] = src;
fffa9c1d 2354 else
19caa751
RK
2355 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2356 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
04050c69 2357 mode, mode, ssize);
fffa9c1d 2358
aac5cc16 2359 if (BYTES_BIG_ENDIAN && shift)
19caa751
RK
2360 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2361 tmps[i], 0, OPTAB_WIDEN);
fffa9c1d 2362 }
19caa751 2363
3a94c984 2364 emit_queue ();
aac5cc16
RH
2365
2366 /* Copy the extracted pieces into the proper (probable) hard regs. */
2367 for (i = start; i < XVECLEN (dst, 0); i++)
2368 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
fffa9c1d
JW
2369}
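
/* A sketch, with hypothetical modes and hard register numbers, of the
   PARALLEL shape emit_group_load consumes: each element is an EXPR_LIST
   of a register and its byte offset within the block. Here a 16-byte
   block is split across two DImode registers at offsets 0 and 8. */

static void
example_group_load (src)
     rtx src;
{
  rtx dst
    = gen_rtx_PARALLEL (BLKmode,
			gen_rtvec (2,
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode, 4),
						      GEN_INT (0)),
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode, 5),
						      GEN_INT (8))));

  emit_group_load (dst, src, 16);
}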
2370
084a1106
JDA
2371/* Emit code to move a block SRC to block DST, where SRC and DST are
2372 non-consecutive groups of registers, each represented by a PARALLEL. */
2373
2374void
2375emit_group_move (dst, src)
2376 rtx dst, src;
2377{
2378 int i;
2379
2380 if (GET_CODE (src) != PARALLEL
2381 || GET_CODE (dst) != PARALLEL
2382 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2383 abort ();
2384
2385 /* Skip first entry if NULL. */
2386 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2387 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2388 XEXP (XVECEXP (src, 0, i), 0));
2389}
2390
aac5cc16
RH
2391/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2392 registers represented by a PARALLEL. SSIZE represents the total size of
04050c69 2393 block DST, or -1 if not known. */
fffa9c1d
JW
2394
2395void
04050c69 2396emit_group_store (orig_dst, src, ssize)
aac5cc16 2397 rtx orig_dst, src;
729a2125 2398 int ssize;
fffa9c1d 2399{
aac5cc16
RH
2400 rtx *tmps, dst;
2401 int start, i;
fffa9c1d 2402
aac5cc16 2403 if (GET_CODE (src) != PARALLEL)
fffa9c1d
JW
2404 abort ();
2405
2406 /* Check for a NULL entry, used to indicate that the parameter goes
2407 both on the stack and in registers. */
aac5cc16
RH
2408 if (XEXP (XVECEXP (src, 0, 0), 0))
2409 start = 0;
fffa9c1d 2410 else
aac5cc16
RH
2411 start = 1;
2412
3a94c984 2413 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
fffa9c1d 2414
aac5cc16
RH
2415 /* Copy the (probable) hard regs into pseudos. */
2416 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 2417 {
aac5cc16
RH
2418 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2419 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2420 emit_move_insn (tmps[i], reg);
2421 }
3a94c984 2422 emit_queue ();
fffa9c1d 2423
aac5cc16
RH
2424 /* If we won't be storing directly into memory, protect the real destination
2425 from strange tricks we might play. */
2426 dst = orig_dst;
10a9f2be
JW
2427 if (GET_CODE (dst) == PARALLEL)
2428 {
2429 rtx temp;
2430
2431 /* We can get a PARALLEL dst if there is a conditional expression in
2432 a return statement. In that case, the dst and src are the same,
2433 so no action is necessary. */
2434 if (rtx_equal_p (dst, src))
2435 return;
2436
2437 /* It is unclear if we can ever reach here, but we may as well handle
2438 it. Allocate a temporary, and split this into a store/load to/from
2439 the temporary. */
2440
2441 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
04050c69
RK
2442 emit_group_store (temp, src, ssize);
2443 emit_group_load (dst, temp, ssize);
10a9f2be
JW
2444 return;
2445 }
75897075 2446 else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
aac5cc16
RH
2447 {
2448 dst = gen_reg_rtx (GET_MODE (orig_dst));
2449 /* Make life a bit easier for combine. */
8ae91fc0 2450 emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
aac5cc16 2451 }
aac5cc16
RH
2452
2453 /* Process the pieces. */
2454 for (i = start; i < XVECLEN (src, 0); i++)
2455 {
770ae6cc 2456 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
aac5cc16 2457 enum machine_mode mode = GET_MODE (tmps[i]);
770ae6cc 2458 unsigned int bytelen = GET_MODE_SIZE (mode);
6ddae612 2459 rtx dest = dst;
aac5cc16
RH
2460
2461 /* Handle trailing fragments that run over the size of the struct. */
8752c357 2462 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
71bc0330 2463 {
aac5cc16
RH
2464 if (BYTES_BIG_ENDIAN)
2465 {
2466 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2467 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2468 tmps[i], 0, OPTAB_WIDEN);
2469 }
2470 bytelen = ssize - bytepos;
71bc0330 2471 }
fffa9c1d 2472
6ddae612
JJ
2473 if (GET_CODE (dst) == CONCAT)
2474 {
2475 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2476 dest = XEXP (dst, 0);
2477 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2478 {
2479 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2480 dest = XEXP (dst, 1);
2481 }
0d446150
JH
2482 else if (bytepos == 0 && XVECLEN (src, 0))
2483 {
2484 dest = assign_stack_temp (GET_MODE (dest),
2485 GET_MODE_SIZE (GET_MODE (dest)), 0);
2486 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2487 tmps[i]);
2488 dst = dest;
2489 break;
2490 }
6ddae612
JJ
2491 else
2492 abort ();
2493 }
2494
aac5cc16 2495 /* Optimize the access just a bit. */
6ddae612
JJ
2496 if (GET_CODE (dest) == MEM
2497 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
729a2125 2498 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16 2499 && bytelen == GET_MODE_SIZE (mode))
6ddae612 2500 emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
aac5cc16 2501 else
6ddae612 2502 store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
04050c69 2503 mode, tmps[i], ssize);
fffa9c1d 2504 }
729a2125 2505
3a94c984 2506 emit_queue ();
aac5cc16
RH
2507
2508 /* Copy from the pseudo into the (probable) hard reg. */
0d446150 2509 if (orig_dst != dst)
aac5cc16 2510 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
2511}
2512
c36fce9a
GRK
2513/* Generate code to copy a BLKmode object of TYPE out of a
2514 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2515 is null, a stack temporary is created. TGTBLK is returned.
2516
2517 The primary purpose of this routine is to handle functions
2518 that return BLKmode structures in registers. Some machines
2519 (the PA for example) want to return all small structures
3a94c984 2520 in registers regardless of the structure's alignment. */
c36fce9a
GRK
2521
2522rtx
19caa751 2523copy_blkmode_from_reg (tgtblk, srcreg, type)
c36fce9a
GRK
2524 rtx tgtblk;
2525 rtx srcreg;
2526 tree type;
2527{
19caa751
RK
2528 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2529 rtx src = NULL, dst = NULL;
2530 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2531 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2532
2533 if (tgtblk == 0)
2534 {
1da68f56
RK
2535 tgtblk = assign_temp (build_qualified_type (type,
2536 (TYPE_QUALS (type)
2537 | TYPE_QUAL_CONST)),
2538 0, 1, 1);
19caa751
RK
2539 preserve_temp_slots (tgtblk);
2540 }
3a94c984 2541
1ed1b4fb 2542 /* This code assumes srcreg is at least a full word. If it isn't, copy it
9ac3e73b 2543 into a new pseudo which is a full word. */
0d7839da 2544
19caa751
RK
2545 if (GET_MODE (srcreg) != BLKmode
2546 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
9ac3e73b 2547 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
19caa751
RK
2548
2549 /* Structures whose size is not a multiple of a word are aligned
2550 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2551 machine, this means we must skip the empty high order bytes when
2552 calculating the bit offset. */
0d7839da 2553 if (BYTES_BIG_ENDIAN
0d7839da 2554 && bytes % UNITS_PER_WORD)
19caa751
RK
2555 big_endian_correction
2556 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2557
2558 /* Copy the structure BITSIZE bits at a time.
3a94c984 2559
19caa751
RK
2560 We could probably emit more efficient code for machines which do not use
2561 strict alignment, but it doesn't seem worth the effort at the current
2562 time. */
2563 for (bitpos = 0, xbitpos = big_endian_correction;
2564 bitpos < bytes * BITS_PER_UNIT;
2565 bitpos += bitsize, xbitpos += bitsize)
2566 {
3a94c984 2567 /* We need a new source operand each time xbitpos is on a
19caa751
RK
2568 word boundary and when xbitpos == big_endian_correction
2569 (the first time through). */
2570 if (xbitpos % BITS_PER_WORD == 0
2571 || xbitpos == big_endian_correction)
b47f8cfc
JH
2572 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2573 GET_MODE (srcreg));
19caa751
RK
2574
2575 /* We need a new destination operand each time bitpos is on
2576 a word boundary. */
2577 if (bitpos % BITS_PER_WORD == 0)
2578 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
3a94c984 2579
19caa751
RK
2580 /* Use xbitpos for the source extraction (right justified) and
2581 bitpos for the destination store (left justified). */
2582 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2583 extract_bit_field (src, bitsize,
2584 xbitpos % BITS_PER_WORD, 1,
2585 NULL_RTX, word_mode, word_mode,
04050c69
RK
2586 BITS_PER_WORD),
2587 BITS_PER_WORD);
19caa751
RK
2588 }
2589
2590 return tgtblk;
c36fce9a
GRK
2591}
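
/* Worked example for BIG_ENDIAN_CORRECTION above, with illustrative
   numbers: for BITS_PER_WORD == 32, UNITS_PER_WORD == 4 and a 6-byte
   structure, bytes % UNITS_PER_WORD is 2, so the correction is
   32 - 2 * 8 == 16. The first extraction therefore starts 16 bits into
   the source word, skipping the empty high-order bytes on a
   BYTES_BIG_ENDIAN machine. */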
2592
94b25f81
RK
2593/* Add a USE expression for REG to the (possibly empty) list pointed
2594 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2595
2596void
b3f8cf4a
RK
2597use_reg (call_fusage, reg)
2598 rtx *call_fusage, reg;
2599{
0304dfbb
DE
2600 if (GET_CODE (reg) != REG
2601 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3a94c984 2602 abort ();
b3f8cf4a
RK
2603
2604 *call_fusage
38a448ca
RH
2605 = gen_rtx_EXPR_LIST (VOIDmode,
2606 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2607}
2608
94b25f81
RK
2609/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2610 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2611
2612void
0304dfbb
DE
2613use_regs (call_fusage, regno, nregs)
2614 rtx *call_fusage;
bbf6f052
RK
2615 int regno;
2616 int nregs;
2617{
0304dfbb 2618 int i;
bbf6f052 2619
0304dfbb
DE
2620 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2621 abort ();
2622
2623 for (i = 0; i < nregs; i++)
e50126e8 2624 use_reg (call_fusage, regno_reg_rtx[regno + i]);
bbf6f052 2625}
fffa9c1d
JW
2626
2627/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2628 PARALLEL REGS. This is for calls that pass values in multiple
2629 non-contiguous locations. The Irix 6 ABI has examples of this. */
2630
2631void
2632use_group_regs (call_fusage, regs)
2633 rtx *call_fusage;
2634 rtx regs;
2635{
2636 int i;
2637
6bd35f86
DE
2638 for (i = 0; i < XVECLEN (regs, 0); i++)
2639 {
2640 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2641
6bd35f86
DE
2642 /* A NULL entry means the parameter goes both on the stack and in
2643 registers. This can also be a MEM for targets that pass values
2644 partially on the stack and partially in registers. */
e9a25f70 2645 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
2646 use_reg (call_fusage, reg);
2647 }
fffa9c1d 2648}
bbf6f052 2649\f
57814e5e 2650
cf5124f6
RS
2651/* Determine whether the LEN bytes generated by CONSTFUN can be
2652 stored to memory using several move instructions. CONSTFUNDATA is
2653 a pointer which will be passed as argument in every CONSTFUN call.
2654 ALIGN is the maximum alignment we can assume. Return nonzero if a
2655 call to store_by_pieces should succeed. */
2656
57814e5e
JJ
2657int
2658can_store_by_pieces (len, constfun, constfundata, align)
2659 unsigned HOST_WIDE_INT len;
2660 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2661 PTR constfundata;
2662 unsigned int align;
2663{
98166639 2664 unsigned HOST_WIDE_INT max_size, l;
57814e5e
JJ
2665 HOST_WIDE_INT offset = 0;
2666 enum machine_mode mode, tmode;
2667 enum insn_code icode;
2668 int reverse;
2669 rtx cst;
2670
4977bab6 2671 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2672 return 0;
2673
2674 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2675 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2676 align = MOVE_MAX * BITS_PER_UNIT;
2677
2678 /* We would first store what we can in the largest integer mode, then go to
2679 successively smaller modes. */
2680
2681 for (reverse = 0;
2682 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2683 reverse++)
2684 {
2685 l = len;
2686 mode = VOIDmode;
cf5124f6 2687 max_size = STORE_MAX_PIECES + 1;
57814e5e
JJ
2688 while (max_size > 1)
2689 {
2690 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2691 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2692 if (GET_MODE_SIZE (tmode) < max_size)
2693 mode = tmode;
2694
2695 if (mode == VOIDmode)
2696 break;
2697
2698 icode = mov_optab->handlers[(int) mode].insn_code;
2699 if (icode != CODE_FOR_nothing
2700 && align >= GET_MODE_ALIGNMENT (mode))
2701 {
2702 unsigned int size = GET_MODE_SIZE (mode);
2703
2704 while (l >= size)
2705 {
2706 if (reverse)
2707 offset -= size;
2708
2709 cst = (*constfun) (constfundata, offset, mode);
2710 if (!LEGITIMATE_CONSTANT_P (cst))
2711 return 0;
2712
2713 if (!reverse)
2714 offset += size;
2715
2716 l -= size;
2717 }
2718 }
2719
2720 max_size = GET_MODE_SIZE (mode);
2721 }
2722
2723 /* The code above should have handled everything. */
2724 if (l != 0)
2725 abort ();
2726 }
2727
2728 return 1;
2729}
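
/* A sketch of a CONSTFUN callback and its use; EXAMPLE_CONSTFUN and
   EXAMPLE_STORE are hypothetical (the real callers live elsewhere in
   the compiler). The callback must return an rtx constant of MODE
   describing the bytes at OFFSET; returning CONST0_RTX stores zeros,
   exactly as clear_by_pieces_1 does below. */

static rtx
example_constfun (data, offset, mode)
     PTR data ATTRIBUTE_UNUSED;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  return CONST0_RTX (mode);
}

static void
example_store (to, len, align)
     rtx to;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  /* Guard with can_store_by_pieces; store_by_pieces aborts when the
     STORE_BY_PIECES_P test fails. */
  if (can_store_by_pieces (len, example_constfun, NULL, align))
    store_by_pieces (to, len, example_constfun, NULL, align);
}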
2730
2731/* Generate several move instructions to store LEN bytes generated by
2732 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2733 pointer which will be passed as argument in every CONSTFUN call.
2734 ALIGN is the maximum alignment we can assume. */
2735
2736void
2737store_by_pieces (to, len, constfun, constfundata, align)
2738 rtx to;
2739 unsigned HOST_WIDE_INT len;
2740 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2741 PTR constfundata;
2742 unsigned int align;
2743{
2744 struct store_by_pieces data;
2745
4977bab6 2746 if (! STORE_BY_PIECES_P (len, align))
57814e5e
JJ
2747 abort ();
2748 to = protect_from_queue (to, 1);
2749 data.constfun = constfun;
2750 data.constfundata = constfundata;
2751 data.len = len;
2752 data.to = to;
2753 store_by_pieces_1 (&data, align);
2754}
2755
19caa751
RK
2756/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2757 rtx with BLKmode). The caller must pass TO through protect_from_queue
2758 before calling. ALIGN is the maximum alignment we can assume. */
9de08200
RK
2759
2760static void
2761clear_by_pieces (to, len, align)
2762 rtx to;
3bdf5ad1 2763 unsigned HOST_WIDE_INT len;
729a2125 2764 unsigned int align;
9de08200 2765{
57814e5e
JJ
2766 struct store_by_pieces data;
2767
2768 data.constfun = clear_by_pieces_1;
df4ae160 2769 data.constfundata = NULL;
57814e5e
JJ
2770 data.len = len;
2771 data.to = to;
2772 store_by_pieces_1 (&data, align);
2773}
2774
2775/* Callback routine for clear_by_pieces.
2776 Return const0_rtx unconditionally. */
2777
2778static rtx
2779clear_by_pieces_1 (data, offset, mode)
2780 PTR data ATTRIBUTE_UNUSED;
2781 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2782 enum machine_mode mode ATTRIBUTE_UNUSED;
2783{
2784 return const0_rtx;
2785}
2786
2787/* Subroutine of clear_by_pieces and store_by_pieces.
2788 Generate several move instructions to store LEN bytes of block TO. (A MEM
2789 rtx with BLKmode). The caller must pass TO through protect_from_queue
2790 before calling. ALIGN is the maximum alignment we can assume. */
2791
2792static void
2793store_by_pieces_1 (data, align)
2794 struct store_by_pieces *data;
2795 unsigned int align;
2796{
2797 rtx to_addr = XEXP (data->to, 0);
cf5124f6 2798 unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
fbe1758d
AM
2799 enum machine_mode mode = VOIDmode, tmode;
2800 enum insn_code icode;
9de08200 2801
57814e5e
JJ
2802 data->offset = 0;
2803 data->to_addr = to_addr;
2804 data->autinc_to
9de08200
RK
2805 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2806 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2807
57814e5e
JJ
2808 data->explicit_inc_to = 0;
2809 data->reverse
9de08200 2810 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
57814e5e
JJ
2811 if (data->reverse)
2812 data->offset = data->len;
9de08200 2813
57814e5e 2814 /* If storing requires more than two move insns,
9de08200
RK
2815 copy addresses to registers (to make displacements shorter)
2816 and use post-increment if available. */
57814e5e
JJ
2817 if (!data->autinc_to
2818 && move_by_pieces_ninsns (data->len, align) > 2)
9de08200 2819 {
3a94c984 2820 /* Determine the main mode we'll be using. */
fbe1758d
AM
2821 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2822 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2823 if (GET_MODE_SIZE (tmode) < max_size)
2824 mode = tmode;
2825
57814e5e 2826 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
9de08200 2827 {
57814e5e
JJ
2828 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2829 data->autinc_to = 1;
2830 data->explicit_inc_to = -1;
9de08200 2831 }
3bdf5ad1 2832
57814e5e
JJ
2833 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2834 && ! data->autinc_to)
9de08200 2835 {
57814e5e
JJ
2836 data->to_addr = copy_addr_to_reg (to_addr);
2837 data->autinc_to = 1;
2838 data->explicit_inc_to = 1;
9de08200 2839 }
3bdf5ad1 2840
57814e5e
JJ
2841 if (!data->autinc_to && CONSTANT_P (to_addr))
2842 data->to_addr = copy_addr_to_reg (to_addr);
9de08200
RK
2843 }
2844
e1565e65 2845 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751 2846 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
bdb429a5 2847 align = MOVE_MAX * BITS_PER_UNIT;
9de08200 2848
57814e5e 2849 /* First store what we can in the largest integer mode, then go to
9de08200
RK
2850 successively smaller modes. */
2851
2852 while (max_size > 1)
2853 {
9de08200
RK
2854 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2855 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2856 if (GET_MODE_SIZE (tmode) < max_size)
2857 mode = tmode;
2858
2859 if (mode == VOIDmode)
2860 break;
2861
2862 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 2863 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
57814e5e 2864 store_by_pieces_2 (GEN_FCN (icode), mode, data);
9de08200
RK
2865
2866 max_size = GET_MODE_SIZE (mode);
2867 }
2868
2869 /* The code above should have handled everything. */
57814e5e 2870 if (data->len != 0)
9de08200
RK
2871 abort ();
2872}
2873
57814e5e 2874/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
9de08200
RK
2875 with move instructions for mode MODE. GENFUN is the gen_... function
2876 to make a move insn for that mode. DATA has all the other info. */
2877
2878static void
57814e5e 2879store_by_pieces_2 (genfun, mode, data)
711d877c 2880 rtx (*genfun) PARAMS ((rtx, ...));
9de08200 2881 enum machine_mode mode;
57814e5e 2882 struct store_by_pieces *data;
9de08200 2883{
3bdf5ad1 2884 unsigned int size = GET_MODE_SIZE (mode);
57814e5e 2885 rtx to1, cst;
9de08200
RK
2886
2887 while (data->len >= size)
2888 {
3bdf5ad1
RK
2889 if (data->reverse)
2890 data->offset -= size;
9de08200 2891
3bdf5ad1 2892 if (data->autinc_to)
630036c6
JJ
2893 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2894 data->offset);
3a94c984 2895 else
f4ef873c 2896 to1 = adjust_address (data->to, mode, data->offset);
9de08200 2897
940da324 2898 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
57814e5e
JJ
2899 emit_insn (gen_add2_insn (data->to_addr,
2900 GEN_INT (-(HOST_WIDE_INT) size)));
9de08200 2901
57814e5e
JJ
2902 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2903 emit_insn ((*genfun) (to1, cst));
3bdf5ad1 2904
940da324 2905 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2906 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
9de08200 2907
3bdf5ad1
RK
2908 if (! data->reverse)
2909 data->offset += size;
9de08200
RK
2910
2911 data->len -= size;
2912 }
2913}
2914\f
19caa751 2915/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
8ac61af7 2916 its length in bytes. */
e9a25f70
JL
2917
2918rtx
8ac61af7 2919clear_storage (object, size)
bbf6f052 2920 rtx object;
4c08eef0 2921 rtx size;
bbf6f052 2922{
e9a25f70 2923 rtx retval = 0;
8ac61af7
RK
2924 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
2925 : GET_MODE_ALIGNMENT (GET_MODE (object)));
e9a25f70 2926
fcf1b822
RK
2927 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2928 just move a zero. Otherwise, do this a piece at a time. */
69ef87e2 2929 if (GET_MODE (object) != BLKmode
fcf1b822 2930 && GET_CODE (size) == CONST_INT
4ca79136 2931 && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
fcf1b822
RK
2932 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2933 else
bbf6f052 2934 {
9de08200
RK
2935 object = protect_from_queue (object, 1);
2936 size = protect_from_queue (size, 0);
2937
2938 if (GET_CODE (size) == CONST_INT
78762e3b 2939 && CLEAR_BY_PIECES_P (INTVAL (size), align))
9de08200 2940 clear_by_pieces (object, INTVAL (size), align);
4ca79136
RH
2941 else if (clear_storage_via_clrstr (object, size, align))
2942 ;
9de08200 2943 else
4ca79136
RH
2944 retval = clear_storage_via_libcall (object, size);
2945 }
2946
2947 return retval;
2948}
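
/* Usage sketch with a hypothetical caller: OBJECT is assumed to be a
   BLKmode MEM. The return value is memset's result when a libcall was
   emitted, 0 when the clear was done inline (by pieces or via a clrstr
   pattern). */

static void
example_clear_8_bytes (object)
     rtx object;
{
  rtx ret ATTRIBUTE_UNUSED = clear_storage (object, GEN_INT (8));
}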
2949
2950/* A subroutine of clear_storage. Expand a clrstr pattern;
2951 return true if successful. */
2952
2953static bool
2954clear_storage_via_clrstr (object, size, align)
2955 rtx object, size;
2956 unsigned int align;
2957{
2958 /* Try the most limited insn first, because there's no point
2959 including more than one in the machine description unless
2960 the more limited one has some advantage. */
2961
2962 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2963 enum machine_mode mode;
2964
2965 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2966 mode = GET_MODE_WIDER_MODE (mode))
2967 {
2968 enum insn_code code = clrstr_optab[(int) mode];
2969 insn_operand_predicate_fn pred;
2970
2971 if (code != CODE_FOR_nothing
2972 /* We don't need MODE to be narrower than
2973 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2974 the mode mask, as it is returned by the macro, it will
2975 definitely be less than the actual mode mask. */
2976 && ((GET_CODE (size) == CONST_INT
2977 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2978 <= (GET_MODE_MASK (mode) >> 1)))
2979 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2980 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2981 || (*pred) (object, BLKmode))
2982 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2983 || (*pred) (opalign, VOIDmode)))
9de08200 2984 {
4ca79136
RH
2985 rtx op1;
2986 rtx last = get_last_insn ();
2987 rtx pat;
9de08200 2988
4ca79136
RH
2989 op1 = convert_to_mode (mode, size, 1);
2990 pred = insn_data[(int) code].operand[1].predicate;
2991 if (pred != 0 && ! (*pred) (op1, mode))
2992 op1 = copy_to_mode_reg (mode, op1);
9de08200 2993
4ca79136
RH
2994 pat = GEN_FCN ((int) code) (object, op1, opalign);
2995 if (pat)
9de08200 2996 {
4ca79136
RH
2997 emit_insn (pat);
2998 return true;
2999 }
3000 else
3001 delete_insns_since (last);
3002 }
3003 }
9de08200 3004
4ca79136
RH
3005 return false;
3006}
9de08200 3007
4ca79136
RH
3008/* A subroutine of clear_storage. Expand a call to memset or bzero.
3009 Return the return value of memset, 0 otherwise. */
9de08200 3010
4ca79136
RH
3011static rtx
3012clear_storage_via_libcall (object, size)
3013 rtx object, size;
3014{
3015 tree call_expr, arg_list, fn, object_tree, size_tree;
3016 enum machine_mode size_mode;
3017 rtx retval;
9de08200 3018
4ca79136 3019 /* OBJECT or SIZE may have been passed through protect_from_queue.
52cf7115 3020
4ca79136
RH
3021 It is unsafe to save the value generated by protect_from_queue
3022 and reuse it later. Consider what happens if emit_queue is
3023 called before the return value from protect_from_queue is used.
52cf7115 3024
4ca79136
RH
3025 Expansion of the CALL_EXPR below will call emit_queue before
3026 we are finished emitting RTL for argument setup. So if we are
3027 not careful we could get the wrong value for an argument.
52cf7115 3028
4ca79136
RH
3029 To avoid this problem we go ahead and emit code to copy OBJECT
3030 and SIZE into new pseudos. We can then place those new pseudos
3031 into an RTL_EXPR and use them later, even after a call to
3032 emit_queue.
52cf7115 3033
4ca79136
RH
3034 Note this is not strictly needed for library calls since they
3035 do not call emit_queue before loading their arguments. However,
3036 we may need to have library calls call emit_queue in the future
3037 since failing to do so could cause problems for targets which
3038 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
52cf7115 3039
4ca79136 3040 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 3041
4ca79136
RH
3042 if (TARGET_MEM_FUNCTIONS)
3043 size_mode = TYPE_MODE (sizetype);
3044 else
3045 size_mode = TYPE_MODE (unsigned_type_node);
3046 size = convert_to_mode (size_mode, size, 1);
3047 size = copy_to_mode_reg (size_mode, size);
52cf7115 3048
4ca79136
RH
3049 /* It is incorrect to use the libcall calling conventions to call
3050 memset in this context. This could be a user call to memset and
3051 the user may wish to examine the return value from memset. For
3052 targets where libcalls and normal calls have different conventions
3053 for returning pointers, we could end up generating incorrect code.
4bc973ae 3054
4ca79136 3055 For convenience, we generate the call to bzero this way as well. */
4bc973ae 3056
4ca79136
RH
3057 object_tree = make_tree (ptr_type_node, object);
3058 if (TARGET_MEM_FUNCTIONS)
3059 size_tree = make_tree (sizetype, size);
3060 else
3061 size_tree = make_tree (unsigned_type_node, size);
3062
3063 fn = clear_storage_libcall_fn (true);
3064 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3065 if (TARGET_MEM_FUNCTIONS)
3066 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3067 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3068
3069 /* Now we have to build up the CALL_EXPR itself. */
3070 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3071 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3072 call_expr, arg_list, NULL_TREE);
3073 TREE_SIDE_EFFECTS (call_expr) = 1;
3074
3075 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3076
3077 /* If we are initializing a readonly value, show the above call
3078 clobbered it. Otherwise, a load from it may erroneously be
3079 hoisted from a loop. */
3080 if (RTX_UNCHANGING_P (object))
3081 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3082
3083 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3084}
3085
3086/* A subroutine of clear_storage_via_libcall. Create the tree node
3087 for the function we use for block clears. The first time FOR_CALL
3088 is true, we call assemble_external. */
3089
3090static GTY(()) tree block_clear_fn;
66c60e67 3091
4ca79136
RH
3092static tree
3093clear_storage_libcall_fn (for_call)
3094 int for_call;
3095{
3096 static bool emitted_extern;
3097 tree fn = block_clear_fn, args;
3098
3099 if (!fn)
3100 {
3101 if (TARGET_MEM_FUNCTIONS)
3102 {
3103 fn = get_identifier ("memset");
3104 args = build_function_type_list (ptr_type_node, ptr_type_node,
3105 integer_type_node, sizetype,
3106 NULL_TREE);
3107 }
3108 else
3109 {
3110 fn = get_identifier ("bzero");
3111 args = build_function_type_list (void_type_node, ptr_type_node,
3112 unsigned_type_node, NULL_TREE);
9de08200 3113 }
4ca79136
RH
3114
3115 fn = build_decl (FUNCTION_DECL, fn, args);
3116 DECL_EXTERNAL (fn) = 1;
3117 TREE_PUBLIC (fn) = 1;
3118 DECL_ARTIFICIAL (fn) = 1;
3119 TREE_NOTHROW (fn) = 1;
3120
3121 block_clear_fn = fn;
bbf6f052 3122 }
e9a25f70 3123
4ca79136
RH
3124 if (for_call && !emitted_extern)
3125 {
3126 emitted_extern = true;
3127 make_decl_rtl (fn, NULL);
3128 assemble_external (fn);
3129 }
bbf6f052 3130
4ca79136
RH
3131 return fn;
3132}
3133\f
bbf6f052
RK
3134/* Generate code to copy Y into X.
3135 Both Y and X must have the same mode, except that
3136 Y can be a constant with VOIDmode.
3137 This mode cannot be BLKmode; use emit_block_move for that.
3138
3139 Return the last instruction emitted. */
3140
3141rtx
3142emit_move_insn (x, y)
3143 rtx x, y;
3144{
3145 enum machine_mode mode = GET_MODE (x);
de1b33dd
AO
3146 rtx y_cst = NULL_RTX;
3147 rtx last_insn;
bbf6f052
RK
3148
3149 x = protect_from_queue (x, 1);
3150 y = protect_from_queue (y, 0);
3151
3152 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3153 abort ();
3154
ee5332b8
RH
3155 /* Never force constant_p_rtx to memory. */
3156 if (GET_CODE (y) == CONSTANT_P_RTX)
3157 ;
51286de6 3158 else if (CONSTANT_P (y))
de1b33dd 3159 {
51286de6 3160 if (optimize
075fc17a 3161 && SCALAR_FLOAT_MODE_P (GET_MODE (x))
51286de6
RH
3162 && (last_insn = compress_float_constant (x, y)))
3163 return last_insn;
3164
3165 if (!LEGITIMATE_CONSTANT_P (y))
3166 {
3167 y_cst = y;
3168 y = force_const_mem (mode, y);
3a04ff64
RH
3169
3170 /* If the target's cannot_force_const_mem prevented the spill,
3171 assume that the target's move expanders will also take care
3172 of the non-legitimate constant. */
3173 if (!y)
3174 y = y_cst;
51286de6 3175 }
de1b33dd 3176 }
bbf6f052
RK
3177
3178 /* If X or Y are memory references, verify that their addresses are valid
3179 for the machine. */
3180 if (GET_CODE (x) == MEM
3181 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3182 && ! push_operand (x, GET_MODE (x)))
3183 || (flag_force_addr
3184 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
792760b9 3185 x = validize_mem (x);
bbf6f052
RK
3186
3187 if (GET_CODE (y) == MEM
3188 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3189 || (flag_force_addr
3190 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
792760b9 3191 y = validize_mem (y);
bbf6f052
RK
3192
3193 if (mode == BLKmode)
3194 abort ();
3195
de1b33dd
AO
3196 last_insn = emit_move_insn_1 (x, y);
3197
3198 if (y_cst && GET_CODE (x) == REG)
3d238248 3199 set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
de1b33dd
AO
3200
3201 return last_insn;
261c4230
RS
3202}
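
/* Usage sketch (hypothetical helper): emit_move_insn copes with
   constants that fail LEGITIMATE_CONSTANT_P by spilling them to the
   constant pool, as above, so callers need not check first. */

static rtx
example_load_constant ()
{
  rtx reg = gen_reg_rtx (SImode);

  emit_move_insn (reg, GEN_INT (42));
  return reg;
}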
3203
3204/* Low level part of emit_move_insn.
3205 Called just like emit_move_insn, but assumes X and Y
3206 are basically valid. */
3207
3208rtx
3209emit_move_insn_1 (x, y)
3210 rtx x, y;
3211{
3212 enum machine_mode mode = GET_MODE (x);
3213 enum machine_mode submode;
3214 enum mode_class class = GET_MODE_CLASS (mode);
261c4230 3215
dbbbbf3b 3216 if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3a94c984 3217 abort ();
76bbe028 3218
bbf6f052
RK
3219 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3220 return
3221 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3222
89742723 3223 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 3224 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
27e58a70 3225 && BLKmode != (submode = GET_MODE_INNER (mode))
7308a047
RS
3226 && (mov_optab->handlers[(int) submode].insn_code
3227 != CODE_FOR_nothing))
3228 {
3229 /* Don't split destination if it is a stack push. */
3230 int stack = push_operand (x, GET_MODE (x));
7308a047 3231
79ce92d7 3232#ifdef PUSH_ROUNDING
1a06f5fe
JH
3233 /* In case we output to the stack, but the size is smaller than the
3234 machine can push exactly, we need to use move instructions. */
3235 if (stack
bb93b973
RK
3236 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3237 != GET_MODE_SIZE (submode)))
1a06f5fe
JH
3238 {
3239 rtx temp;
bb93b973 3240 HOST_WIDE_INT offset1, offset2;
1a06f5fe
JH
3241
3242 /* Do not use anti_adjust_stack, since we don't want to update
3243 stack_pointer_delta. */
3244 temp = expand_binop (Pmode,
3245#ifdef STACK_GROWS_DOWNWARD
3246 sub_optab,
3247#else
3248 add_optab,
3249#endif
3250 stack_pointer_rtx,
3251 GEN_INT
bb93b973
RK
3252 (PUSH_ROUNDING
3253 (GET_MODE_SIZE (GET_MODE (x)))),
3254 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3255
1a06f5fe
JH
3256 if (temp != stack_pointer_rtx)
3257 emit_move_insn (stack_pointer_rtx, temp);
bb93b973 3258
1a06f5fe
JH
3259#ifdef STACK_GROWS_DOWNWARD
3260 offset1 = 0;
3261 offset2 = GET_MODE_SIZE (submode);
3262#else
3263 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3264 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3265 + GET_MODE_SIZE (submode));
3266#endif
bb93b973 3267
1a06f5fe
JH
3268 emit_move_insn (change_address (x, submode,
3269 gen_rtx_PLUS (Pmode,
3270 stack_pointer_rtx,
3271 GEN_INT (offset1))),
3272 gen_realpart (submode, y));
3273 emit_move_insn (change_address (x, submode,
3274 gen_rtx_PLUS (Pmode,
3275 stack_pointer_rtx,
3276 GEN_INT (offset2))),
3277 gen_imagpart (submode, y));
3278 }
e9c0bd54 3279 else
79ce92d7 3280#endif
7308a047
RS
3281 /* If this is a stack, push the highpart first, so it
3282 will be in the argument order.
3283
3284 In that case, change_address is used only to convert
3285 the mode, not to change the address. */
e9c0bd54 3286 if (stack)
c937357e 3287 {
e33c0d66
RS
3288 /* Note that the real part always precedes the imag part in memory
3289 regardless of the machine's endianness. */
c937357e
RS
3290#ifdef STACK_GROWS_DOWNWARD
3291 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 3292 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 3293 gen_imagpart (submode, y)));
c937357e 3294 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 3295 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 3296 gen_realpart (submode, y)));
c937357e
RS
3297#else
3298 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 3299 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 3300 gen_realpart (submode, y)));
c937357e 3301 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 3302 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 3303 gen_imagpart (submode, y)));
c937357e
RS
3304#endif
3305 }
3306 else
3307 {
235ae7be
DM
3308 rtx realpart_x, realpart_y;
3309 rtx imagpart_x, imagpart_y;
3310
405f63da
MM
3311 /* If this is a complex value with each part being smaller than a
3312 word, the usual calling sequence will likely pack the pieces into
3313 a single register. Unfortunately, SUBREG of hard registers only
3314 deals in terms of words, so we have a problem converting input
3315 arguments to the CONCAT of two registers that is used elsewhere
3316 for complex values. If this is before reload, we can copy it into
3317 memory and reload. FIXME, we should see about using extract and
3318 insert on integer registers, but complex short and complex char
3319 variables should be rarely used. */
3a94c984 3320 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
405f63da
MM
3321 && (reload_in_progress | reload_completed) == 0)
3322 {
bb93b973
RK
3323 int packed_dest_p
3324 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3325 int packed_src_p
3326 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
405f63da
MM
3327
3328 if (packed_dest_p || packed_src_p)
3329 {
3330 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3331 ? MODE_FLOAT : MODE_INT);
3332
1da68f56
RK
3333 enum machine_mode reg_mode
3334 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
405f63da
MM
3335
3336 if (reg_mode != BLKmode)
3337 {
3338 rtx mem = assign_stack_temp (reg_mode,
3339 GET_MODE_SIZE (mode), 0);
f4ef873c 3340 rtx cmem = adjust_address (mem, mode, 0);
405f63da 3341
1da68f56
RK
3342 cfun->cannot_inline
3343 = N_("function using short complex types cannot be inline");
405f63da
MM
3344
3345 if (packed_dest_p)
3346 {
3347 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
bb93b973 3348
405f63da
MM
3349 emit_move_insn_1 (cmem, y);
3350 return emit_move_insn_1 (sreg, mem);
3351 }
3352 else
3353 {
3354 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
bb93b973 3355
405f63da
MM
3356 emit_move_insn_1 (mem, sreg);
3357 return emit_move_insn_1 (x, cmem);
3358 }
3359 }
3360 }
3361 }
3362
235ae7be
DM
3363 realpart_x = gen_realpart (submode, x);
3364 realpart_y = gen_realpart (submode, y);
3365 imagpart_x = gen_imagpart (submode, x);
3366 imagpart_y = gen_imagpart (submode, y);
3367
3368 /* Show the output dies here. This is necessary for SUBREGs
3369 of pseudos since we cannot track their lifetimes correctly;
c14c6529
RH
3370 hard regs shouldn't appear here except as return values.
3371 We never want to emit such a clobber after reload. */
3372 if (x != y
235ae7be
DM
3373 && ! (reload_in_progress || reload_completed)
3374 && (GET_CODE (realpart_x) == SUBREG
3375 || GET_CODE (imagpart_x) == SUBREG))
bb93b973 3376 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2638126a 3377
c937357e 3378 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
235ae7be 3379 (realpart_x, realpart_y));
c937357e 3380 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
235ae7be 3381 (imagpart_x, imagpart_y));
c937357e 3382 }
7308a047 3383
7a1ab50a 3384 return get_last_insn ();
7308a047
RS
3385 }
3386
a3600c71
HPN
3387 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3388 find a mode to do it in. If we have a movcc, use it. Otherwise,
3389 find the MODE_INT mode of the same width. */
3390 else if (GET_MODE_CLASS (mode) == MODE_CC
3391 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3392 {
3393 enum insn_code insn_code;
3394 enum machine_mode tmode = VOIDmode;
3395 rtx x1 = x, y1 = y;
3396
3397 if (mode != CCmode
3398 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3399 tmode = CCmode;
3400 else
3401 for (tmode = QImode; tmode != VOIDmode;
3402 tmode = GET_MODE_WIDER_MODE (tmode))
3403 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3404 break;
3405
3406 if (tmode == VOIDmode)
3407 abort ();
3408
3409 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3410 may call change_address which is not appropriate if we were
3411 called when a reload was in progress. We don't have to worry
3412 about changing the address since the size in bytes is supposed to
3413 be the same. Copy the MEM to change the mode and move any
3414 substitutions from the old MEM to the new one. */
3415
3416 if (reload_in_progress)
3417 {
3418 x = gen_lowpart_common (tmode, x1);
3419 if (x == 0 && GET_CODE (x1) == MEM)
3420 {
3421 x = adjust_address_nv (x1, tmode, 0);
3422 copy_replacements (x1, x);
3423 }
3424
3425 y = gen_lowpart_common (tmode, y1);
3426 if (y == 0 && GET_CODE (y1) == MEM)
3427 {
3428 y = adjust_address_nv (y1, tmode, 0);
3429 copy_replacements (y1, y);
3430 }
3431 }
3432 else
3433 {
3434 x = gen_lowpart (tmode, x);
3435 y = gen_lowpart (tmode, y);
3436 }
3437
3438 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3439 return emit_insn (GEN_FCN (insn_code) (x, y));
3440 }
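/* Concretely (machine details hypothetical): for a 4-byte CC mode with
   no move pattern of its own, the code above first tries CCmode; if
   that also lacks a pattern, the widening walk from QImode stops at
   SImode, and the copy is emitted through movsi on the
   lowpart-converted operands.  */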
3441
cffa2189
R
3442 /* This will handle any multi-word or full-word mode that lacks a move_insn
3443 pattern. However, you will get better code if you define such patterns,
bbf6f052 3444 even if they must turn into multiple assembler instructions. */
cffa2189 3445 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
bbf6f052
RK
3446 {
3447 rtx last_insn = 0;
3ef1eef4 3448 rtx seq, inner;
235ae7be 3449 int need_clobber;
bb93b973 3450 int i;
3a94c984 3451
a98c9f1a
RK
3452#ifdef PUSH_ROUNDING
3453
3454 /* If X is a push on the stack, do the push now and replace
3455 X with a reference to the stack pointer. */
3456 if (push_operand (x, GET_MODE (x)))
3457 {
918a6124
GK
3458 rtx temp;
3459 enum rtx_code code;
0fb7aeda 3460
918a6124
GK
3461 /* Do not use anti_adjust_stack, since we don't want to update
3462 stack_pointer_delta. */
3463 temp = expand_binop (Pmode,
3464#ifdef STACK_GROWS_DOWNWARD
3465 sub_optab,
3466#else
3467 add_optab,
3468#endif
3469 stack_pointer_rtx,
3470 GEN_INT
bb93b973
RK
3471 (PUSH_ROUNDING
3472 (GET_MODE_SIZE (GET_MODE (x)))),
a426c92e 3473 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
bb93b973 3474
0fb7aeda
KH
3475 if (temp != stack_pointer_rtx)
3476 emit_move_insn (stack_pointer_rtx, temp);
918a6124
GK
3477
3478 code = GET_CODE (XEXP (x, 0));
bb93b973 3479
918a6124
GK
3480 /* Just hope that small offsets off SP are OK. */
3481 if (code == POST_INC)
0fb7aeda 3482 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
bb93b973
RK
3483 GEN_INT (-((HOST_WIDE_INT)
3484 GET_MODE_SIZE (GET_MODE (x)))));
918a6124 3485 else if (code == POST_DEC)
0fb7aeda 3486 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
918a6124
GK
3487 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3488 else
3489 temp = stack_pointer_rtx;
3490
3491 x = change_address (x, VOIDmode, temp);
a98c9f1a
RK
3492 }
3493#endif
3a94c984 3494
3ef1eef4
RK
3495 /* If we are in reload, see if either operand is a MEM whose address
3496 is scheduled for replacement. */
3497 if (reload_in_progress && GET_CODE (x) == MEM
3498 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
f1ec5147 3499 x = replace_equiv_address_nv (x, inner);
3ef1eef4
RK
3500 if (reload_in_progress && GET_CODE (y) == MEM
3501 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
f1ec5147 3502 y = replace_equiv_address_nv (y, inner);
3ef1eef4 3503
235ae7be 3504 start_sequence ();
15a7a8ec 3505
235ae7be 3506 need_clobber = 0;
bbf6f052 3507 for (i = 0;
3a94c984 3508 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
bbf6f052
RK
3509 i++)
3510 {
3511 rtx xpart = operand_subword (x, i, 1, mode);
3512 rtx ypart = operand_subword (y, i, 1, mode);
3513
3514 /* If we can't get a part of Y, put Y into memory if it is a
3515 constant. Otherwise, force it into a register. If we still
3516 can't get a part of Y, abort. */
3517 if (ypart == 0 && CONSTANT_P (y))
3518 {
3519 y = force_const_mem (mode, y);
3520 ypart = operand_subword (y, i, 1, mode);
3521 }
3522 else if (ypart == 0)
3523 ypart = operand_subword_force (y, i, mode);
3524
3525 if (xpart == 0 || ypart == 0)
3526 abort ();
3527
235ae7be
DM
3528 need_clobber |= (GET_CODE (xpart) == SUBREG);
3529
bbf6f052
RK
3530 last_insn = emit_move_insn (xpart, ypart);
3531 }
6551fa4d 3532
2f937369 3533 seq = get_insns ();
235ae7be
DM
3534 end_sequence ();
3535
3536 /* Show the output dies here. This is necessary for SUBREGs
3537 of pseudos since we cannot track their lifetimes correctly;
3538 hard regs shouldn't appear here except as return values.
3539 We never want to emit such a clobber after reload. */
3540 if (x != y
3541 && ! (reload_in_progress || reload_completed)
3542 && need_clobber != 0)
bb93b973 3543 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
235ae7be
DM
3544
3545 emit_insn (seq);
3546
bbf6f052
RK
3547 return last_insn;
3548 }
3549 else
3550 abort ();
3551}
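/* For instance (a sketch for a hypothetical 32-bit target with no movdi
   pattern): moving a DImode value takes the multi-word branch above.
   The loop runs twice, operand_subword peels off two SImode words, and
   two movsi insns are emitted, preceded by a CLOBBER of the destination
   when some destination word is a SUBREG of a pseudo, so that flow sees
   the full definition.  */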
51286de6
RH
3552
3553/* If Y is representable exactly in a narrower mode, and the target can
3554 perform the extension directly from constant or memory, then emit the
3555 move as an extension. */
3556
3557static rtx
3558compress_float_constant (x, y)
3559 rtx x, y;
3560{
3561 enum machine_mode dstmode = GET_MODE (x);
3562 enum machine_mode orig_srcmode = GET_MODE (y);
3563 enum machine_mode srcmode;
3564 REAL_VALUE_TYPE r;
3565
3566 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3567
3568 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3569 srcmode != orig_srcmode;
3570 srcmode = GET_MODE_WIDER_MODE (srcmode))
3571 {
3572 enum insn_code ic;
3573 rtx trunc_y, last_insn;
3574
3575 /* Skip if the target can't extend this way. */
3576 ic = can_extend_p (dstmode, srcmode, 0);
3577 if (ic == CODE_FOR_nothing)
3578 continue;
3579
3580 /* Skip if the narrowed value isn't exact. */
3581 if (! exact_real_truncate (srcmode, &r))
3582 continue;
3583
3584 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3585
3586 if (LEGITIMATE_CONSTANT_P (trunc_y))
3587 {
3588 /* Skip if the target needs extra instructions to perform
3589 the extension. */
3590 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3591 continue;
3592 }
3593 else if (float_extend_from_mem[dstmode][srcmode])
3594 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3595 else
3596 continue;
3597
3598 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3599 last_insn = get_last_insn ();
3600
3601 if (GET_CODE (x) == REG)
3602 REG_NOTES (last_insn)
3603 = gen_rtx_EXPR_LIST (REG_EQUAL, y, REG_NOTES (last_insn));
3604
3605 return last_insn;
3606 }
3607
3608 return NULL_RTX;
3609}
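/* A sketch of the effect (target details hypothetical): with a DFmode
   constant 1.0 and an extendsfdf2 pattern that accepts a constant or
   memory source, the move

       (set (reg:DF d) (const_double:DF 1.0))

   becomes

       (set (reg:DF d) (float_extend:DF (mem:SF <pool>)))

   since 1.0 survives exact_real_truncate to SFmode; a value like 0.1,
   which is not exactly representable in SFmode, is left alone.  */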
bbf6f052
RK
3610\f
3611/* Pushing data onto the stack. */
3612
3613/* Push a block of length SIZE (perhaps variable)
3614 and return an rtx to address the beginning of the block.
3615 Note that it is not possible for the value returned to be a QUEUED.
3616 The value may be virtual_outgoing_args_rtx.
3617
3618 EXTRA is the number of bytes of padding to push in addition to SIZE.
3619 BELOW nonzero means this padding comes at low addresses;
3620 otherwise, the padding comes at high addresses. */
3621
3622rtx
3623push_block (size, extra, below)
3624 rtx size;
3625 int extra, below;
3626{
b3694847 3627 rtx temp;
88f63c77
RK
3628
3629 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
3630 if (CONSTANT_P (size))
3631 anti_adjust_stack (plus_constant (size, extra));
3632 else if (GET_CODE (size) == REG && extra == 0)
3633 anti_adjust_stack (size);
3634 else
3635 {
ce48579b 3636 temp = copy_to_mode_reg (Pmode, size);
bbf6f052 3637 if (extra != 0)
906c4e36 3638 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
3639 temp, 0, OPTAB_LIB_WIDEN);
3640 anti_adjust_stack (temp);
3641 }
3642
f73ad30e 3643#ifndef STACK_GROWS_DOWNWARD
f73ad30e 3644 if (0)
f73ad30e
JH
3645#else
3646 if (1)
bbf6f052 3647#endif
f73ad30e 3648 {
f73ad30e
JH
3649 temp = virtual_outgoing_args_rtx;
3650 if (extra != 0 && below)
3651 temp = plus_constant (temp, extra);
3652 }
3653 else
3654 {
3655 if (GET_CODE (size) == CONST_INT)
3656 temp = plus_constant (virtual_outgoing_args_rtx,
3a94c984 3657 -INTVAL (size) - (below ? 0 : extra));
f73ad30e
JH
3658 else if (extra != 0 && !below)
3659 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3bdf5ad1 3660 negate_rtx (Pmode, plus_constant (size, extra)));
f73ad30e
JH
3661 else
3662 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3663 negate_rtx (Pmode, size));
3664 }
bbf6f052
RK
3665
3666 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3667}
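/* A minimal usage sketch (the 32-byte size is hypothetical):

       rtx addr = push_block (GEN_INT (32), 0, 0);
       rtx blk = gen_rtx_MEM (BLKmode, addr);

   anti-adjusts the stack by 32 bytes; on a STACK_GROWS_DOWNWARD target
   the returned address is based on virtual_outgoing_args_rtx, which
   then addresses the start of the freshly reserved block.  */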
3668
21d93687
RK
3669#ifdef PUSH_ROUNDING
3670
566aa174 3671/* Emit single push insn. */
21d93687 3672
566aa174
JH
3673static void
3674emit_single_push_insn (mode, x, type)
3675 rtx x;
3676 enum machine_mode mode;
3677 tree type;
3678{
566aa174 3679 rtx dest_addr;
918a6124 3680 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
566aa174 3681 rtx dest;
371b8fc0
JH
3682 enum insn_code icode;
3683 insn_operand_predicate_fn pred;
566aa174 3684
371b8fc0
JH
3685 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3686 /* If there is a push pattern, use it. Otherwise try the old way of
3687 throwing a MEM representing the push operation to the move expander. */
3688 icode = push_optab->handlers[(int) mode].insn_code;
3689 if (icode != CODE_FOR_nothing)
3690 {
3691 if (((pred = insn_data[(int) icode].operand[0].predicate)
505ddab6 3692 && !((*pred) (x, mode))))
371b8fc0
JH
3693 x = force_reg (mode, x);
3694 emit_insn (GEN_FCN (icode) (x));
3695 return;
3696 }
566aa174
JH
3697 if (GET_MODE_SIZE (mode) == rounded_size)
3698 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3699 else
3700 {
3701#ifdef STACK_GROWS_DOWNWARD
3702 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
505ddab6 3703 GEN_INT (-(HOST_WIDE_INT) rounded_size));
566aa174
JH
3704#else
3705 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3706 GEN_INT (rounded_size));
3707#endif
3708 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3709 }
3710
3711 dest = gen_rtx_MEM (mode, dest_addr);
3712
566aa174
JH
3713 if (type != 0)
3714 {
3715 set_mem_attributes (dest, type, 1);
c3d32120
RK
3716
3717 if (flag_optimize_sibling_calls)
3718 /* Function incoming arguments may overlap with sibling call
3719 outgoing arguments and we cannot allow reordering of reads
3720 from function arguments with stores to outgoing arguments
3721 of sibling calls. */
3722 set_mem_alias_set (dest, 0);
566aa174
JH
3723 }
3724 emit_move_insn (dest, x);
566aa174 3725}
21d93687 3726#endif
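/* For concreteness (a sketch for a downward-growing stack with
   STACK_PUSH_CODE == PRE_DEC and no SImode push pattern): pushing an
   SImode X whose PUSH_ROUNDING size equals GET_MODE_SIZE emits

       (set (mem:SI (pre_dec:SI (reg:SI sp))) X)

   while a 2-byte HImode push that PUSH_ROUNDING pads to 4 bytes gets a
   PRE_MODIFY address that drops sp by the rounded 4 instead.  */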
566aa174 3727
bbf6f052
RK
3728/* Generate code to push X onto the stack, assuming it has mode MODE and
3729 type TYPE.
3730 MODE is redundant except when X is a CONST_INT (since they don't
3731 carry mode info).
3732 SIZE is an rtx for the size of data to be copied (in bytes),
3733 needed only if X is BLKmode.
3734
f1eaaf73 3735 ALIGN (in bits) is maximum alignment we can assume.
bbf6f052 3736
cd048831
RK
3737 If PARTIAL and REG are both nonzero, then copy that many of the first
3738 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
3739 The amount of space pushed is decreased by PARTIAL words,
3740 rounded *down* to a multiple of PARM_BOUNDARY.
3741 REG must be a hard register in this case.
cd048831
RK
3742 If REG is zero but PARTIAL is not, take all other actions for an
3743 argument partially in registers, but do not actually load any
3744 registers.
bbf6f052
RK
3745
3746 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 3747 This is ignored if an argument block has already been allocated.
bbf6f052
RK
3748
3749 On a machine that lacks real push insns, ARGS_ADDR is the address of
3750 the bottom of the argument block for this call. We use indexing off there
3751 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3752 argument block has not been preallocated.
3753
e5e809f4
JL
3754 ARGS_SO_FAR is the size of args previously pushed for this call.
3755
3756 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3757 for arguments passed in registers. If nonzero, it will be the number
3758 of bytes required. */
bbf6f052
RK
3759
3760void
3761emit_push_insn (x, mode, type, size, align, partial, reg, extra,
4fc026cd 3762 args_addr, args_so_far, reg_parm_stack_space,
0fb7aeda 3763 alignment_pad)
b3694847 3764 rtx x;
bbf6f052
RK
3765 enum machine_mode mode;
3766 tree type;
3767 rtx size;
729a2125 3768 unsigned int align;
bbf6f052
RK
3769 int partial;
3770 rtx reg;
3771 int extra;
3772 rtx args_addr;
3773 rtx args_so_far;
e5e809f4 3774 int reg_parm_stack_space;
4fc026cd 3775 rtx alignment_pad;
bbf6f052
RK
3776{
3777 rtx xinner;
3778 enum direction stack_direction
3779#ifdef STACK_GROWS_DOWNWARD
3780 = downward;
3781#else
3782 = upward;
3783#endif
3784
3785 /* Decide where to pad the argument: `downward' for below,
3786 `upward' for above, or `none' for don't pad it.
3787 Default is below for small data on big-endian machines; else above. */
3788 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3789
0fb7aeda 3790 /* Invert direction if stack is post-decrement.
9e0e11bf
GK
3791 FIXME: why? */
3792 if (STACK_PUSH_CODE == POST_DEC)
bbf6f052
RK
3793 if (where_pad != none)
3794 where_pad = (where_pad == downward ? upward : downward);
3795
3796 xinner = x = protect_from_queue (x, 0);
3797
3798 if (mode == BLKmode)
3799 {
3800 /* Copy a block into the stack, entirely or partially. */
3801
b3694847 3802 rtx temp;
bbf6f052
RK
3803 int used = partial * UNITS_PER_WORD;
3804 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3805 int skip;
3a94c984 3806
bbf6f052
RK
3807 if (size == 0)
3808 abort ();
3809
3810 used -= offset;
3811
3812 /* USED is now the # of bytes we need not copy to the stack
3813 because registers will take care of them. */
3814
3815 if (partial != 0)
f4ef873c 3816 xinner = adjust_address (xinner, BLKmode, used);
bbf6f052
RK
3817
3818 /* If the partial register-part of the arg counts in its stack size,
3819 skip the part of stack space corresponding to the registers.
3820 Otherwise, start copying to the beginning of the stack space,
3821 by setting SKIP to 0. */
e5e809f4 3822 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
3823
3824#ifdef PUSH_ROUNDING
3825 /* Do it with several push insns if that doesn't take lots of insns
3826 and if there is no difficulty with push insns that skip bytes
3827 on the stack for alignment purposes. */
3828 if (args_addr == 0
f73ad30e 3829 && PUSH_ARGS
bbf6f052
RK
3830 && GET_CODE (size) == CONST_INT
3831 && skip == 0
15914757 3832 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
3833 /* Here we avoid the case of a structure whose weak alignment
3834 forces many pushes of a small amount of data,
3835 and such small pushes do rounding that causes trouble. */
e1565e65 3836 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3837 || align >= BIGGEST_ALIGNMENT
f1eaaf73
DE
3838 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3839 == (align / BITS_PER_UNIT)))
bbf6f052
RK
3840 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3841 {
3842 /* Push padding now if padding above and stack grows down,
3843 or if padding below and stack grows up.
3844 But if space already allocated, this has already been done. */
3845 if (extra && args_addr == 0
3846 && where_pad != none && where_pad != stack_direction)
906c4e36 3847 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3848
566aa174 3849 move_by_pieces (NULL, xinner, INTVAL (size) - used, align);
bbf6f052
RK
3850 }
3851 else
3a94c984 3852#endif /* PUSH_ROUNDING */
bbf6f052 3853 {
7ab923cc
JJ
3854 rtx target;
3855
bbf6f052
RK
3856 /* Otherwise make space on the stack and copy the data
3857 to the address of that space. */
3858
3859 /* Deduct words put into registers from the size we must copy. */
3860 if (partial != 0)
3861 {
3862 if (GET_CODE (size) == CONST_INT)
906c4e36 3863 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
3864 else
3865 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
3866 GEN_INT (used), NULL_RTX, 0,
3867 OPTAB_LIB_WIDEN);
bbf6f052
RK
3868 }
3869
3870 /* Get the address of the stack space.
3871 In this case, we do not deal with EXTRA separately.
3872 A single stack adjust will do. */
3873 if (! args_addr)
3874 {
3875 temp = push_block (size, extra, where_pad == downward);
3876 extra = 0;
3877 }
3878 else if (GET_CODE (args_so_far) == CONST_INT)
3879 temp = memory_address (BLKmode,
3880 plus_constant (args_addr,
3881 skip + INTVAL (args_so_far)));
3882 else
3883 temp = memory_address (BLKmode,
38a448ca
RH
3884 plus_constant (gen_rtx_PLUS (Pmode,
3885 args_addr,
3886 args_so_far),
bbf6f052 3887 skip));
4ca79136
RH
3888
3889 if (!ACCUMULATE_OUTGOING_ARGS)
3890 {
3891 /* If the source is referenced relative to the stack pointer,
3892 copy it to another register to stabilize it. We do not need
3893 to do this if we know that we won't be changing sp. */
3894
3895 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3896 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3897 temp = copy_to_reg (temp);
3898 }
3899
3a94c984 3900 target = gen_rtx_MEM (BLKmode, temp);
7ab923cc 3901
3a94c984
KH
3902 if (type != 0)
3903 {
3904 set_mem_attributes (target, type, 1);
3905 /* Function incoming arguments may overlap with sibling call
3906 outgoing arguments and we cannot allow reordering of reads
3907 from function arguments with stores to outgoing arguments
3908 of sibling calls. */
ba4828e0 3909 set_mem_alias_set (target, 0);
3a94c984 3910 }
4ca79136 3911
44bb111a
RH
3912 /* ALIGN may well be better aligned than TYPE, e.g. due to
3913 PARM_BOUNDARY. Assume the caller isn't lying. */
3914 set_mem_align (target, align);
4ca79136 3915
44bb111a 3916 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
bbf6f052
RK
3917 }
3918 }
3919 else if (partial > 0)
3920 {
3921 /* Scalar partly in registers. */
3922
3923 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3924 int i;
3925 int not_stack;
3926 /* # words of start of argument
3927 that we must make space for but need not store. */
3928 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3929 int args_offset = INTVAL (args_so_far);
3930 int skip;
3931
3932 /* Push padding now if padding above and stack grows down,
3933 or if padding below and stack grows up.
3934 But if space already allocated, this has already been done. */
3935 if (extra && args_addr == 0
3936 && where_pad != none && where_pad != stack_direction)
906c4e36 3937 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3938
3939 /* If we make space by pushing it, we might as well push
3940 the real data. Otherwise, we can leave OFFSET nonzero
3941 and leave the space uninitialized. */
3942 if (args_addr == 0)
3943 offset = 0;
3944
3945 /* Now NOT_STACK gets the number of words that we don't need to
3946 allocate on the stack. */
3947 not_stack = partial - offset;
3948
3949 /* If the partial register-part of the arg counts in its stack size,
3950 skip the part of stack space corresponding to the registers.
3951 Otherwise, start copying to the beginning of the stack space,
3952 by setting SKIP to 0. */
e5e809f4 3953 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3954
3955 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3956 x = validize_mem (force_const_mem (mode, x));
3957
3958 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3959 SUBREGs of such registers are not allowed. */
3960 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3961 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3962 x = copy_to_reg (x);
3963
3964 /* Loop over all the words allocated on the stack for this arg. */
3965 /* We can do it by words, because any scalar bigger than a word
3966 has a size a multiple of a word. */
3967#ifndef PUSH_ARGS_REVERSED
3968 for (i = not_stack; i < size; i++)
3969#else
3970 for (i = size - 1; i >= not_stack; i--)
3971#endif
3972 if (i >= not_stack + offset)
3973 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
3974 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3975 0, args_addr,
3976 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3977 * UNITS_PER_WORD)),
4fc026cd 3978 reg_parm_stack_space, alignment_pad);
bbf6f052
RK
3979 }
3980 else
3981 {
3982 rtx addr;
3bdf5ad1 3983 rtx dest;
bbf6f052
RK
3984
3985 /* Push padding now if padding above and stack grows down,
3986 or if padding below and stack grows up.
3987 But if space already allocated, this has already been done. */
3988 if (extra && args_addr == 0
3989 && where_pad != none && where_pad != stack_direction)
906c4e36 3990 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3991
3992#ifdef PUSH_ROUNDING
f73ad30e 3993 if (args_addr == 0 && PUSH_ARGS)
566aa174 3994 emit_single_push_insn (mode, x, type);
bbf6f052
RK
3995 else
3996#endif
921b3427
RK
3997 {
3998 if (GET_CODE (args_so_far) == CONST_INT)
3999 addr
4000 = memory_address (mode,
3a94c984 4001 plus_constant (args_addr,
921b3427 4002 INTVAL (args_so_far)));
3a94c984 4003 else
38a448ca
RH
4004 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4005 args_so_far));
566aa174
JH
4006 dest = gen_rtx_MEM (mode, addr);
4007 if (type != 0)
4008 {
4009 set_mem_attributes (dest, type, 1);
4010 /* Function incoming arguments may overlap with sibling call
4011 outgoing arguments and we cannot allow reordering of reads
4012 from function arguments with stores to outgoing arguments
4013 of sibling calls. */
ba4828e0 4014 set_mem_alias_set (dest, 0);
566aa174 4015 }
bbf6f052 4016
566aa174 4017 emit_move_insn (dest, x);
566aa174 4018 }
bbf6f052
RK
4019 }
4020
bbf6f052
RK
4021 /* If part should go in registers, copy that part
4022 into the appropriate registers. Do this now, at the end,
4023 since mem-to-mem copies above may do function calls. */
cd048831 4024 if (partial > 0 && reg != 0)
fffa9c1d
JW
4025 {
4026 /* Handle calls that pass values in multiple non-contiguous locations.
4027 The Irix 6 ABI has examples of this. */
4028 if (GET_CODE (reg) == PARALLEL)
04050c69 4029 emit_group_load (reg, x, -1); /* ??? size? */
fffa9c1d
JW
4030 else
4031 move_block_to_reg (REGNO (reg), x, partial, mode);
4032 }
bbf6f052
RK
4033
4034 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 4035 anti_adjust_stack (GEN_INT (extra));
3a94c984 4036
3ea2292a 4037 if (alignment_pad && args_addr == 0)
4fc026cd 4038 anti_adjust_stack (alignment_pad);
bbf6f052
RK
4039}
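/* A caller-side sketch (all argument values hypothetical): pushing a
   word-sized scalar with no partial-register part and no preallocated
   argument block looks like

       emit_push_insn (x, word_mode, NULL_TREE, NULL_RTX, PARM_BOUNDARY,
                       0, NULL_RTX, 0, NULL_RTX, const0_rtx, 0, NULL_RTX);

   and, on a PUSH_ROUNDING target, reduces to one emit_single_push_insn;
   BLKmode arguments instead go through move_by_pieces or
   emit_block_move above.  */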
4040\f
296b4ed9
RK
4041/* Return X if X can be used as a subtarget in a sequence of arithmetic
4042 operations. */
4043
4044static rtx
4045get_subtarget (x)
4046 rtx x;
4047{
4048 return ((x == 0
4049 /* Only registers can be subtargets. */
4050 || GET_CODE (x) != REG
4051 /* If the register is readonly, it can't be set more than once. */
4052 || RTX_UNCHANGING_P (x)
4053 /* Don't use hard regs to avoid extending their life. */
4054 || REGNO (x) < FIRST_PSEUDO_REGISTER
4055 /* Avoid subtargets inside loops,
4056 since they hide some invariant expressions. */
4057 || preserve_subexpressions_p ())
4058 ? 0 : x);
4059}
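/* E.g. when expanding X = Y op Z the expander may offer X here: a
   pseudo register is returned and the operation computes into it
   directly, while a hard register, an RTX_UNCHANGING_P register, or
   anything that is not a REG yields 0 so a fresh temporary is used
   instead.  */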
4060
bbf6f052
RK
4061/* Expand an assignment that stores the value of FROM into TO.
4062 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
4063 (This may contain a QUEUED rtx;
4064 if the value is constant, this rtx is a constant.)
4065 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
4066
4067 SUGGEST_REG is no longer actually used.
4068 It used to mean, copy the value through a register
4069 and return that register, if that is possible.
709f5be1 4070 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
4071
4072rtx
4073expand_assignment (to, from, want_value, suggest_reg)
4074 tree to, from;
4075 int want_value;
c5c76735 4076 int suggest_reg ATTRIBUTE_UNUSED;
bbf6f052 4077{
b3694847 4078 rtx to_rtx = 0;
bbf6f052
RK
4079 rtx result;
4080
4081 /* Don't crash if the lhs of the assignment was erroneous. */
4082
4083 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
4084 {
4085 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4086 return want_value ? result : NULL_RTX;
4087 }
bbf6f052
RK
4088
4089 /* Assignment of a structure component needs special treatment
4090 if the structure component's rtx is not simply a MEM.
6be58303
JW
4091 Assignment of an array element at a constant index, and assignment of
4092 an array element in an unaligned packed structure field, have the same
4093 problem. */
bbf6f052 4094
08293add 4095 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
7c02ae17
DE
4096 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4097 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
bbf6f052
RK
4098 {
4099 enum machine_mode mode1;
770ae6cc 4100 HOST_WIDE_INT bitsize, bitpos;
a06ef755 4101 rtx orig_to_rtx;
7bb0943f 4102 tree offset;
bbf6f052
RK
4103 int unsignedp;
4104 int volatilep = 0;
0088fcb1
RK
4105 tree tem;
4106
4107 push_temp_slots ();
839c4796 4108 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
a06ef755 4109 &unsignedp, &volatilep);
bbf6f052
RK
4110
4111 /* If we are going to use store_bit_field and extract_bit_field,
4112 make sure to_rtx will be safe for multiple use. */
4113
4114 if (mode1 == VOIDmode && want_value)
4115 tem = stabilize_reference (tem);
4116
1ed1b4fb
RK
4117 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4118
7bb0943f
RS
4119 if (offset != 0)
4120 {
e3c8ea67 4121 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
7bb0943f
RS
4122
4123 if (GET_CODE (to_rtx) != MEM)
4124 abort ();
bd070e1a 4125
bd070e1a 4126#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 4127 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 4128 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
4129#else
4130 if (GET_MODE (offset_rtx) != ptr_mode)
4131 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 4132#endif
bd070e1a 4133
9a7b9f4f
JL
4134 /* A constant address in TO_RTX can have VOIDmode, we must not try
4135 to call force_reg for that case. Avoid that case. */
89752202
HB
4136 if (GET_CODE (to_rtx) == MEM
4137 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 4138 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
a06ef755 4139 && bitsize > 0
3a94c984 4140 && (bitpos % bitsize) == 0
89752202 4141 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 4142 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
89752202 4143 {
e3c8ea67 4144 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
89752202
HB
4145 bitpos = 0;
4146 }
4147
0d4903b8 4148 to_rtx = offset_address (to_rtx, offset_rtx,
818c0c94
RH
4149 highest_pow2_factor_for_type (TREE_TYPE (to),
4150 offset));
7bb0943f 4151 }
c5c76735 4152
998d7deb
RH
4153 if (GET_CODE (to_rtx) == MEM)
4154 {
998d7deb
RH
4155 /* If the field is at offset zero, we could have been given the
4156 DECL_RTX of the parent struct. Don't munge it. */
4157 to_rtx = shallow_copy_rtx (to_rtx);
4158
6f1087be 4159 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
998d7deb 4160 }
effbcc6a 4161
a06ef755
RK
4162 /* Deal with volatile and readonly fields. The former is only done
4163 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4164 if (volatilep && GET_CODE (to_rtx) == MEM)
4165 {
4166 if (to_rtx == orig_to_rtx)
4167 to_rtx = copy_rtx (to_rtx);
4168 MEM_VOLATILE_P (to_rtx) = 1;
bbf6f052
RK
4169 }
4170
956d6950
JL
4171 if (TREE_CODE (to) == COMPONENT_REF
4172 && TREE_READONLY (TREE_OPERAND (to, 1)))
4173 {
a06ef755 4174 if (to_rtx == orig_to_rtx)
956d6950 4175 to_rtx = copy_rtx (to_rtx);
956d6950
JL
4176 RTX_UNCHANGING_P (to_rtx) = 1;
4177 }
4178
a84b4898 4179 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
a06ef755
RK
4180 {
4181 if (to_rtx == orig_to_rtx)
4182 to_rtx = copy_rtx (to_rtx);
4183 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4184 }
4185
a06ef755
RK
4186 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4187 (want_value
4188 /* Spurious cast for HPUX compiler. */
4189 ? ((enum machine_mode)
4190 TYPE_MODE (TREE_TYPE (to)))
4191 : VOIDmode),
4192 unsignedp, TREE_TYPE (tem), get_alias_set (to));
a69beca1 4193
a06ef755
RK
4194 preserve_temp_slots (result);
4195 free_temp_slots ();
4196 pop_temp_slots ();
a69beca1 4197
a06ef755
RK
4198 /* If the value is meaningful, convert RESULT to the proper mode.
4199 Otherwise, return nothing. */
4200 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4201 TYPE_MODE (TREE_TYPE (from)),
4202 result,
4203 TREE_UNSIGNED (TREE_TYPE (to)))
4204 : NULL_RTX);
bbf6f052
RK
4205 }
4206
cd1db108
RS
4207 /* If the rhs is a function call and its value is not an aggregate,
4208 call the function before we start to compute the lhs.
4209 This is needed for correct code for cases such as
4210 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
4211 requires loading up part of an address in a separate insn.
4212
1858863b
JW
4213 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4214 since it might be a promoted variable where the zero- or sign- extension
4215 needs to be done. Handling this in the normal way is safe because no
4216 computation is done before the call. */
1ad87b63 4217 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 4218 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1858863b
JW
4219 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4220 && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 4221 {
0088fcb1
RK
4222 rtx value;
4223
4224 push_temp_slots ();
4225 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 4226 if (to_rtx == 0)
37a08a29 4227 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
aaf87c45 4228
fffa9c1d
JW
4229 /* Handle calls that return values in multiple non-contiguous locations.
4230 The Irix 6 ABI has examples of this. */
4231 if (GET_CODE (to_rtx) == PARALLEL)
04050c69 4232 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
fffa9c1d 4233 else if (GET_MODE (to_rtx) == BLKmode)
44bb111a 4234 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
aaf87c45 4235 else
6419e5b0
DT
4236 {
4237#ifdef POINTERS_EXTEND_UNSIGNED
0d4903b8
RK
4238 if (POINTER_TYPE_P (TREE_TYPE (to))
4239 && GET_MODE (to_rtx) != GET_MODE (value))
6419e5b0
DT
4240 value = convert_memory_address (GET_MODE (to_rtx), value);
4241#endif
4242 emit_move_insn (to_rtx, value);
4243 }
cd1db108
RS
4244 preserve_temp_slots (to_rtx);
4245 free_temp_slots ();
0088fcb1 4246 pop_temp_slots ();
709f5be1 4247 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
4248 }
4249
bbf6f052
RK
4250 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4251 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4252
4253 if (to_rtx == 0)
37a08a29 4254 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
bbf6f052 4255
86d38d25 4256 /* Don't move directly into a return register. */
14a774a9
RK
4257 if (TREE_CODE (to) == RESULT_DECL
4258 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
86d38d25 4259 {
0088fcb1
RK
4260 rtx temp;
4261
4262 push_temp_slots ();
4263 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
4264
4265 if (GET_CODE (to_rtx) == PARALLEL)
04050c69 4266 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
14a774a9
RK
4267 else
4268 emit_move_insn (to_rtx, temp);
4269
86d38d25
RS
4270 preserve_temp_slots (to_rtx);
4271 free_temp_slots ();
0088fcb1 4272 pop_temp_slots ();
709f5be1 4273 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
4274 }
4275
bbf6f052
RK
4276 /* In case we are returning the contents of an object which overlaps
4277 the place the value is being stored, use a safe function when copying
4278 a value through a pointer into a structure value return block. */
4279 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4280 && current_function_returns_struct
4281 && !current_function_returns_pcc_struct)
4282 {
0088fcb1
RK
4283 rtx from_rtx, size;
4284
4285 push_temp_slots ();
33a20d10 4286 size = expr_size (from);
37a08a29 4287 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052 4288
4ca79136
RH
4289 if (TARGET_MEM_FUNCTIONS)
4290 emit_library_call (memmove_libfunc, LCT_NORMAL,
4291 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4292 XEXP (from_rtx, 0), Pmode,
4293 convert_to_mode (TYPE_MODE (sizetype),
4294 size, TREE_UNSIGNED (sizetype)),
4295 TYPE_MODE (sizetype));
4296 else
4297 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4298 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4299 XEXP (to_rtx, 0), Pmode,
4300 convert_to_mode (TYPE_MODE (integer_type_node),
4301 size,
4302 TREE_UNSIGNED (integer_type_node)),
4303 TYPE_MODE (integer_type_node));
bbf6f052
RK
4304
4305 preserve_temp_slots (to_rtx);
4306 free_temp_slots ();
0088fcb1 4307 pop_temp_slots ();
709f5be1 4308 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
4309 }
4310
4311 /* Compute FROM and store the value in the rtx we got. */
4312
0088fcb1 4313 push_temp_slots ();
bbf6f052
RK
4314 result = store_expr (from, to_rtx, want_value);
4315 preserve_temp_slots (result);
4316 free_temp_slots ();
0088fcb1 4317 pop_temp_slots ();
709f5be1 4318 return want_value ? result : NULL_RTX;
bbf6f052
RK
4319}
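/* A sketch of the paths above (types hypothetical): for

       struct s { int f : 5; } *p;  ...  p->f = v;

   the COMPONENT_REF branch finds bitsize 5 and lets store_field emit
   the bit-field insertion, `val = setjmp (buf)' takes the CALL_EXPR
   shortcut, and a plain `i = v' falls through to store_expr.  */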
4320
4321/* Generate code for computing expression EXP,
4322 and storing the value into TARGET.
bbf6f052
RK
4323 TARGET may contain a QUEUED rtx.
4324
8403445a 4325 If WANT_VALUE & 1 is nonzero, return a copy of the value
709f5be1
RS
4326 not in TARGET, so that we can be sure to use the proper
4327 value in a containing expression even if TARGET has something
4328 else stored in it. If possible, we copy the value through a pseudo
4329 and return that pseudo. Or, if the value is constant, we try to
4330 return the constant. In some cases, we return a pseudo
4331 copied *from* TARGET.
4332
4333 If the mode is BLKmode then we may return TARGET itself.
4334 It turns out that in BLKmode it doesn't cause a problem,
4335 because C has no operators that could combine two different
4336 assignments into the same BLKmode object with different values
4337 with no sequence point. Will other languages need this to
4338 be more thorough?
4339
8403445a 4340 If WANT_VALUE & 1 is 0, we return NULL, to make sure
709f5be1 4341 to catch quickly any cases where the caller uses the value
8403445a
AM
4342 and fails to set WANT_VALUE.
4343
4344 If WANT_VALUE & 2 is set, this is a store into a call param on the
4345 stack, and block moves may need to be treated specially. */
bbf6f052
RK
4346
4347rtx
709f5be1 4348store_expr (exp, target, want_value)
b3694847
SS
4349 tree exp;
4350 rtx target;
709f5be1 4351 int want_value;
bbf6f052 4352{
b3694847 4353 rtx temp;
bbf6f052 4354 int dont_return_target = 0;
e5408e52 4355 int dont_store_target = 0;
bbf6f052 4356
847311f4
AL
4357 if (VOID_TYPE_P (TREE_TYPE (exp)))
4358 {
4359 /* C++ can generate ?: expressions with a throw expression in one
4360 branch and an rvalue in the other. Here, we resolve attempts to
4361 store the throw expression's nonexistent result. */
4362 if (want_value)
4363 abort ();
4364 expand_expr (exp, const0_rtx, VOIDmode, 0);
4365 return NULL_RTX;
4366 }
bbf6f052
RK
4367 if (TREE_CODE (exp) == COMPOUND_EXPR)
4368 {
4369 /* Perform first part of compound expression, then assign from second
4370 part. */
8403445a
AM
4371 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4372 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
bbf6f052 4373 emit_queue ();
709f5be1 4374 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
4375 }
4376 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4377 {
4378 /* For conditional expression, get safe form of the target. Then
4379 test the condition, doing the appropriate assignment on either
4380 side. This avoids the creation of unnecessary temporaries.
4381 For non-BLKmode, it is more efficient not to do this. */
4382
4383 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4384
4385 emit_queue ();
4386 target = protect_from_queue (target, 1);
4387
dabf8373 4388 do_pending_stack_adjust ();
bbf6f052
RK
4389 NO_DEFER_POP;
4390 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 4391 start_cleanup_deferral ();
8403445a 4392 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
956d6950 4393 end_cleanup_deferral ();
bbf6f052
RK
4394 emit_queue ();
4395 emit_jump_insn (gen_jump (lab2));
4396 emit_barrier ();
4397 emit_label (lab1);
956d6950 4398 start_cleanup_deferral ();
8403445a 4399 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
956d6950 4400 end_cleanup_deferral ();
bbf6f052
RK
4401 emit_queue ();
4402 emit_label (lab2);
4403 OK_DEFER_POP;
a3a58acc 4404
8403445a 4405 return want_value & 1 ? target : NULL_RTX;
bbf6f052 4406 }
bbf6f052 4407 else if (queued_subexp_p (target))
709f5be1
RS
4408 /* If target contains a postincrement, let's not risk
4409 using it as the place to generate the rhs. */
bbf6f052
RK
4410 {
4411 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4412 {
4413 /* Expand EXP into a new pseudo. */
4414 temp = gen_reg_rtx (GET_MODE (target));
8403445a
AM
4415 temp = expand_expr (exp, temp, GET_MODE (target),
4416 (want_value & 2
4417 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
bbf6f052
RK
4418 }
4419 else
8403445a
AM
4420 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4421 (want_value & 2
4422 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
709f5be1
RS
4423
4424 /* If target is volatile, ANSI requires accessing the value
4425 *from* the target, if it is accessed. So make that happen.
4426 In no case return the target itself. */
8403445a 4427 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
709f5be1 4428 dont_return_target = 1;
bbf6f052 4429 }
8403445a
AM
4430 else if ((want_value & 1) != 0
4431 && GET_CODE (target) == MEM
4432 && ! MEM_VOLATILE_P (target)
12f06d17
CH
4433 && GET_MODE (target) != BLKmode)
4434 /* If target is in memory and caller wants value in a register instead,
4435 arrange that. Pass TARGET as target for expand_expr so that,
4436 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4437 We know expand_expr will not use the target in that case.
4438 Don't do this if TARGET is volatile because we are supposed
4439 to write it and then read it. */
4440 {
8403445a
AM
4441 temp = expand_expr (exp, target, GET_MODE (target),
4442 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
12f06d17 4443 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
e5408e52
JJ
4444 {
4445 /* If TEMP is already in the desired TARGET, only copy it from
4446 memory and don't store it there again. */
4447 if (temp == target
4448 || (rtx_equal_p (temp, target)
4449 && ! side_effects_p (temp) && ! side_effects_p (target)))
4450 dont_store_target = 1;
4451 temp = copy_to_reg (temp);
4452 }
12f06d17
CH
4453 dont_return_target = 1;
4454 }
1499e0a8 4455 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
09da1532 4456 /* If this is a scalar in a register that is stored in a wider mode
1499e0a8
RK
4457 than the declared mode, compute the result into its declared mode
4458 and then convert to the wider mode. Our value is the computed
4459 expression. */
4460 {
b76b08ef
RK
4461 rtx inner_target = 0;
4462
5a32d038 4463 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
4464 which will often result in some optimizations. Do the conversion
4465 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
4466 the extend. But don't do this if the type of EXP is a subtype
4467 of something else since then the conversion might involve
4468 more than just converting modes. */
8403445a
AM
4469 if ((want_value & 1) == 0
4470 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
ab6c58f1 4471 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
4472 {
4473 if (TREE_UNSIGNED (TREE_TYPE (exp))
4474 != SUBREG_PROMOTED_UNSIGNED_P (target))
ceef8ce4
NB
4475 exp = convert
4476 ((*lang_hooks.types.signed_or_unsigned_type)
4477 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
f635a84d 4478
b0c48229
NB
4479 exp = convert ((*lang_hooks.types.type_for_mode)
4480 (GET_MODE (SUBREG_REG (target)),
4481 SUBREG_PROMOTED_UNSIGNED_P (target)),
f635a84d 4482 exp);
b76b08ef
RK
4483
4484 inner_target = SUBREG_REG (target);
f635a84d 4485 }
3a94c984 4486
8403445a
AM
4487 temp = expand_expr (exp, inner_target, VOIDmode,
4488 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
b258707c 4489
7abec5be
RH
4490 /* If TEMP is a MEM and we want a result value, make the access
4491 now so it gets done only once. Strictly speaking, this is
4492 only necessary if the MEM is volatile, or if the address
4493 overlaps TARGET. But not performing the load twice also
4494 reduces the amount of rtl we generate and then have to CSE. */
8403445a 4495 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
766f36c7
RK
4496 temp = copy_to_reg (temp);
4497
b258707c
RS
4498 /* If TEMP is a VOIDmode constant, use convert_modes to make
4499 sure that we properly convert it. */
4500 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
1f1b0541
RH
4501 {
4502 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4503 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4504 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4505 GET_MODE (target), temp,
4506 SUBREG_PROMOTED_UNSIGNED_P (target));
4507 }
b258707c 4508
1499e0a8
RK
4509 convert_move (SUBREG_REG (target), temp,
4510 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4511
4512 /* If we promoted a constant, change the mode back down to match
4513 target. Otherwise, the caller might get confused by a result whose
4514 mode is larger than expected. */
4515
8403445a 4516 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
3dbecef9 4517 {
b3ca30df
JJ
4518 if (GET_MODE (temp) != VOIDmode)
4519 {
4520 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4521 SUBREG_PROMOTED_VAR_P (temp) = 1;
0fb7aeda 4522 SUBREG_PROMOTED_UNSIGNED_SET (temp,
7879b81e 4523 SUBREG_PROMOTED_UNSIGNED_P (target));
b3ca30df
JJ
4524 }
4525 else
4526 temp = convert_modes (GET_MODE (target),
4527 GET_MODE (SUBREG_REG (target)),
4528 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4529 }
4530
8403445a 4531 return want_value & 1 ? temp : NULL_RTX;
1499e0a8 4532 }
bbf6f052
RK
4533 else
4534 {
8403445a
AM
4535 temp = expand_expr (exp, target, GET_MODE (target),
4536 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
766f36c7 4537 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
4538 If TARGET is a volatile mem ref, either return TARGET
4539 or return a reg copied *from* TARGET; ANSI requires this.
4540
4541 Otherwise, if TEMP is not TARGET, return TEMP
4542 if it is constant (for efficiency),
4543 or if we really want the correct value. */
bbf6f052
RK
4544 if (!(target && GET_CODE (target) == REG
4545 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 4546 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 4547 && ! rtx_equal_p (temp, target)
8403445a 4548 && (CONSTANT_P (temp) || (want_value & 1) != 0))
bbf6f052
RK
4549 dont_return_target = 1;
4550 }
4551
b258707c
RS
4552 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4553 the same as that of TARGET, adjust the constant. This is needed, for
4554 example, in case it is a CONST_DOUBLE and we want only a word-sized
4555 value. */
4556 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 4557 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
4558 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4559 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4560 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4561
bbf6f052 4562 /* If value was not generated in the target, store it there.
37a08a29
RK
4563 Convert the value to TARGET's type first if necessary.
4564 If TEMP and TARGET compare equal according to rtx_equal_p, but
f3f2255a
R
4565 one or both of them are volatile memory refs, we have to distinguish
4566 two cases:
4567 - expand_expr has used TARGET. In this case, we must not generate
4568 another copy. This can be detected by TARGET being equal according
4569 to == .
4570 - expand_expr has not used TARGET - that means that the source just
4571 happens to have the same RTX form. Since temp will have been created
4572 by expand_expr, it will compare unequal according to == .
4573 We must generate a copy in this case, to reach the correct number
4574 of volatile memory references. */
bbf6f052 4575
6036acbb 4576 if ((! rtx_equal_p (temp, target)
f3f2255a
R
4577 || (temp != target && (side_effects_p (temp)
4578 || side_effects_p (target))))
e5408e52 4579 && TREE_CODE (exp) != ERROR_MARK
a9772b60
JJ
4580 && ! dont_store_target
4581 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4582 but TARGET is not a valid memory reference, TEMP will differ
4583 from TARGET although it is really the same location. */
4584 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
e56fc090
HPN
4585 || target != DECL_RTL_IF_SET (exp))
4586 /* If there's nothing to copy, don't bother. Don't call expr_size
4587 unless necessary, because the expr_size hook of some front ends
4588 (C++) aborts on objects that are not supposed to be bit-copied or
4589 bit-initialized. */
4590 && expr_size (exp) != const0_rtx)
bbf6f052
RK
4591 {
4592 target = protect_from_queue (target, 1);
4593 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 4594 && GET_MODE (temp) != VOIDmode)
bbf6f052
RK
4595 {
4596 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4597 if (dont_return_target)
4598 {
4599 /* In this case, we will return TEMP,
4600 so make sure it has the proper mode.
4601 But don't forget to store the value into TARGET. */
4602 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4603 emit_move_insn (target, temp);
4604 }
4605 else
4606 convert_move (target, temp, unsignedp);
4607 }
4608
4609 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4610 {
c24ae149
RK
4611 /* Handle copying a string constant into an array. The string
4612 constant may be shorter than the array. So copy just the string's
4613 actual length, and clear the rest. First get the size of the data
4614 type of the string, which is actually the size of the target. */
4615 rtx size = expr_size (exp);
bbf6f052 4616
e87b4f3f
RS
4617 if (GET_CODE (size) == CONST_INT
4618 && INTVAL (size) < TREE_STRING_LENGTH (exp))
8403445a
AM
4619 emit_block_move (target, temp, size,
4620 (want_value & 2
4621 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4622 else
bbf6f052 4623 {
e87b4f3f
RS
4624 /* Compute the size of the data to copy from the string. */
4625 tree copy_size
c03b7665 4626 = size_binop (MIN_EXPR,
b50d17a1 4627 make_tree (sizetype, size),
fed3cef0 4628 size_int (TREE_STRING_LENGTH (exp)));
8403445a
AM
4629 rtx copy_size_rtx
4630 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4631 (want_value & 2
4632 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
e87b4f3f
RS
4633 rtx label = 0;
4634
4635 /* Copy that much. */
267b28bd
SE
4636 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4637 TREE_UNSIGNED (sizetype));
8403445a
AM
4638 emit_block_move (target, temp, copy_size_rtx,
4639 (want_value & 2
4640 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4641
88f63c77
RK
4642 /* Figure out how much is left in TARGET that we have to clear.
4643 Do all calculations in ptr_mode. */
e87b4f3f
RS
4644 if (GET_CODE (copy_size_rtx) == CONST_INT)
4645 {
c24ae149
RK
4646 size = plus_constant (size, -INTVAL (copy_size_rtx));
4647 target = adjust_address (target, BLKmode,
4648 INTVAL (copy_size_rtx));
e87b4f3f
RS
4649 }
4650 else
4651 {
fa06ab5c 4652 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
906c4e36
RK
4653 copy_size_rtx, NULL_RTX, 0,
4654 OPTAB_LIB_WIDEN);
e87b4f3f 4655
c24ae149
RK
4656#ifdef POINTERS_EXTEND_UNSIGNED
4657 if (GET_MODE (copy_size_rtx) != Pmode)
267b28bd
SE
4658 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4659 TREE_UNSIGNED (sizetype));
c24ae149
RK
4660#endif
4661
4662 target = offset_address (target, copy_size_rtx,
4663 highest_pow2_factor (copy_size));
e87b4f3f 4664 label = gen_label_rtx ();
c5d5d461 4665 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
a06ef755 4666 GET_MODE (size), 0, label);
e87b4f3f
RS
4667 }
4668
4669 if (size != const0_rtx)
37a08a29 4670 clear_storage (target, size);
22619c3f 4671
e87b4f3f
RS
4672 if (label)
4673 emit_label (label);
bbf6f052
RK
4674 }
4675 }
fffa9c1d
JW
4676 /* Handle calls that return values in multiple non-contiguous locations.
4677 The Irix 6 ABI has examples of this. */
4678 else if (GET_CODE (target) == PARALLEL)
04050c69 4679 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052 4680 else if (GET_MODE (temp) == BLKmode)
8403445a
AM
4681 emit_block_move (target, temp, expr_size (exp),
4682 (want_value & 2
4683 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
bbf6f052
RK
4684 else
4685 emit_move_insn (target, temp);
4686 }
709f5be1 4687
766f36c7 4688 /* If we don't want a value, return NULL_RTX. */
8403445a 4689 if ((want_value & 1) == 0)
766f36c7
RK
4690 return NULL_RTX;
4691
4692 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4693 ??? The latter test doesn't seem to make sense. */
4694 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 4695 return temp;
766f36c7
RK
4696
4697 /* Return TARGET itself if it is a hard register. */
8403445a
AM
4698 else if ((want_value & 1) != 0
4699 && GET_MODE (target) != BLKmode
766f36c7
RK
4700 && ! (GET_CODE (target) == REG
4701 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 4702 return copy_to_reg (target);
3a94c984 4703
766f36c7 4704 else
709f5be1 4705 return target;
bbf6f052
RK
4706}
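/* As an example of the STRING_CST path above: for

       char buf[10] = "hi";

   expr_size yields 10 while TREE_STRING_LENGTH is 3, so emit_block_move
   copies the three bytes "hi\0" and clear_storage zeros the remaining
   seven, matching C's semantics for a partly initialized array.  */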
4707\f
9de08200
RK
4708/* Return 1 if EXP just contains zeros. */
4709
4710static int
4711is_zeros_p (exp)
4712 tree exp;
4713{
4714 tree elt;
4715
4716 switch (TREE_CODE (exp))
4717 {
4718 case CONVERT_EXPR:
4719 case NOP_EXPR:
4720 case NON_LVALUE_EXPR:
ed239f5a 4721 case VIEW_CONVERT_EXPR:
9de08200
RK
4722 return is_zeros_p (TREE_OPERAND (exp, 0));
4723
4724 case INTEGER_CST:
05bccae2 4725 return integer_zerop (exp);
9de08200
RK
4726
4727 case COMPLEX_CST:
4728 return
4729 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4730
4731 case REAL_CST:
41c9120b 4732 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200 4733
69ef87e2
AH
4734 case VECTOR_CST:
4735 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4736 elt = TREE_CHAIN (elt))
4737 if (!is_zeros_p (TREE_VALUE (elt)))
4738 return 0;
4739
4740 return 1;
4741
9de08200 4742 case CONSTRUCTOR:
e1a43f73
PB
4743 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4744 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
4745 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4746 if (! is_zeros_p (TREE_VALUE (elt)))
4747 return 0;
4748
4749 return 1;
3a94c984 4750
e9a25f70
JL
4751 default:
4752 return 0;
9de08200 4753 }
9de08200
RK
4754}
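/* For example, integer 0, the real constant 0.0, a complex or vector
   constant all of whose parts are zeros, and a CONSTRUCTOR whose every
   element satisfies this predicate all count; -0.0 does not, since
   REAL_VALUES_IDENTICAL distinguishes it from dconst0.  */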
4755
4756/* Return 1 if EXP contains mostly (3/4) zeros. */
4757
4758static int
4759mostly_zeros_p (exp)
4760 tree exp;
4761{
9de08200
RK
4762 if (TREE_CODE (exp) == CONSTRUCTOR)
4763 {
e1a43f73
PB
4764 int elts = 0, zeros = 0;
4765 tree elt = CONSTRUCTOR_ELTS (exp);
4766 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4767 {
4768 /* If there are no ranges of true bits, it is all zero. */
4769 return elt == NULL_TREE;
4770 }
4771 for (; elt; elt = TREE_CHAIN (elt))
4772 {
4773 /* We do not handle the case where the index is a RANGE_EXPR,
4774 so the statistic will be somewhat inaccurate.
4775 We do make a more accurate count in store_constructor itself,
4776 and since this function is only used for nested array elements,
0f41302f 4777 this should be close enough. */
e1a43f73
PB
4778 if (mostly_zeros_p (TREE_VALUE (elt)))
4779 zeros++;
4780 elts++;
4781 }
9de08200
RK
4782
4783 return 4 * zeros >= 3 * elts;
4784 }
4785
4786 return is_zeros_p (exp);
4787}
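/* E.g. a nested CONSTRUCTOR with 8 elements of which 6 are zero passes,
   since 4 * 6 >= 3 * 8; with only 5 zeros, 20 < 24 and the predicate
   fails, so callers will not bother pre-clearing the object.  */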
4788\f
e1a43f73
PB
4789/* Helper function for store_constructor.
4790 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4791 TYPE is the type of the CONSTRUCTOR, not the element type.
04050c69 4792 CLEARED is as for store_constructor.
23cb1766 4793 ALIAS_SET is the alias set to use for any stores.
23ccec44
JW
4794
4795 This provides a recursive shortcut back to store_constructor when it isn't
4796 necessary to go through store_field. This is so that we can pass through
4797 the cleared field to let store_constructor know that we may not have to
4798 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4799
4800static void
04050c69
RK
4801store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4802 alias_set)
e1a43f73 4803 rtx target;
770ae6cc
RK
4804 unsigned HOST_WIDE_INT bitsize;
4805 HOST_WIDE_INT bitpos;
e1a43f73
PB
4806 enum machine_mode mode;
4807 tree exp, type;
4808 int cleared;
23cb1766 4809 int alias_set;
e1a43f73
PB
4810{
4811 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44 4812 && bitpos % BITS_PER_UNIT == 0
cc2902df 4813 /* If we have a nonzero bitpos for a register target, then we just
23ccec44
JW
4814 let store_field do the bitfield handling. This is unlikely to
4815 generate unnecessary clear instructions anyway. */
4816 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 4817 {
61cb205c
RK
4818 if (GET_CODE (target) == MEM)
4819 target
4820 = adjust_address (target,
4821 GET_MODE (target) == BLKmode
4822 || 0 != (bitpos
4823 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4824 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
23cb1766 4825
e0339ef7 4826
04050c69 4827 /* Update the alias set, if required. */
10b76d73
RK
4828 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4829 && MEM_ALIAS_SET (target) != 0)
70072ed9
RK
4830 {
4831 target = copy_rtx (target);
4832 set_mem_alias_set (target, alias_set);
4833 }
e0339ef7 4834
04050c69 4835 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4836 }
4837 else
a06ef755
RK
4838 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4839 alias_set);
e1a43f73
PB
4840}
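/* For instance, a field that is itself an aggregate initialized from a
   nested CONSTRUCTOR at a byte boundary recurses straight into
   store_constructor with CLEARED passed through, so an inner structure
   is not cleared a second time when the outer one already was; a true
   bit-field, or a nonzero BITPOS within a REG target, falls back to
   store_field.  */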
4841
bbf6f052 4842/* Store the value of constructor EXP into the rtx TARGET.
04050c69
RK
4843 TARGET is either a REG or a MEM; we know it cannot conflict, since
4844 safe_from_p has been called.
b7010412
RK
4845 CLEARED is true if TARGET is known to have been zeroed.
4846 SIZE is the number of bytes of TARGET we are allowed to modify: this
4847 may not be the same as the size of EXP if we are assigning to a field
4848 which has been packed to exclude padding bits. */
bbf6f052
RK
4849
4850static void
04050c69 4851store_constructor (exp, target, cleared, size)
bbf6f052
RK
4852 tree exp;
4853 rtx target;
e1a43f73 4854 int cleared;
13eb1f7f 4855 HOST_WIDE_INT size;
bbf6f052 4856{
4af3895e 4857 tree type = TREE_TYPE (exp);
a5efcd63 4858#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4859 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4860#endif
4af3895e 4861
e44842fe
RK
4862 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4863 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052 4864 {
b3694847 4865 tree elt;
bbf6f052 4866
04050c69 4867 /* We either clear the aggregate or indicate the value is dead. */
dd1db5ec
RK
4868 if ((TREE_CODE (type) == UNION_TYPE
4869 || TREE_CODE (type) == QUAL_UNION_TYPE)
04050c69
RK
4870 && ! cleared
4871 && ! CONSTRUCTOR_ELTS (exp))
4872 /* If the constructor is empty, clear the union. */
a59f8640 4873 {
04050c69
RK
4874 clear_storage (target, expr_size (exp));
4875 cleared = 1;
a59f8640 4876 }
4af3895e
JVA
4877
4878 /* If we are building a static constructor into a register,
4879 set the initial value as zero so we can fold the value into
67225c15
RK
4880 a constant. But if more than one register is involved,
4881 this probably loses. */
04050c69 4882 else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
67225c15 4883 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200 4884 {
04050c69 4885 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
9de08200
RK
4886 cleared = 1;
4887 }
4888
4889 /* If the constructor has fewer fields than the structure
4890 or if we are initializing the structure to mostly zeros,
0d97bf4c 4891 clear the whole structure first. Don't do this if TARGET is a
fcf1b822
RK
4892 register whose mode size isn't equal to SIZE since clear_storage
4893 can't handle this case. */
04050c69 4894 else if (! cleared && size > 0
9376fcd6 4895 && ((list_length (CONSTRUCTOR_ELTS (exp))
c3b247b4 4896 != fields_length (type))
fcf1b822
RK
4897 || mostly_zeros_p (exp))
4898 && (GET_CODE (target) != REG
04050c69
RK
4899 || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
4900 == size)))
9de08200 4901 {
04050c69 4902 clear_storage (target, GEN_INT (size));
9de08200
RK
4903 cleared = 1;
4904 }
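/* Editorial sketch, not part of the original source: the clear-first
   path above fires for a partial initializer such as

       struct { int a, b, c; } s = { 1 };

   The constructor lists one element while fields_length is 3, so all
   of `s' is cleared with clear_storage and only `a' is stored below;
   with CLEARED set, the loop skips the zero-valued missing fields.  */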
04050c69
RK
4905
4906 if (! cleared)
38a448ca 4907 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4908
4909 /* Store each element of the constructor into
4910 the corresponding field of TARGET. */
4911
4912 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4913 {
b3694847 4914 tree field = TREE_PURPOSE (elt);
34c73909 4915 tree value = TREE_VALUE (elt);
b3694847 4916 enum machine_mode mode;
770ae6cc
RK
4917 HOST_WIDE_INT bitsize;
4918 HOST_WIDE_INT bitpos = 0;
770ae6cc 4919 tree offset;
b50d17a1 4920 rtx to_rtx = target;
bbf6f052 4921
f32fd778
RS
4922 /* Just ignore missing fields.
4923 We cleared the whole structure, above,
4924 if any fields are missing. */
4925 if (field == 0)
4926 continue;
4927
8b6000fc 4928 if (cleared && is_zeros_p (value))
e1a43f73 4929 continue;
9de08200 4930
770ae6cc
RK
4931 if (host_integerp (DECL_SIZE (field), 1))
4932 bitsize = tree_low_cst (DECL_SIZE (field), 1);
14a774a9
RK
4933 else
4934 bitsize = -1;
4935
bbf6f052
RK
4936 mode = DECL_MODE (field);
4937 if (DECL_BIT_FIELD (field))
4938 mode = VOIDmode;
4939
770ae6cc
RK
4940 offset = DECL_FIELD_OFFSET (field);
4941 if (host_integerp (offset, 0)
4942 && host_integerp (bit_position (field), 0))
4943 {
4944 bitpos = int_bit_position (field);
4945 offset = 0;
4946 }
b50d17a1 4947 else
770ae6cc 4948 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
3a94c984 4949
b50d17a1
RK
4950 if (offset)
4951 {
4952 rtx offset_rtx;
4953
4954 if (contains_placeholder_p (offset))
7fa96708 4955 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 4956 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 4957
b50d17a1
RK
4958 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4959 if (GET_CODE (to_rtx) != MEM)
4960 abort ();
4961
bd070e1a 4962#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 4963 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 4964 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
fa06ab5c
RK
4965#else
4966 if (GET_MODE (offset_rtx) != ptr_mode)
4967 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 4968#endif
bd070e1a 4969
0d4903b8
RK
4970 to_rtx = offset_address (to_rtx, offset_rtx,
4971 highest_pow2_factor (offset));
b50d17a1 4972 }
c5c76735 4973
cf04eb80
RK
4974 if (TREE_READONLY (field))
4975 {
9151b3bf 4976 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
4977 to_rtx = copy_rtx (to_rtx);
4978
cf04eb80
RK
4979 RTX_UNCHANGING_P (to_rtx) = 1;
4980 }
4981
34c73909
R
4982#ifdef WORD_REGISTER_OPERATIONS
4983 /* If this initializes a field that is smaller than a word, at the
4984 start of a word, try to widen it to a full word.
4985 This special case allows us to output C++ member function
4986 initializations in a form that the optimizers can understand. */
770ae6cc 4987 if (GET_CODE (target) == REG
34c73909
R
4988 && bitsize < BITS_PER_WORD
4989 && bitpos % BITS_PER_WORD == 0
4990 && GET_MODE_CLASS (mode) == MODE_INT
4991 && TREE_CODE (value) == INTEGER_CST
13eb1f7f
RK
4992 && exp_size >= 0
4993 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
34c73909
R
4994 {
4995 tree type = TREE_TYPE (value);
04050c69 4996
34c73909
R
4997 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4998 {
b0c48229
NB
4999 type = (*lang_hooks.types.type_for_size)
5000 (BITS_PER_WORD, TREE_UNSIGNED (type));
34c73909
R
5001 value = convert (type, value);
5002 }
04050c69 5003
34c73909
R
5004 if (BYTES_BIG_ENDIAN)
5005 value
5006 = fold (build (LSHIFT_EXPR, type, value,
5007 build_int_2 (BITS_PER_WORD - bitsize, 0)));
5008 bitsize = BITS_PER_WORD;
5009 mode = word_mode;
5010 }
5011#endif
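/* Editorial sketch, not part of the original source: on a
   WORD_REGISTER_OPERATIONS target with 32-bit words, initializing a
   16-bit field at bit position 0 of a REG target with an INTEGER_CST
   is widened above to a full-word store; when BYTES_BIG_ENDIAN, the
   value is first shifted left by BITS_PER_WORD - bitsize = 32 - 16
   = 16 so that it lands in the high-order half of the word.  */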
10b76d73
RK
5012
5013 if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
5014 && DECL_NONADDRESSABLE_P (field))
5015 {
5016 to_rtx = copy_rtx (to_rtx);
5017 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
5018 }
5019
c5c76735 5020 store_constructor_field (to_rtx, bitsize, bitpos, mode,
8b6000fc 5021 value, type, cleared,
10b76d73 5022 get_alias_set (TREE_TYPE (field)));
bbf6f052
RK
5023 }
5024 }
e6834654
SS
5025 else if (TREE_CODE (type) == ARRAY_TYPE
5026 || TREE_CODE (type) == VECTOR_TYPE)
bbf6f052 5027 {
b3694847
SS
5028 tree elt;
5029 int i;
e1a43f73 5030 int need_to_clear;
4af3895e 5031 tree domain = TYPE_DOMAIN (type);
4af3895e 5032 tree elttype = TREE_TYPE (type);
e6834654 5033 int const_bounds_p;
ae0ed63a
JM
5034 HOST_WIDE_INT minelt = 0;
5035 HOST_WIDE_INT maxelt = 0;
85f3d674 5036
e6834654
SS
5037 /* Vectors are like arrays, but the domain is stored via an array
5038 type indirectly. */
5039 if (TREE_CODE (type) == VECTOR_TYPE)
5040 {
5041 /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
5042 the same field as TYPE_DOMAIN, we are not guaranteed that
5043 it always will. */
5044 domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
5045 domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
5046 }
5047
5048 const_bounds_p = (TYPE_MIN_VALUE (domain)
5049 && TYPE_MAX_VALUE (domain)
5050 && host_integerp (TYPE_MIN_VALUE (domain), 0)
5051 && host_integerp (TYPE_MAX_VALUE (domain), 0));
5052
85f3d674
RK
5053 /* If we have constant bounds for the range of the type, get them. */
5054 if (const_bounds_p)
5055 {
5056 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
5057 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
5058 }
bbf6f052 5059
e1a43f73 5060 /* If the constructor has fewer elements than the array,
38e01259 5061 clear the whole array first. Similarly if this is
e1a43f73
PB
5062 static constructor of a non-BLKmode object. */
5063 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
5064 need_to_clear = 1;
5065 else
5066 {
5067 HOST_WIDE_INT count = 0, zero_count = 0;
85f3d674
RK
5068 need_to_clear = ! const_bounds_p;
5069
e1a43f73
PB
5070 /* This loop is a more accurate version of the loop in
5071 mostly_zeros_p (it handles RANGE_EXPR in an index).
5072 It is also needed to check for missing elements. */
5073 for (elt = CONSTRUCTOR_ELTS (exp);
85f3d674 5074 elt != NULL_TREE && ! need_to_clear;
df0faff1 5075 elt = TREE_CHAIN (elt))
e1a43f73
PB
5076 {
5077 tree index = TREE_PURPOSE (elt);
5078 HOST_WIDE_INT this_node_count;
19caa751 5079
e1a43f73
PB
5080 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5081 {
5082 tree lo_index = TREE_OPERAND (index, 0);
5083 tree hi_index = TREE_OPERAND (index, 1);
05bccae2 5084
19caa751
RK
5085 if (! host_integerp (lo_index, 1)
5086 || ! host_integerp (hi_index, 1))
e1a43f73
PB
5087 {
5088 need_to_clear = 1;
5089 break;
5090 }
19caa751
RK
5091
5092 this_node_count = (tree_low_cst (hi_index, 1)
5093 - tree_low_cst (lo_index, 1) + 1);
e1a43f73
PB
5094 }
5095 else
5096 this_node_count = 1;
85f3d674 5097
e1a43f73
PB
5098 count += this_node_count;
5099 if (mostly_zeros_p (TREE_VALUE (elt)))
5100 zero_count += this_node_count;
5101 }
85f3d674 5102
8e958f70 5103 /* Clear the entire array first if there are any missing elements,
0f41302f 5104 or if the incidence of zero elements is >= 75%. */
85f3d674
RK
5105 if (! need_to_clear
5106 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
e1a43f73
PB
5107 need_to_clear = 1;
5108 }
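/* Editorial note, not part of the original source: the test
   4 * zero_count >= 3 * count above is the integer form of
   zero_count / count >= 75%.  For example, with count == 8 and
   zero_count == 6 it reads 24 >= 24, so the array is cleared first
   and only the two nonzero elements are stored individually.  */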
85f3d674 5109
9376fcd6 5110 if (need_to_clear && size > 0)
9de08200
RK
5111 {
5112 if (! cleared)
725e58b1
RK
5113 {
5114 if (REG_P (target))
5115 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
5116 else
5117 clear_storage (target, GEN_INT (size));
5118 }
9de08200
RK
5119 cleared = 1;
5120 }
df4556a3 5121 else if (REG_P (target))
bbf6f052 5122 /* Inform later passes that the old value is dead. */
38a448ca 5123 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
5124
5125 /* Store each element of the constructor into
5126 the corresponding element of TARGET, determined
5127 by counting the elements. */
5128 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
5129 elt;
5130 elt = TREE_CHAIN (elt), i++)
5131 {
b3694847 5132 enum machine_mode mode;
19caa751
RK
5133 HOST_WIDE_INT bitsize;
5134 HOST_WIDE_INT bitpos;
bbf6f052 5135 int unsignedp;
e1a43f73 5136 tree value = TREE_VALUE (elt);
03dc44a6
RS
5137 tree index = TREE_PURPOSE (elt);
5138 rtx xtarget = target;
bbf6f052 5139
e1a43f73
PB
5140 if (cleared && is_zeros_p (value))
5141 continue;
9de08200 5142
bbf6f052 5143 unsignedp = TREE_UNSIGNED (elttype);
14a774a9
RK
5144 mode = TYPE_MODE (elttype);
5145 if (mode == BLKmode)
19caa751
RK
5146 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
5147 ? tree_low_cst (TYPE_SIZE (elttype), 1)
5148 : -1);
14a774a9
RK
5149 else
5150 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5151
e1a43f73
PB
5152 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
5153 {
5154 tree lo_index = TREE_OPERAND (index, 0);
5155 tree hi_index = TREE_OPERAND (index, 1);
4977bab6 5156 rtx index_r, pos_rtx, loop_end;
e1a43f73 5157 struct nesting *loop;
05c0b405
PB
5158 HOST_WIDE_INT lo, hi, count;
5159 tree position;
e1a43f73 5160
0f41302f 5161 /* If the range is constant and "small", unroll the loop. */
85f3d674
RK
5162 if (const_bounds_p
5163 && host_integerp (lo_index, 0)
19caa751
RK
5164 && host_integerp (hi_index, 0)
5165 && (lo = tree_low_cst (lo_index, 0),
5166 hi = tree_low_cst (hi_index, 0),
05c0b405
PB
5167 count = hi - lo + 1,
5168 (GET_CODE (target) != MEM
5169 || count <= 2
19caa751
RK
5170 || (host_integerp (TYPE_SIZE (elttype), 1)
5171 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
5172 <= 40 * 8)))))
e1a43f73 5173 {
05c0b405
PB
5174 lo -= minelt; hi -= minelt;
5175 for (; lo <= hi; lo++)
e1a43f73 5176 {
19caa751 5177 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
10b76d73
RK
5178
5179 if (GET_CODE (target) == MEM
5180 && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 5181 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
5182 && TYPE_NONALIASED_COMPONENT (type))
5183 {
5184 target = copy_rtx (target);
5185 MEM_KEEP_ALIAS_SET_P (target) = 1;
5186 }
5187
23cb1766 5188 store_constructor_field
04050c69
RK
5189 (target, bitsize, bitpos, mode, value, type, cleared,
5190 get_alias_set (elttype));
e1a43f73
PB
5191 }
5192 }
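/* Editorial sketch, not part of the original source: with the GNU
   range designator

       int a[100] = { [0 ... 9] = 7 };

   the bounds are constant and count is 10, so for 32-bit elements
   10 * 32 = 320 <= 40 * 8 and the stores are unrolled above, while
   [0 ... 10] would give 352 bits and fall through to the runtime
   loop below.  */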
5193 else
5194 {
4977bab6 5195 expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
e1a43f73
PB
5196 loop_end = gen_label_rtx ();
5197
5198 unsignedp = TREE_UNSIGNED (domain);
5199
5200 index = build_decl (VAR_DECL, NULL_TREE, domain);
5201
19e7881c 5202 index_r
e1a43f73
PB
5203 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
5204 &unsignedp, 0));
19e7881c 5205 SET_DECL_RTL (index, index_r);
e1a43f73
PB
5206 if (TREE_CODE (value) == SAVE_EXPR
5207 && SAVE_EXPR_RTL (value) == 0)
5208 {
0f41302f
MS
5209 /* Make sure value gets expanded once before the
5210 loop. */
e1a43f73
PB
5211 expand_expr (value, const0_rtx, VOIDmode, 0);
5212 emit_queue ();
5213 }
5214 store_expr (lo_index, index_r, 0);
5215 loop = expand_start_loop (0);
5216
0f41302f 5217 /* Assign value to element index. */
fed3cef0
RK
5218 position
5219 = convert (ssizetype,
5220 fold (build (MINUS_EXPR, TREE_TYPE (index),
5221 index, TYPE_MIN_VALUE (domain))));
5222 position = size_binop (MULT_EXPR, position,
5223 convert (ssizetype,
5224 TYPE_SIZE_UNIT (elttype)));
5225
e1a43f73 5226 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
0d4903b8
RK
5227 xtarget = offset_address (target, pos_rtx,
5228 highest_pow2_factor (position));
5229 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 5230 if (TREE_CODE (value) == CONSTRUCTOR)
04050c69 5231 store_constructor (value, xtarget, cleared,
b7010412 5232 bitsize / BITS_PER_UNIT);
e1a43f73
PB
5233 else
5234 store_expr (value, xtarget, 0);
5235
5236 expand_exit_loop_if_false (loop,
5237 build (LT_EXPR, integer_type_node,
5238 index, hi_index));
5239
5240 expand_increment (build (PREINCREMENT_EXPR,
5241 TREE_TYPE (index),
7b8b9722 5242 index, integer_one_node), 0, 0);
e1a43f73
PB
5243 expand_end_loop ();
5244 emit_label (loop_end);
e1a43f73
PB
5245 }
5246 }
19caa751
RK
5247 else if ((index != 0 && ! host_integerp (index, 0))
5248 || ! host_integerp (TYPE_SIZE (elttype), 1))
03dc44a6 5249 {
03dc44a6
RS
5250 tree position;
5251
5b6c44ff 5252 if (index == 0)
fed3cef0 5253 index = ssize_int (i);
5b6c44ff 5254
e1a43f73 5255 if (minelt)
fed3cef0
RK
5256 index = convert (ssizetype,
 5257 fold (build (MINUS_EXPR, TREE_TYPE (index),
 5258 index, TYPE_MIN_VALUE (domain))));
19caa751 5259
fed3cef0
RK
5260 position = size_binop (MULT_EXPR, index,
5261 convert (ssizetype,
5262 TYPE_SIZE_UNIT (elttype)));
0d4903b8
RK
5263 xtarget = offset_address (target,
5264 expand_expr (position, 0, VOIDmode, 0),
5265 highest_pow2_factor (position));
5266 xtarget = adjust_address (xtarget, mode, 0);
e1a43f73 5267 store_expr (value, xtarget, 0);
03dc44a6
RS
5268 }
5269 else
5270 {
5271 if (index != 0)
19caa751
RK
5272 bitpos = ((tree_low_cst (index, 0) - minelt)
5273 * tree_low_cst (TYPE_SIZE (elttype), 1));
03dc44a6 5274 else
19caa751
RK
5275 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
5276
10b76d73 5277 if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
e6834654 5278 && TREE_CODE (type) == ARRAY_TYPE
10b76d73
RK
5279 && TYPE_NONALIASED_COMPONENT (type))
5280 {
5281 target = copy_rtx (target);
5282 MEM_KEEP_ALIAS_SET_P (target) = 1;
5283 }
5284
c5c76735 5285 store_constructor_field (target, bitsize, bitpos, mode, value,
04050c69 5286 type, cleared, get_alias_set (elttype));
23cb1766 5287
03dc44a6 5288 }
bbf6f052
RK
5289 }
5290 }
19caa751 5291
3a94c984 5292 /* Set constructor assignments. */
071a6595
PB
5293 else if (TREE_CODE (type) == SET_TYPE)
5294 {
e1a43f73 5295 tree elt = CONSTRUCTOR_ELTS (exp);
19caa751 5296 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
5297 tree domain = TYPE_DOMAIN (type);
5298 tree domain_min, domain_max, bitlength;
5299
9faa82d8 5300 /* The default implementation strategy is to extract the constant
071a6595
PB
5301 parts of the constructor, use that to initialize the target,
5302 and then "or" in whatever non-constant ranges we need in addition.
5303
5304 If a large set is all zero or all ones, it is
5305 probably better to set it using memset (if available) or bzero.
5306 Also, if a large set has just a single range, it may also be
 5307 better to first clear the whole set (using
 0f41302f 5308 bzero/memset) and then set the bits we want. */
3a94c984 5309
0f41302f 5310 /* Check for all zeros. */
9376fcd6 5311 if (elt == NULL_TREE && size > 0)
071a6595 5312 {
e1a43f73 5313 if (!cleared)
8ac61af7 5314 clear_storage (target, GEN_INT (size));
071a6595
PB
5315 return;
5316 }
5317
071a6595
PB
5318 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
5319 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
5320 bitlength = size_binop (PLUS_EXPR,
fed3cef0
RK
5321 size_diffop (domain_max, domain_min),
5322 ssize_int (1));
071a6595 5323
19caa751 5324 nbits = tree_low_cst (bitlength, 1);
e1a43f73
PB
5325
5326 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
5327 are "complicated" (more than one range), initialize (the
3a94c984 5328 constant parts) by copying from a constant. */
e1a43f73
PB
5329 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
5330 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 5331 {
19caa751 5332 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
b4ee5a72 5333 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 5334 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72 5335 HOST_WIDE_INT word = 0;
19caa751
RK
5336 unsigned int bit_pos = 0;
5337 unsigned int ibit = 0;
5338 unsigned int offset = 0; /* In bytes from beginning of set. */
5339
e1a43f73 5340 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 5341 for (;;)
071a6595 5342 {
b4ee5a72
PB
5343 if (bit_buffer[ibit])
5344 {
b09f3348 5345 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
5346 word |= (1 << (set_word_size - 1 - bit_pos));
5347 else
5348 word |= 1 << bit_pos;
5349 }
19caa751 5350
b4ee5a72
PB
5351 bit_pos++; ibit++;
5352 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 5353 {
e1a43f73
PB
5354 if (word != 0 || ! cleared)
5355 {
5356 rtx datum = GEN_INT (word);
5357 rtx to_rtx;
19caa751 5358
0f41302f
MS
5359 /* The assumption here is that it is safe to use
5360 XEXP if the set is multi-word, but not if
5361 it's single-word. */
e1a43f73 5362 if (GET_CODE (target) == MEM)
f4ef873c 5363 to_rtx = adjust_address (target, mode, offset);
3a94c984 5364 else if (offset == 0)
e1a43f73
PB
5365 to_rtx = target;
5366 else
5367 abort ();
5368 emit_move_insn (to_rtx, datum);
5369 }
19caa751 5370
b4ee5a72
PB
5371 if (ibit == nbits)
5372 break;
5373 word = 0;
5374 bit_pos = 0;
5375 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
5376 }
5377 }
071a6595 5378 }
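/* Editorial note, not part of the original source: with a 32-bit
   set_word_size and bits 0 and 2 of the set populated, the loop
   above builds word == (1 << 0) | (1 << 2) == 5 when
   !BYTES_BIG_ENDIAN, but word == (1 << 31) | (1 << 29) when
   BYTES_BIG_ENDIAN, since each bit lands at
   set_word_size - 1 - bit_pos.  */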
e1a43f73 5379 else if (!cleared)
19caa751
RK
5380 /* Don't bother clearing storage if the set is all ones. */
5381 if (TREE_CHAIN (elt) != NULL_TREE
5382 || (TREE_PURPOSE (elt) == NULL_TREE
5383 ? nbits != 1
5384 : ( ! host_integerp (TREE_VALUE (elt), 0)
5385 || ! host_integerp (TREE_PURPOSE (elt), 0)
5386 || (tree_low_cst (TREE_VALUE (elt), 0)
5387 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
5388 != (HOST_WIDE_INT) nbits))))
8ac61af7 5389 clear_storage (target, expr_size (exp));
3a94c984 5390
e1a43f73 5391 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595 5392 {
3a94c984 5393 /* Start of range of element or NULL. */
071a6595 5394 tree startbit = TREE_PURPOSE (elt);
3a94c984 5395 /* End of range of element, or element value. */
071a6595
PB
5396 tree endbit = TREE_VALUE (elt);
5397 HOST_WIDE_INT startb, endb;
19caa751 5398 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
071a6595
PB
5399
5400 bitlength_rtx = expand_expr (bitlength,
19caa751 5401 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
071a6595 5402
3a94c984 5403 /* Handle non-range tuple element like [ expr ]. */
071a6595
PB
5404 if (startbit == NULL_TREE)
5405 {
5406 startbit = save_expr (endbit);
5407 endbit = startbit;
5408 }
19caa751 5409
071a6595
PB
5410 startbit = convert (sizetype, startbit);
5411 endbit = convert (sizetype, endbit);
5412 if (! integer_zerop (domain_min))
5413 {
5414 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
5415 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
5416 }
3a94c984 5417 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
071a6595 5418 EXPAND_CONST_ADDRESS);
3a94c984 5419 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
071a6595
PB
5420 EXPAND_CONST_ADDRESS);
5421
5422 if (REG_P (target))
5423 {
1da68f56
RK
5424 targetx
5425 = assign_temp
b0c48229
NB
5426 ((build_qualified_type ((*lang_hooks.types.type_for_mode)
5427 (GET_MODE (target), 0),
1da68f56
RK
5428 TYPE_QUAL_CONST)),
5429 0, 1, 1);
071a6595
PB
5430 emit_move_insn (targetx, target);
5431 }
19caa751 5432
071a6595
PB
5433 else if (GET_CODE (target) == MEM)
5434 targetx = target;
5435 else
5436 abort ();
5437
4ca79136
RH
5438 /* Optimization: If startbit and endbit are constants divisible
5439 by BITS_PER_UNIT, call memset instead. */
5440 if (TARGET_MEM_FUNCTIONS
5441 && TREE_CODE (startbit) == INTEGER_CST
071a6595
PB
5442 && TREE_CODE (endbit) == INTEGER_CST
5443 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 5444 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 5445 {
ebb1b59a 5446 emit_library_call (memset_libfunc, LCT_NORMAL,
071a6595 5447 VOIDmode, 3,
e1a43f73
PB
5448 plus_constant (XEXP (targetx, 0),
5449 startb / BITS_PER_UNIT),
071a6595 5450 Pmode,
3b6f75e2 5451 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 5452 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 5453 TYPE_MODE (sizetype));
071a6595
PB
5454 }
5455 else
68d28100
RH
5456 emit_library_call (setbits_libfunc, LCT_NORMAL,
5457 VOIDmode, 4, XEXP (targetx, 0),
ebb1b59a 5458 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
19caa751
RK
5459 startbit_rtx, TYPE_MODE (sizetype),
5460 endbit_rtx, TYPE_MODE (sizetype));
5461
071a6595
PB
5462 if (REG_P (target))
5463 emit_move_insn (target, targetx);
5464 }
5465 }
bbf6f052
RK
5466
5467 else
5468 abort ();
5469}
5470
5471/* Store the value of EXP (an expression tree)
5472 into a subfield of TARGET which has mode MODE and occupies
5473 BITSIZE bits, starting BITPOS bits from the start of TARGET.
5474 If MODE is VOIDmode, it means that we are storing into a bit-field.
5475
5476 If VALUE_MODE is VOIDmode, return nothing in particular.
5477 UNSIGNEDP is not used in this case.
5478
5479 Otherwise, return an rtx for the value stored. This rtx
5480 has mode VALUE_MODE if that is convenient to do.
5481 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5482
a06ef755 5483 TYPE is the type of the underlying object.
ece32014
MM
5484
5485 ALIAS_SET is the alias set for the destination. This value will
5486 (in general) be different from that for TARGET, since TARGET is a
5487 reference to the containing structure. */
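/* Editorial sketch, not part of the original source: a bit-field
   assignment such as

       struct { unsigned f : 3; } x;
       x.f = 5;

   arrives here with BITSIZE 3, BITPOS 0, and MODE VOIDmode, so the
   store must go through the bit-field path below rather than an
   ordinary memory reference.  */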
bbf6f052
RK
5488
5489static rtx
a06ef755
RK
5490store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
5491 alias_set)
bbf6f052 5492 rtx target;
770ae6cc
RK
5493 HOST_WIDE_INT bitsize;
5494 HOST_WIDE_INT bitpos;
bbf6f052
RK
5495 enum machine_mode mode;
5496 tree exp;
5497 enum machine_mode value_mode;
5498 int unsignedp;
a06ef755 5499 tree type;
ece32014 5500 int alias_set;
bbf6f052 5501{
906c4e36 5502 HOST_WIDE_INT width_mask = 0;
bbf6f052 5503
e9a25f70
JL
5504 if (TREE_CODE (exp) == ERROR_MARK)
5505 return const0_rtx;
5506
2be6a7e9
RK
5507 /* If we have nothing to store, do nothing unless the expression has
5508 side-effects. */
5509 if (bitsize == 0)
5510 return expand_expr (exp, const0_rtx, VOIDmode, 0);
6a87d634 5511 else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
906c4e36 5512 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
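/* Editorial note, not part of the original source: for BITSIZE == 3
   the line above yields width_mask = (1 << 3) - 1 = 7, a mask of the
   low three bits, used further down when the caller wants the stored
   value back without refetching the bit-field.  */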
bbf6f052
RK
5513
5514 /* If we are storing into an unaligned field of an aligned union that is
5515 in a register, we may have the mode of TARGET being an integer mode but
5516 MODE == BLKmode. In that case, get an aligned object whose size and
5517 alignment are the same as TARGET and store TARGET into it (we can avoid
5518 the store if the field being stored is the entire width of TARGET). Then
5519 call ourselves recursively to store the field into a BLKmode version of
5520 that object. Finally, load from the object into TARGET. This is not
5521 very efficient in general, but should only be slightly more expensive
5522 than the otherwise-required unaligned accesses. Perhaps this can be
5523 cleaned up later. */
5524
5525 if (mode == BLKmode
5526 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5527 {
1da68f56
RK
5528 rtx object
5529 = assign_temp
a06ef755 5530 (build_qualified_type (type, TYPE_QUALS (type) | TYPE_QUAL_CONST),
1da68f56 5531 0, 1, 1);
c4e59f51 5532 rtx blk_object = adjust_address (object, BLKmode, 0);
bbf6f052 5533
8752c357 5534 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
bbf6f052
RK
5535 emit_move_insn (object, target);
5536
a06ef755
RK
5537 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
5538 alias_set);
bbf6f052
RK
5539
5540 emit_move_insn (target, object);
5541
a06ef755 5542 /* We want to return the BLKmode version of the data. */
46093b97 5543 return blk_object;
bbf6f052 5544 }
c3b247b4
JM
5545
5546 if (GET_CODE (target) == CONCAT)
5547 {
5548 /* We're storing into a struct containing a single __complex. */
5549
5550 if (bitpos != 0)
5551 abort ();
5552 return store_expr (exp, target, 0);
5553 }
bbf6f052
RK
5554
5555 /* If the structure is in a register or if the component
5556 is a bit field, we cannot use addressing to access it.
5557 Use bit-field techniques or SUBREG to store in it. */
5558
4fa52007 5559 if (mode == VOIDmode
6ab06cbb
JW
5560 || (mode != BLKmode && ! direct_store[(int) mode]
5561 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5562 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4fa52007 5563 || GET_CODE (target) == REG
c980ac49 5564 || GET_CODE (target) == SUBREG
ccc98036
RS
5565 /* If the field isn't aligned enough to store as an ordinary memref,
5566 store it as a bit field. */
15b19a7d
OH
5567 || (mode != BLKmode
5568 && ((SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target))
5569 && (MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode)))
14a774a9 5570 || bitpos % GET_MODE_ALIGNMENT (mode)))
14a774a9
RK
5571 /* If the RHS and field are a constant size and the size of the
5572 RHS isn't the same size as the bitfield, we must use bitfield
5573 operations. */
05bccae2
RK
5574 || (bitsize >= 0
5575 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5576 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 5577 {
906c4e36 5578 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 5579
ef19912d
RK
5580 /* If BITSIZE is narrower than the size of the type of EXP
5581 we will be narrowing TEMP. Normally, what's wanted are the
5582 low-order bits. However, if EXP's type is a record and this is
 5583 a big-endian machine, we want the upper BITSIZE bits. */
5584 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
65a07688 5585 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
ef19912d
RK
5586 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5587 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5588 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5589 - bitsize),
5590 temp, 1);
5591
bbd6cf73
RK
5592 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5593 MODE. */
5594 if (mode != VOIDmode && mode != BLKmode
5595 && mode != TYPE_MODE (TREE_TYPE (exp)))
5596 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5597
a281e72d
RK
5598 /* If the modes of TARGET and TEMP are both BLKmode, both
5599 must be in memory and BITPOS must be aligned on a byte
5600 boundary. If so, we simply do a block copy. */
5601 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5602 {
5603 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5604 || bitpos % BITS_PER_UNIT != 0)
5605 abort ();
5606
f4ef873c 5607 target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
a281e72d 5608 emit_block_move (target, temp,
a06ef755 5609 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a
RH
5610 / BITS_PER_UNIT),
5611 BLOCK_OP_NORMAL);
a281e72d
RK
5612
5613 return value_mode == VOIDmode ? const0_rtx : target;
5614 }
5615
bbf6f052 5616 /* Store the value in the bitfield. */
a06ef755
RK
5617 store_bit_field (target, bitsize, bitpos, mode, temp,
5618 int_size_in_bytes (type));
5619
bbf6f052
RK
5620 if (value_mode != VOIDmode)
5621 {
04050c69
RK
5622 /* The caller wants an rtx for the value.
5623 If possible, avoid refetching from the bitfield itself. */
bbf6f052
RK
5624 if (width_mask != 0
5625 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 5626 {
9074de27 5627 tree count;
5c4d7cfb 5628 enum machine_mode tmode;
86a2c12a 5629
5c4d7cfb 5630 tmode = GET_MODE (temp);
86a2c12a
RS
5631 if (tmode == VOIDmode)
5632 tmode = value_mode;
22273300
JJ
5633
5634 if (unsignedp)
5635 return expand_and (tmode, temp,
2496c7bd 5636 gen_int_mode (width_mask, tmode),
22273300
JJ
5637 NULL_RTX);
5638
5c4d7cfb
RS
5639 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5640 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5641 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5642 }
04050c69 5643
bbf6f052 5644 return extract_bit_field (target, bitsize, bitpos, unsignedp,
04050c69 5645 NULL_RTX, value_mode, VOIDmode,
a06ef755 5646 int_size_in_bytes (type));
bbf6f052
RK
5647 }
5648 return const0_rtx;
5649 }
5650 else
5651 {
5652 rtx addr = XEXP (target, 0);
a06ef755 5653 rtx to_rtx = target;
bbf6f052
RK
5654
5655 /* If a value is wanted, it must be the lhs;
5656 so make the address stable for multiple use. */
5657
5658 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5659 && ! CONSTANT_ADDRESS_P (addr)
5660 /* A frame-pointer reference is already stable. */
5661 && ! (GET_CODE (addr) == PLUS
5662 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5663 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5664 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
a06ef755 5665 to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));
bbf6f052
RK
5666
5667 /* Now build a reference to just the desired component. */
5668
a06ef755
RK
5669 to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);
5670
5671 if (to_rtx == target)
5672 to_rtx = copy_rtx (to_rtx);
792760b9 5673
c6df88cb 5674 MEM_SET_IN_STRUCT_P (to_rtx, 1);
10b76d73 5675 if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
a06ef755 5676 set_mem_alias_set (to_rtx, alias_set);
bbf6f052
RK
5677
5678 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5679 }
5680}
5681\f
5682/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
b4e3fabb
RK
5683 an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
5684 codes and find the ultimate containing object, which we return.
bbf6f052
RK
5685
5686 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5687 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
5688 If the position of the field is variable, we store a tree
5689 giving the variable offset (in units) in *POFFSET.
5690 This offset is in addition to the bit position.
5691 If the position is not variable, we store 0 in *POFFSET.
bbf6f052
RK
5692
5693 If any of the extraction expressions is volatile,
5694 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5695
5696 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5697 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
5698 is redundant.
5699
5700 If the field describes a variable-sized object, *PMODE is set to
5701 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
6d2f8887 5702 this case, but the address of the object can be found. */
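/* Editorial sketch, not part of the original source: for a reference
   like s.a[i].f, the loop below peels the COMPONENT_REF and ARRAY_REF
   nodes one at a time, folding the constant displacements (the
   positions of `f' and `a') into BIT_OFFSET and accumulating the
   variable part, i times the element size, in OFFSET; the tree
   finally returned is the innermost object `s'.  */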
bbf6f052
RK
5703
5704tree
4969d05d 5705get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
a06ef755 5706 punsignedp, pvolatilep)
bbf6f052 5707 tree exp;
770ae6cc
RK
5708 HOST_WIDE_INT *pbitsize;
5709 HOST_WIDE_INT *pbitpos;
7bb0943f 5710 tree *poffset;
bbf6f052
RK
5711 enum machine_mode *pmode;
5712 int *punsignedp;
5713 int *pvolatilep;
5714{
5715 tree size_tree = 0;
5716 enum machine_mode mode = VOIDmode;
fed3cef0 5717 tree offset = size_zero_node;
770ae6cc 5718 tree bit_offset = bitsize_zero_node;
738cc472 5719 tree placeholder_ptr = 0;
770ae6cc 5720 tree tem;
bbf6f052 5721
770ae6cc
RK
5722 /* First get the mode, signedness, and size. We do this from just the
5723 outermost expression. */
bbf6f052
RK
5724 if (TREE_CODE (exp) == COMPONENT_REF)
5725 {
5726 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5727 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5728 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 5729
bbf6f052
RK
5730 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5731 }
5732 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5733 {
5734 size_tree = TREE_OPERAND (exp, 1);
5735 *punsignedp = TREE_UNSIGNED (exp);
5736 }
5737 else
5738 {
5739 mode = TYPE_MODE (TREE_TYPE (exp));
770ae6cc
RK
5740 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5741
ab87f8c8
JL
5742 if (mode == BLKmode)
5743 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
5744 else
5745 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5746 }
3a94c984 5747
770ae6cc 5748 if (size_tree != 0)
bbf6f052 5749 {
770ae6cc 5750 if (! host_integerp (size_tree, 1))
e7c33f54
RK
5751 mode = BLKmode, *pbitsize = -1;
5752 else
770ae6cc 5753 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
5754 }
5755
5756 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5757 and find the ultimate containing object. */
bbf6f052
RK
5758 while (1)
5759 {
770ae6cc
RK
5760 if (TREE_CODE (exp) == BIT_FIELD_REF)
5761 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5762 else if (TREE_CODE (exp) == COMPONENT_REF)
bbf6f052 5763 {
770ae6cc
RK
5764 tree field = TREE_OPERAND (exp, 1);
5765 tree this_offset = DECL_FIELD_OFFSET (field);
bbf6f052 5766
e7f3c83f
RK
5767 /* If this field hasn't been filled in yet, don't go
5768 past it. This should only happen when folding expressions
5769 made during type construction. */
770ae6cc 5770 if (this_offset == 0)
e7f3c83f 5771 break;
770ae6cc
RK
5772 else if (! TREE_CONSTANT (this_offset)
5773 && contains_placeholder_p (this_offset))
5774 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
e7f3c83f 5775
7156dead 5776 offset = size_binop (PLUS_EXPR, offset, this_offset);
770ae6cc
RK
5777 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5778 DECL_FIELD_BIT_OFFSET (field));
e6d8c385 5779
a06ef755 5780 /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN. */
bbf6f052 5781 }
7156dead 5782
b4e3fabb
RK
5783 else if (TREE_CODE (exp) == ARRAY_REF
5784 || TREE_CODE (exp) == ARRAY_RANGE_REF)
bbf6f052 5785 {
742920c7 5786 tree index = TREE_OPERAND (exp, 1);
b4e3fabb
RK
5787 tree array = TREE_OPERAND (exp, 0);
5788 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
770ae6cc 5789 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
b4e3fabb 5790 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));
742920c7 5791
770ae6cc
RK
5792 /* We assume all arrays have sizes that are a multiple of a byte.
5793 First subtract the lower bound, if any, in the type of the
5794 index, then convert to sizetype and multiply by the size of the
5795 array element. */
5796 if (low_bound != 0 && ! integer_zerop (low_bound))
5797 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5798 index, low_bound));
f8dac6eb 5799
7156dead
RK
5800 /* If the index has a self-referential type, pass it to a
 5801 WITH_RECORD_EXPR; if the component size is self-referential,
 5802 pass our component to one. */
770ae6cc
RK
5803 if (! TREE_CONSTANT (index)
5804 && contains_placeholder_p (index))
5805 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
7156dead
RK
5806 if (! TREE_CONSTANT (unit_size)
5807 && contains_placeholder_p (unit_size))
b4e3fabb 5808 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);
742920c7 5809
770ae6cc
RK
5810 offset = size_binop (PLUS_EXPR, offset,
5811 size_binop (MULT_EXPR,
5812 convert (sizetype, index),
7156dead 5813 unit_size));
bbf6f052 5814 }
7156dead 5815
738cc472
RK
5816 else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
5817 {
70072ed9
RK
5818 tree new = find_placeholder (exp, &placeholder_ptr);
5819
5820 /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
5821 We might have been called from tree optimization where we
5822 haven't set up an object yet. */
5823 if (new == 0)
5824 break;
5825 else
5826 exp = new;
5827
738cc472
RK
5828 continue;
5829 }
bbf6f052 5830 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
ed239f5a 5831 && TREE_CODE (exp) != VIEW_CONVERT_EXPR
bbf6f052
RK
5832 && ! ((TREE_CODE (exp) == NOP_EXPR
5833 || TREE_CODE (exp) == CONVERT_EXPR)
5834 && (TYPE_MODE (TREE_TYPE (exp))
5835 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5836 break;
7bb0943f
RS
5837
5838 /* If any reference in the chain is volatile, the effect is volatile. */
5839 if (TREE_THIS_VOLATILE (exp))
5840 *pvolatilep = 1;
839c4796 5841
bbf6f052
RK
5842 exp = TREE_OPERAND (exp, 0);
5843 }
5844
770ae6cc
RK
5845 /* If OFFSET is constant, see if we can return the whole thing as a
5846 constant bit position. Otherwise, split it up. */
5847 if (host_integerp (offset, 0)
5848 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5849 bitsize_unit_node))
5850 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5851 && host_integerp (tem, 0))
5852 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5853 else
5854 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5855
bbf6f052 5856 *pmode = mode;
bbf6f052
RK
5857 return exp;
5858}
921b3427 5859
ed239f5a
RK
5860/* Return 1 if T is an expression that get_inner_reference handles. */
5861
5862int
5863handled_component_p (t)
5864 tree t;
5865{
5866 switch (TREE_CODE (t))
5867 {
5868 case BIT_FIELD_REF:
5869 case COMPONENT_REF:
5870 case ARRAY_REF:
5871 case ARRAY_RANGE_REF:
5872 case NON_LVALUE_EXPR:
5873 case VIEW_CONVERT_EXPR:
5874 return 1;
5875
5876 case NOP_EXPR:
5877 case CONVERT_EXPR:
5878 return (TYPE_MODE (TREE_TYPE (t))
5879 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));
5880
5881 default:
5882 return 0;
5883 }
5884}
bbf6f052 5885\f
3fe44edd
RK
5886/* Given an rtx VALUE that may contain additions and multiplications, return
5887 an equivalent value that just refers to a register, memory, or constant.
5888 This is done by generating instructions to perform the arithmetic and
5889 returning a pseudo-register containing the value.
c45a13a6
RK
5890
5891 The returned value may be a REG, SUBREG, MEM or constant. */
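/* Editorial sketch, not part of the original source: given a VALUE
   such as (plus (mult (reg X) (const_int 4)) (reg Y)), force_operand
   forces both operands, emits the multiply and then the add, and
   returns a pseudo register holding the sum, so the caller sees a
   plain REG instead of nested arithmetic.  X and Y stand for
   hypothetical pseudo numbers.  */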
bbf6f052
RK
5892
5893rtx
5894force_operand (value, target)
5895 rtx value, target;
5896{
8a28dbcc 5897 rtx op1, op2;
bbf6f052 5898 /* Use subtarget as the target for operand 0 of a binary operation. */
b3694847 5899 rtx subtarget = get_subtarget (target);
8a28dbcc 5900 enum rtx_code code = GET_CODE (value);
bbf6f052 5901
8b015896 5902 /* Check for a PIC address load. */
8a28dbcc 5903 if ((code == PLUS || code == MINUS)
8b015896
RH
5904 && XEXP (value, 0) == pic_offset_table_rtx
5905 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5906 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5907 || GET_CODE (XEXP (value, 1)) == CONST))
5908 {
5909 if (!subtarget)
5910 subtarget = gen_reg_rtx (GET_MODE (value));
5911 emit_move_insn (subtarget, value);
5912 return subtarget;
5913 }
5914
8a28dbcc 5915 if (code == ZERO_EXTEND || code == SIGN_EXTEND)
bbf6f052 5916 {
8a28dbcc
JH
5917 if (!target)
5918 target = gen_reg_rtx (GET_MODE (value));
ce0f3925 5919 convert_move (target, force_operand (XEXP (value, 0), NULL),
8a28dbcc
JH
5920 code == ZERO_EXTEND);
5921 return target;
bbf6f052
RK
5922 }
5923
8a28dbcc 5924 if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
bbf6f052
RK
5925 {
5926 op2 = XEXP (value, 1);
8a28dbcc 5927 if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
bbf6f052 5928 subtarget = 0;
8a28dbcc 5929 if (code == MINUS && GET_CODE (op2) == CONST_INT)
bbf6f052 5930 {
8a28dbcc 5931 code = PLUS;
bbf6f052
RK
5932 op2 = negate_rtx (GET_MODE (value), op2);
5933 }
5934
5935 /* Check for an addition with OP2 a constant integer and our first
8a28dbcc
JH
5936 operand a PLUS of a virtual register and something else. In that
5937 case, we want to emit the sum of the virtual register and the
5938 constant first and then add the other value. This allows virtual
5939 register instantiation to simply modify the constant rather than
5940 creating another one around this addition. */
5941 if (code == PLUS && GET_CODE (op2) == CONST_INT
bbf6f052
RK
5942 && GET_CODE (XEXP (value, 0)) == PLUS
5943 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5944 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5945 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5946 {
8a28dbcc
JH
5947 rtx temp = expand_simple_binop (GET_MODE (value), code,
5948 XEXP (XEXP (value, 0), 0), op2,
5949 subtarget, 0, OPTAB_LIB_WIDEN);
5950 return expand_simple_binop (GET_MODE (value), code, temp,
5951 force_operand (XEXP (XEXP (value,
5952 0), 1), 0),
5953 target, 0, OPTAB_LIB_WIDEN);
bbf6f052 5954 }
3a94c984 5955
8a28dbcc
JH
5956 op1 = force_operand (XEXP (value, 0), subtarget);
5957 op2 = force_operand (op2, NULL_RTX);
5958 switch (code)
5959 {
5960 case MULT:
5961 return expand_mult (GET_MODE (value), op1, op2, target, 1);
5962 case DIV:
5963 if (!INTEGRAL_MODE_P (GET_MODE (value)))
5964 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5965 target, 1, OPTAB_LIB_WIDEN);
5966 else
5967 return expand_divmod (0,
5968 FLOAT_MODE_P (GET_MODE (value))
5969 ? RDIV_EXPR : TRUNC_DIV_EXPR,
5970 GET_MODE (value), op1, op2, target, 0);
5971 break;
5972 case MOD:
5973 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5974 target, 0);
5975 break;
5976 case UDIV:
5977 return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
5978 target, 1);
5979 break;
5980 case UMOD:
5981 return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
5982 target, 1);
5983 break;
5984 case ASHIFTRT:
5985 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5986 target, 0, OPTAB_LIB_WIDEN);
5987 break;
5988 default:
5989 return expand_simple_binop (GET_MODE (value), code, op1, op2,
5990 target, 1, OPTAB_LIB_WIDEN);
5991 }
5992 }
5993 if (GET_RTX_CLASS (code) == '1')
5994 {
5995 op1 = force_operand (XEXP (value, 0), NULL_RTX);
5996 return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
bbf6f052 5997 }
34e81b5a
RK
5998
5999#ifdef INSN_SCHEDULING
 6000 /* On machines that have insn scheduling, we want all memory references to be
6001 explicit, so we need to deal with such paradoxical SUBREGs. */
6002 if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
6003 && (GET_MODE_SIZE (GET_MODE (value))
6004 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
6005 value
6006 = simplify_gen_subreg (GET_MODE (value),
6007 force_reg (GET_MODE (SUBREG_REG (value)),
6008 force_operand (SUBREG_REG (value),
6009 NULL_RTX)),
6010 GET_MODE (SUBREG_REG (value)),
6011 SUBREG_BYTE (value));
6012#endif
6013
bbf6f052
RK
6014 return value;
6015}
6016\f
bbf6f052 6017/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
6018 EXP can reference X, which is being modified. TOP_P is nonzero if this
6019 call is going to be used to determine whether we need a temporary
ff439b5f
CB
6020 for EXP, as opposed to a recursive call to this function.
6021
6022 It is always safe for this routine to return zero since it merely
6023 searches for optimization opportunities. */
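/* Editorial sketch, not part of the original source: if X is the
   DECL_RTL of a variable `a', then safe_from_p (X, <b + c>, 1) is
   nonzero, since that expression cannot reference `a'; for <a + 1>
   it is zero, and the caller must evaluate into a temporary rather
   than into X.  As the comment above notes, returning zero is always
   conservative.  */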
bbf6f052 6024
8f17b5c5 6025int
e5e809f4 6026safe_from_p (x, exp, top_p)
bbf6f052
RK
6027 rtx x;
6028 tree exp;
e5e809f4 6029 int top_p;
bbf6f052
RK
6030{
6031 rtx exp_rtl = 0;
6032 int i, nops;
1da68f56 6033 static tree save_expr_list;
bbf6f052 6034
6676e72f
RK
6035 if (x == 0
6036 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
6037 have no way of allocating temporaries of variable size
6038 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
6039 So we assume here that something at a higher level has prevented a
f4510f37 6040 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 6041 do this when X is BLKmode and when we are at the top level. */
d0f062fb 6042 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 6043 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
6044 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
6045 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
6046 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
6047 != INTEGER_CST)
1da68f56
RK
6048 && GET_MODE (x) == BLKmode)
6049 /* If X is in the outgoing argument area, it is always safe. */
6050 || (GET_CODE (x) == MEM
6051 && (XEXP (x, 0) == virtual_outgoing_args_rtx
6052 || (GET_CODE (XEXP (x, 0)) == PLUS
6053 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
bbf6f052
RK
6054 return 1;
6055
6056 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
6057 find the underlying pseudo. */
6058 if (GET_CODE (x) == SUBREG)
6059 {
6060 x = SUBREG_REG (x);
6061 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6062 return 0;
6063 }
6064
1da68f56
RK
6065 /* A SAVE_EXPR might appear many times in the expression passed to the
6066 top-level safe_from_p call, and if it has a complex subexpression,
6067 examining it multiple times could result in a combinatorial explosion.
6068 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
6069 with optimization took about 28 minutes to compile -- even though it was
6070 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
6071 and turn that off when we are done. We keep a list of the SAVE_EXPRs
6072 we have processed. Note that the only test of top_p was above. */
6073
6074 if (top_p)
6075 {
6076 int rtn;
6077 tree t;
6078
6079 save_expr_list = 0;
6080
6081 rtn = safe_from_p (x, exp, 0);
6082
6083 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
6084 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
6085
6086 return rtn;
6087 }
bbf6f052 6088
1da68f56 6089 /* Now look at our tree code and possibly recurse. */
bbf6f052
RK
6090 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
6091 {
6092 case 'd':
a9772b60 6093 exp_rtl = DECL_RTL_IF_SET (exp);
bbf6f052
RK
6094 break;
6095
6096 case 'c':
6097 return 1;
6098
6099 case 'x':
6100 if (TREE_CODE (exp) == TREE_LIST)
f8d4be57
CE
6101 {
6102 while (1)
6103 {
6104 if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
6105 return 0;
6106 exp = TREE_CHAIN (exp);
6107 if (!exp)
6108 return 1;
6109 if (TREE_CODE (exp) != TREE_LIST)
6110 return safe_from_p (x, exp, 0);
6111 }
6112 }
ff439b5f
CB
6113 else if (TREE_CODE (exp) == ERROR_MARK)
6114 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
6115 else
6116 return 0;
6117
bbf6f052
RK
6118 case '2':
6119 case '<':
f8d4be57
CE
6120 if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
6121 return 0;
6122 /* FALLTHRU */
6123
6124 case '1':
6125 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
6126
6127 case 'e':
6128 case 'r':
6129 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
6130 the expression. If it is set, we conflict iff we are that rtx or
6131 both are in memory. Otherwise, we check all operands of the
6132 expression recursively. */
6133
6134 switch (TREE_CODE (exp))
6135 {
6136 case ADDR_EXPR:
70072ed9
RK
6137 /* If the operand is static or we are static, we can't conflict.
6138 Likewise if we don't conflict with the operand at all. */
6139 if (staticp (TREE_OPERAND (exp, 0))
6140 || TREE_STATIC (exp)
6141 || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
6142 return 1;
6143
6144 /* Otherwise, the only way this can conflict is if we are taking
 6145 the address of a DECL whose address is part of X, which is
6146 very rare. */
6147 exp = TREE_OPERAND (exp, 0);
6148 if (DECL_P (exp))
6149 {
6150 if (!DECL_RTL_SET_P (exp)
6151 || GET_CODE (DECL_RTL (exp)) != MEM)
6152 return 0;
6153 else
6154 exp_rtl = XEXP (DECL_RTL (exp), 0);
6155 }
6156 break;
bbf6f052
RK
6157
6158 case INDIRECT_REF:
1da68f56
RK
6159 if (GET_CODE (x) == MEM
6160 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
6161 get_alias_set (exp)))
bbf6f052
RK
6162 return 0;
6163 break;
6164
6165 case CALL_EXPR:
f9808f81
MM
6166 /* Assume that the call will clobber all hard registers and
6167 all of memory. */
6168 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
6169 || GET_CODE (x) == MEM)
6170 return 0;
bbf6f052
RK
6171 break;
6172
6173 case RTL_EXPR:
3bb5826a
RK
6174 /* If a sequence exists, we would have to scan every instruction
6175 in the sequence to see if it was safe. This is probably not
6176 worthwhile. */
6177 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
6178 return 0;
6179
3bb5826a 6180 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
6181 break;
6182
6183 case WITH_CLEANUP_EXPR:
6ad7895a 6184 exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052
RK
6185 break;
6186
5dab5552 6187 case CLEANUP_POINT_EXPR:
e5e809f4 6188 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 6189
bbf6f052
RK
6190 case SAVE_EXPR:
6191 exp_rtl = SAVE_EXPR_RTL (exp);
ff439b5f
CB
6192 if (exp_rtl)
6193 break;
6194
1da68f56
RK
6195 /* If we've already scanned this, don't do it again. Otherwise,
6196 show we've scanned it and record for clearing the flag if we're
6197 going on. */
6198 if (TREE_PRIVATE (exp))
6199 return 1;
ff439b5f 6200
1da68f56
RK
6201 TREE_PRIVATE (exp) = 1;
6202 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
ff59bfe6 6203 {
1da68f56
RK
6204 TREE_PRIVATE (exp) = 0;
6205 return 0;
ff59bfe6 6206 }
1da68f56
RK
6207
6208 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
ff439b5f 6209 return 1;
bbf6f052 6210
8129842c
RS
6211 case BIND_EXPR:
6212 /* The only operand we look at is operand 1. The rest aren't
6213 part of the expression. */
e5e809f4 6214 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 6215
bbf6f052 6216 case METHOD_CALL_EXPR:
4fe9b91c 6217 /* This takes an rtx argument, but shouldn't appear here. */
bbf6f052 6218 abort ();
3a94c984 6219
e9a25f70
JL
6220 default:
6221 break;
bbf6f052
RK
6222 }
6223
6224 /* If we have an rtx, we do not need to scan our operands. */
6225 if (exp_rtl)
6226 break;
6227
8f17b5c5 6228 nops = first_rtl_op (TREE_CODE (exp));
bbf6f052
RK
6229 for (i = 0; i < nops; i++)
6230 if (TREE_OPERAND (exp, i) != 0
e5e809f4 6231 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052 6232 return 0;
8f17b5c5
MM
6233
6234 /* If this is a language-specific tree code, it may require
6235 special handling. */
dbbbbf3b
JDA
6236 if ((unsigned int) TREE_CODE (exp)
6237 >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
ac79cd5a 6238 && !(*lang_hooks.safe_from_p) (x, exp))
8f17b5c5 6239 return 0;
bbf6f052
RK
6240 }
6241
6242 /* If we have an rtl, find any enclosed object. Then see if we conflict
6243 with it. */
6244 if (exp_rtl)
6245 {
6246 if (GET_CODE (exp_rtl) == SUBREG)
6247 {
6248 exp_rtl = SUBREG_REG (exp_rtl);
6249 if (GET_CODE (exp_rtl) == REG
6250 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
6251 return 0;
6252 }
6253
6254 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
1da68f56 6255 are memory and they conflict. */
bbf6f052
RK
6256 return ! (rtx_equal_p (x, exp_rtl)
6257 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
21117a17 6258 && true_dependence (exp_rtl, VOIDmode, x,
1da68f56 6259 rtx_addr_varies_p)));
bbf6f052
RK
6260 }
6261
6262 /* If we reach here, it is safe. */
6263 return 1;
6264}
6265
01c8a7c8
RK
6266/* Subroutine of expand_expr: return rtx if EXP is a
6267 variable or parameter; else return 0. */
6268
6269static rtx
6270var_rtx (exp)
6271 tree exp;
6272{
6273 STRIP_NOPS (exp);
6274 switch (TREE_CODE (exp))
6275 {
6276 case PARM_DECL:
6277 case VAR_DECL:
6278 return DECL_RTL (exp);
6279 default:
6280 return 0;
6281 }
6282}
dbecbbe4
JL
6283
6284#ifdef MAX_INTEGER_COMPUTATION_MODE
400500c4 6285
dbecbbe4
JL
6286void
6287check_max_integer_computation_mode (exp)
3a94c984 6288 tree exp;
dbecbbe4 6289{
5f652c07 6290 enum tree_code code;
dbecbbe4
JL
6291 enum machine_mode mode;
6292
5f652c07
JM
6293 /* Strip any NOPs that don't change the mode. */
6294 STRIP_NOPS (exp);
6295 code = TREE_CODE (exp);
6296
71bca506
JL
6297 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
6298 if (code == NOP_EXPR
6299 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6300 return;
6301
dbecbbe4
JL
6302 /* First check the type of the overall operation. We need only look at
6303 unary, binary and relational operations. */
6304 if (TREE_CODE_CLASS (code) == '1'
6305 || TREE_CODE_CLASS (code) == '2'
6306 || TREE_CODE_CLASS (code) == '<')
6307 {
6308 mode = TYPE_MODE (TREE_TYPE (exp));
6309 if (GET_MODE_CLASS (mode) == MODE_INT
6310 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6311 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6312 }
6313
6314 /* Check operand of a unary op. */
6315 if (TREE_CODE_CLASS (code) == '1')
6316 {
6317 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6318 if (GET_MODE_CLASS (mode) == MODE_INT
6319 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6320 internal_error ("unsupported wide integer operation");
dbecbbe4 6321 }
3a94c984 6322
dbecbbe4
JL
6323 /* Check operands of a binary/comparison op. */
6324 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
6325 {
6326 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
6327 if (GET_MODE_CLASS (mode) == MODE_INT
6328 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6329 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6330
6331 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
6332 if (GET_MODE_CLASS (mode) == MODE_INT
6333 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6334 internal_error ("unsupported wide integer operation");
dbecbbe4
JL
6335 }
6336}
6337#endif
14a774a9 6338\f
0d4903b8
RK
6339/* Return the highest power of two that EXP is known to be a multiple of.
6340 This is used in updating alignment of MEMs in array references. */
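/* Editorial note, not part of the original source: for an INTEGER_CST
   the code below isolates the lowest set bit with c0 & -c0, e.g.
   24 & -24 == 8 in two's complement.  For an expression like
   i * 12 + 4, the MULT case yields 1 * 4 == 4 and the PLUS case
   MIN (4, 4) == 4, so a MEM addressed by it may be assumed to be
   4-byte aligned.  */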
6341
9ceca302 6342static unsigned HOST_WIDE_INT
0d4903b8
RK
6343highest_pow2_factor (exp)
6344 tree exp;
6345{
9ceca302 6346 unsigned HOST_WIDE_INT c0, c1;
0d4903b8
RK
6347
6348 switch (TREE_CODE (exp))
6349 {
6350 case INTEGER_CST:
e0f1be5c
JJ
6351 /* We can find the lowest bit that's a one. If the low
6352 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6353 We need to handle this case since we can find it in a COND_EXPR,
 6354 a MIN_EXPR, or a MAX_EXPR. If the constant overflows, we have an
6355 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
3a531a8b 6356 later ICE. */
e0f1be5c 6357 if (TREE_CONSTANT_OVERFLOW (exp))
1ed1b4fb 6358 return BIGGEST_ALIGNMENT;
e0f1be5c 6359 else
0d4903b8 6360 {
e0f1be5c
JJ
6361 /* Note: tree_low_cst is intentionally not used here,
6362 we don't care about the upper bits. */
6363 c0 = TREE_INT_CST_LOW (exp);
6364 c0 &= -c0;
6365 return c0 ? c0 : BIGGEST_ALIGNMENT;
0d4903b8
RK
6366 }
6367 break;
6368
65a07688 6369 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
0d4903b8
RK
6370 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6371 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6372 return MIN (c0, c1);
6373
6374 case MULT_EXPR:
6375 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6376 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6377 return c0 * c1;
6378
6379 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6380 case CEIL_DIV_EXPR:
65a07688
RK
6381 if (integer_pow2p (TREE_OPERAND (exp, 1))
6382 && host_integerp (TREE_OPERAND (exp, 1), 1))
6383 {
6384 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6385 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6386 return MAX (1, c0 / c1);
6387 }
6388 break;
0d4903b8
RK
6389
6390 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
65a07688 6391 case SAVE_EXPR: case WITH_RECORD_EXPR:
0d4903b8
RK
6392 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6393
65a07688
RK
6394 case COMPOUND_EXPR:
6395 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6396
0d4903b8
RK
6397 case COND_EXPR:
6398 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6399 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6400 return MIN (c0, c1);
6401
6402 default:
6403 break;
6404 }
6405
6406 return 1;
6407}
818c0c94
RH
6408
6409/* Similar, except that it is known that the expression must be a multiple
6410 of the alignment of TYPE. */
6411
9ceca302 6412static unsigned HOST_WIDE_INT
818c0c94
RH
6413highest_pow2_factor_for_type (type, exp)
6414 tree type;
6415 tree exp;
6416{
9ceca302 6417 unsigned HOST_WIDE_INT type_align, factor;
818c0c94
RH
6418
6419 factor = highest_pow2_factor (exp);
6420 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6421 return MAX (factor, type_align);
6422}
0d4903b8 6423\f
f47e9b4e
RK
6424/* Return an object on the placeholder list that matches EXP, a
6425 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
738cc472 6426 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
70072ed9
RK
 6427 tree.def. If no such object is found, return 0. If PLIST is nonzero,
 6428 it points to a starting position in the placeholder list (zero means
738cc472
RK
 6429 the start of the list); on return, a pointer into the placeholder
 6430 list at which the object was found is stored through it. */
f47e9b4e
RK
6431
6432tree
6433find_placeholder (exp, plist)
6434 tree exp;
6435 tree *plist;
6436{
6437 tree type = TREE_TYPE (exp);
6438 tree placeholder_expr;
6439
738cc472
RK
6440 for (placeholder_expr
6441 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6442 placeholder_expr != 0;
f47e9b4e
RK
6443 placeholder_expr = TREE_CHAIN (placeholder_expr))
6444 {
6445 tree need_type = TYPE_MAIN_VARIANT (type);
6446 tree elt;
6447
6448 /* Find the outermost reference that is of the type we want. If none,
6449 see if any object has a type that is a pointer to the type we
6450 want. */
6451 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6452 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6453 || TREE_CODE (elt) == COND_EXPR)
6454 ? TREE_OPERAND (elt, 1)
6455 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6456 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6457 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6458 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6459 ? TREE_OPERAND (elt, 0) : 0))
6460 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6461 {
6462 if (plist)
6463 *plist = placeholder_expr;
6464 return elt;
6465 }
6466
6467 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6468 elt
6469 = ((TREE_CODE (elt) == COMPOUND_EXPR
6470 || TREE_CODE (elt) == COND_EXPR)
6471 ? TREE_OPERAND (elt, 1)
6472 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6473 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6474 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6475 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6476 ? TREE_OPERAND (elt, 0) : 0))
6477 if (POINTER_TYPE_P (TREE_TYPE (elt))
6478 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6479 == need_type))
6480 {
6481 if (plist)
6482 *plist = placeholder_expr;
6483 return build1 (INDIRECT_REF, need_type, elt);
6484 }
6485 }
6486
70072ed9 6487 return 0;
6488 }
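/* Editor's note (illustrative sketch, not part of the original
   source): a typical caller passes a cursor so it can both find the
   object and later pop the matching list entry, e.g.

       tree placeholder_expr = 0;
       tree object = find_placeholder (exp, &placeholder_expr);

   On return, PLACEHOLDER_EXPR points at the list cell that matched;
   the PLACEHOLDER_EXPR case of expand_expr below uses it exactly
   this way.  */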
6489\f
6490 /* expand_expr: generate code for computing expression EXP.
6491 An rtx for the computed value is returned. The value is never null.
6492 In the case of a void EXP, const0_rtx is returned.
6493
6494 The value may be stored in TARGET if TARGET is nonzero.
6495 TARGET is just a suggestion; callers must assume that
6496 the rtx returned may not be the same as TARGET.
6497
6498 If TARGET is CONST0_RTX, it means that the value will be ignored.
6499
6500 If TMODE is not VOIDmode, it suggests generating the
6501 result in mode TMODE. But this is done only when convenient.
6502 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6503 TMODE is just a suggestion; callers must assume that
6504 the rtx returned may not have mode TMODE.
6505
6506 Note that TARGET may have neither TMODE nor MODE. In that case, it
6507 probably will not be used.
6508
6509 If MODIFIER is EXPAND_SUM then when EXP is an addition
6510 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6511 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6512 products as above, or REG or MEM, or constant.
6513 Ordinarily in such cases we would output mul or add instructions
6514 and then return a pseudo reg containing the sum.
6515
6516 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6517 it also marks a label as absolutely required (it can't be dead).
26fcb35a 6518 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6519 This is used for outputting expressions used in initializers.
6520
6521 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6522 with a constant address even if that address is not normally legitimate.
6523 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6524
6525 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6526 a call parameter. Such targets require special care as we haven't yet
6527 marked TARGET so that it's safe from being trashed by libcalls. We
6528 don't want to use TARGET for anything but the final result;
6529 intermediate values must go elsewhere. Additionally, calls to
6530 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
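/* Editor's note (illustrative sketch, not part of the original
   source): because TARGET and TMODE are only suggestions, a caller
   that truly needs the value in TARGET must copy explicitly:

       temp = expand_expr (exp, target, mode, EXPAND_NORMAL);
       if (temp != target)
         emit_move_insn (target, temp);

   The recursive expansions below follow the same convention.  */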
6531
6532 rtx
6533 expand_expr (exp, target, tmode, modifier)
b3694847 6534 tree exp;
6535 rtx target;
6536 enum machine_mode tmode;
6537 enum expand_modifier modifier;
6538 {
b3694847 6539 rtx op0, op1, temp;
6540 tree type = TREE_TYPE (exp);
6541 int unsignedp = TREE_UNSIGNED (type);
6542 enum machine_mode mode;
6543 enum tree_code code = TREE_CODE (exp);
bbf6f052 6544 optab this_optab;
6545 rtx subtarget, original_target;
6546 int ignore;
6547 tree context;
6548
3a94c984 6549 /* Handle ERROR_MARK before anybody tries to access its type. */
85f3d674 6550 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6551 {
6552 op0 = CONST0_RTX (tmode);
6553 if (op0 != 0)
6554 return op0;
6555 return const0_rtx;
6556 }
6557
6558 mode = TYPE_MODE (type);
6559 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 6560 subtarget = get_subtarget (target);
6561 original_target = target;
6562 ignore = (target == const0_rtx
6563 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6564 || code == CONVERT_EXPR || code == REFERENCE_EXPR
ac79cd5a 6565 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
6566 && TREE_CODE (type) == VOID_TYPE));
6567
6568 /* If we are going to ignore this result, we need only do something
6569 if there is a side-effect somewhere in the expression. If there
6570 is, short-circuit the most common cases here. Note that we must
6571 not call expand_expr with anything but const0_rtx in case this
6572 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 6573
6574 if (ignore)
6575 {
6576 if (! TREE_SIDE_EFFECTS (exp))
6577 return const0_rtx;
6578
6579 /* Ensure we reference a volatile object even if value is ignored, but
6580 don't do this if all we are doing is taking its address. */
6581 if (TREE_THIS_VOLATILE (exp)
6582 && TREE_CODE (exp) != FUNCTION_DECL
6583 && mode != VOIDmode && mode != BLKmode
6584 && modifier != EXPAND_CONST_ADDRESS)
dd27116b 6585 {
37a08a29 6586 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6587 if (GET_CODE (temp) == MEM)
6588 temp = copy_to_reg (temp);
6589 return const0_rtx;
6590 }
6591
6592 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6593 || code == INDIRECT_REF || code == BUFFER_REF)
6594 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6595 modifier);
6596
14a774a9 6597 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
b4e3fabb 6598 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
dd27116b 6599 {
6600 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6601 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6602 return const0_rtx;
6603 }
6604 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6605 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6606 /* If the second operand has no side effects, just evaluate
0f41302f 6607 the first. */
6608 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6609 modifier);
6610 else if (code == BIT_FIELD_REF)
6611 {
6612 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6613 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6614 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6615 return const0_rtx;
6616 }
37a08a29 6617
90764a87 6618 target = 0;
dd27116b 6619 }
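/* Editor's note (illustration, not part of the original source):
   the IGNORE path above handles expressions evaluated only for
   effect, such as the C statement

       (void) (a + b);

   Neither operand has side effects, so the whole expression
   short-circuits to const0_rtx; had an operand contained a call or
   an assignment, it would be expanded with const0_rtx as its target
   purely for those side effects.  */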
bbf6f052 6620
dbecbbe4 6621 #ifdef MAX_INTEGER_COMPUTATION_MODE
5f652c07 6622 /* Only check stuff here if the mode we want is different from the mode
fbe5a4a6 6623 of the expression; if it's the same, check_max_integer_computation_mode
6624 will handle it. Do we really need to check this stuff at all? */
6625
ce3c0b53 6626 if (target
5f652c07 6627 && GET_MODE (target) != mode
6628 && TREE_CODE (exp) != INTEGER_CST
6629 && TREE_CODE (exp) != PARM_DECL
ee06cc21 6630 && TREE_CODE (exp) != ARRAY_REF
b4e3fabb 6631 && TREE_CODE (exp) != ARRAY_RANGE_REF
6632 && TREE_CODE (exp) != COMPONENT_REF
6633 && TREE_CODE (exp) != BIT_FIELD_REF
6634 && TREE_CODE (exp) != INDIRECT_REF
6bcd94ae 6635 && TREE_CODE (exp) != CALL_EXPR
6636 && TREE_CODE (exp) != VAR_DECL
6637 && TREE_CODE (exp) != RTL_EXPR)
6638 {
6639 enum machine_mode mode = GET_MODE (target);
6640
6641 if (GET_MODE_CLASS (mode) == MODE_INT
6642 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6643 internal_error ("unsupported wide integer operation");
6644 }
6645
6646 if (tmode != mode
6647 && TREE_CODE (exp) != INTEGER_CST
ce3c0b53 6648 && TREE_CODE (exp) != PARM_DECL
ee06cc21 6649 && TREE_CODE (exp) != ARRAY_REF
b4e3fabb 6650 && TREE_CODE (exp) != ARRAY_RANGE_REF
6651 && TREE_CODE (exp) != COMPONENT_REF
6652 && TREE_CODE (exp) != BIT_FIELD_REF
6653 && TREE_CODE (exp) != INDIRECT_REF
ce3c0b53 6654 && TREE_CODE (exp) != VAR_DECL
6bcd94ae 6655 && TREE_CODE (exp) != CALL_EXPR
6ab46dff 6656 && TREE_CODE (exp) != RTL_EXPR
71bca506 6657 && GET_MODE_CLASS (tmode) == MODE_INT
dbecbbe4 6658 && tmode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6659 internal_error ("unsupported wide integer operation");
6660
6661 check_max_integer_computation_mode (exp);
6662 #endif
6663
6664 /* If will do cse, generate all results into pseudo registers
6665 since 1) that allows cse to find more things
6666 and 2) otherwise cse could produce an insn the machine
6667 cannot support. An exception is a CONSTRUCTOR into a multi-word
6668 MEM: storing directly into the MEM is much more likely to be efficient.
6669 Another is a CALL_EXPR which must return in memory. */
e44842fe 6670
bbf6f052 6671 if (! cse_not_expected && mode != BLKmode && target
c24ae149 6672 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6673 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6674 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
8403445a 6675 target = 0;
bbf6f052 6676
6677 switch (code)
6678 {
6679 case LABEL_DECL:
6680 {
6681 tree function = decl_function_context (exp);
6682 /* Handle using a label in a containing function. */
6683 if (function != current_function_decl
6684 && function != inline_function_decl && function != 0)
6685 {
6686 struct function *p = find_function_data (function);
6687 p->expr->x_forced_labels
6688 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6689 p->expr->x_forced_labels);
b552441b 6690 }
6691 else
6692 {
6693 if (modifier == EXPAND_INITIALIZER)
6694 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6695 label_rtx (exp),
6696 forced_labels);
6697 }
c5c76735 6698
6699 temp = gen_rtx_MEM (FUNCTION_MODE,
6700 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
6701 if (function != current_function_decl
6702 && function != inline_function_decl && function != 0)
6703 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6704 return temp;
b552441b 6705 }
6706
6707 case PARM_DECL:
1877be45 6708 if (!DECL_RTL_SET_P (exp))
6709 {
6710 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 6711 return CONST0_RTX (mode);
6712 }
6713
0f41302f 6714 /* ... fall through ... */
d6a5ac33 6715
bbf6f052 6716 case VAR_DECL:
6717 /* If a static var's type was incomplete when the decl was written,
6718 but the type is complete now, lay out the decl now. */
6719 if (DECL_SIZE (exp) == 0
6720 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
2dca20cd 6721 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
a46666a9 6722 layout_decl (exp, 0);
921b3427 6723
0f41302f 6724 /* ... fall through ... */
d6a5ac33 6725
2dca20cd 6726 case FUNCTION_DECL:
6727 case RESULT_DECL:
6728 if (DECL_RTL (exp) == 0)
6729 abort ();
d6a5ac33 6730
6731 /* Ensure the variable is marked as used even if it doesn't go through
6732 a parser. If it hasn't been used yet, write out an external
6733 definition. */
6734 if (! TREE_USED (exp))
6735 {
6736 assemble_external (exp);
6737 TREE_USED (exp) = 1;
6738 }
6739
6740 /* Show we haven't gotten RTL for this yet. */
6741 temp = 0;
6742
6743 /* Handle variables inherited from containing functions. */
6744 context = decl_function_context (exp);
6745
6746 /* We treat inline_function_decl as an alias for the current function
6747 because that is the inline function whose vars, types, etc.
6748 are being merged into the current function.
6749 See expand_inline_function. */
d6a5ac33 6750
6751 if (context != 0 && context != current_function_decl
6752 && context != inline_function_decl
6753 /* If var is static, we don't need a static chain to access it. */
6754 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6755 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6756 {
6757 rtx addr;
6758
6759 /* Mark as non-local and addressable. */
81feeecb 6760 DECL_NONLOCAL (exp) = 1;
6761 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6762 abort ();
dffd7eb6 6763 (*lang_hooks.mark_addressable) (exp);
6764 if (GET_CODE (DECL_RTL (exp)) != MEM)
6765 abort ();
6766 addr = XEXP (DECL_RTL (exp), 0);
6767 if (GET_CODE (addr) == MEM)
6768 addr
6769 = replace_equiv_address (addr,
6770 fix_lexical_addr (XEXP (addr, 0), exp));
6771 else
6772 addr = fix_lexical_addr (addr, exp);
3bdf5ad1 6773
792760b9 6774 temp = replace_equiv_address (DECL_RTL (exp), addr);
bbf6f052 6775 }
4af3895e 6776
6777 /* This is the case of an array whose size is to be determined
6778 from its initializer, while the initializer is still being parsed.
6779 See expand_decl. */
d6a5ac33 6780
6781 else if (GET_CODE (DECL_RTL (exp)) == MEM
6782 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
792760b9 6783 temp = validize_mem (DECL_RTL (exp));
6784
6785 /* If DECL_RTL is memory, we are in the normal case and either
6786 the address is not valid or it is not a register and -fforce-addr
6787 is specified, get the address into a register. */
6788
6789 else if (GET_CODE (DECL_RTL (exp)) == MEM
6790 && modifier != EXPAND_CONST_ADDRESS
6791 && modifier != EXPAND_SUM
6792 && modifier != EXPAND_INITIALIZER
6793 && (! memory_address_p (DECL_MODE (exp),
6794 XEXP (DECL_RTL (exp), 0))
6795 || (flag_force_addr
6796 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6797 temp = replace_equiv_address (DECL_RTL (exp),
6798 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 6799
dc6d66b3 6800 /* If we got something, return it. But first, set the alignment
04956a1a 6801 if the address is a register. */
6802 if (temp != 0)
6803 {
6804 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
bdb429a5 6805 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6806
6807 return temp;
6808 }
6809
6810 /* If the mode of DECL_RTL does not match that of the decl, it
6811 must be a promoted value. We return a SUBREG of the wanted mode,
6812 but mark it so that we know that it was already extended. */
6813
6814 if (GET_CODE (DECL_RTL (exp)) == REG
7254c5fa 6815 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
1499e0a8 6816 {
6817 /* Get the signedness used for this variable. Ensure we get the
6818 same mode we got when the variable was declared. */
78911e8b 6819 if (GET_MODE (DECL_RTL (exp))
0fb7aeda 6820 != promote_mode (type, DECL_MODE (exp), &unsignedp,
e8dcd824 6821 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6822 abort ();
6823
ddef6bc7 6824 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
1499e0a8 6825 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6826 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6827 return temp;
6828 }
6829
6830 return DECL_RTL (exp);
6831
6832 case INTEGER_CST:
d8a50944 6833 temp = immed_double_const (TREE_INT_CST_LOW (exp),
05bccae2 6834 TREE_INT_CST_HIGH (exp), mode);
bbf6f052 6835
6836 /* ??? If overflow is set, fold will have done an incomplete job,
6837 which can result in (plus xx (const_int 0)), which can get
6838 simplified by validate_replace_rtx during virtual register
6839 instantiation, which can result in unrecognizable insns.
6840 Avoid this by forcing all overflows into registers. */
6841 if (TREE_CONSTANT_OVERFLOW (exp)
6842 && modifier != EXPAND_INITIALIZER)
6843 temp = force_reg (mode, temp);
6844
6845 return temp;
6846
6847 case VECTOR_CST:
6848 return const_vector_from_tree (exp);
6849
bbf6f052 6850 case CONST_DECL:
8403445a 6851 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6852
6853 case REAL_CST:
6854 /* If optimized, generate immediate CONST_DOUBLE
6855 which will be turned into memory by reload if necessary.
6856
6857 We used to force a register so that loop.c could see it. But
6858 this does not allow gen_* patterns to perform optimizations with
6859 the constants. It also produces two insns in cases like "x = 1.0;".
6860 On most machines, floating-point constants are not permitted in
6861 many insns, so we'd end up copying it to a register in any case.
6862
6863 Now, we do the copying in expand_binop, if appropriate. */
6864 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6865 TYPE_MODE (TREE_TYPE (exp)));
6866
6867 case COMPLEX_CST:
6868 case STRING_CST:
6869 if (! TREE_CST_RTL (exp))
bd7cf17e 6870 output_constant_def (exp, 1);
bbf6f052
RK
6871
6872 /* TREE_CST_RTL probably contains a constant address.
6873 On RISC machines where a constant address isn't valid,
6874 make some insns to get that address into a register. */
6875 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6876 && modifier != EXPAND_CONST_ADDRESS
6877 && modifier != EXPAND_INITIALIZER
6878 && modifier != EXPAND_SUM
6879 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6880 || (flag_force_addr
6881 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
6882 return replace_equiv_address (TREE_CST_RTL (exp),
6883 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6884 return TREE_CST_RTL (exp);
6885
bf1e5319 6886 case EXPR_WITH_FILE_LOCATION:
6887 {
6888 rtx to_return;
3b304f5b 6889 const char *saved_input_filename = input_filename;
6890 int saved_lineno = lineno;
6891 input_filename = EXPR_WFL_FILENAME (exp);
6892 lineno = EXPR_WFL_LINENO (exp);
6893 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6894 emit_line_note (input_filename, lineno);
6ad7895a 6895 /* Possibly avoid switching back and forth here. */
b0ca54af 6896 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6897 input_filename = saved_input_filename;
6898 lineno = saved_lineno;
6899 return to_return;
6900 }
bf1e5319 6901
6902 case SAVE_EXPR:
6903 context = decl_function_context (exp);
d6a5ac33 6904
6905 /* If this SAVE_EXPR was at global context, assume we are an
6906 initialization function and move it into our context. */
6907 if (context == 0)
6908 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6909
6910 /* We treat inline_function_decl as an alias for the current function
6911 because that is the inline function whose vars, types, etc.
6912 are being merged into the current function.
6913 See expand_inline_function. */
6914 if (context == current_function_decl || context == inline_function_decl)
6915 context = 0;
6916
6917 /* If this is non-local, handle it. */
6918 if (context)
6919 {
6920 /* The following call just exists to abort if the context is
6921 not of a containing function. */
6922 find_function_data (context);
6923
6924 temp = SAVE_EXPR_RTL (exp);
6925 if (temp && GET_CODE (temp) == REG)
6926 {
f29a2bd1 6927 put_var_into_stack (exp, /*rescan=*/true);
6928 temp = SAVE_EXPR_RTL (exp);
6929 }
6930 if (temp == 0 || GET_CODE (temp) != MEM)
6931 abort ();
6932 return
6933 replace_equiv_address (temp,
6934 fix_lexical_addr (XEXP (temp, 0), exp));
6935 }
6936 if (SAVE_EXPR_RTL (exp) == 0)
6937 {
6938 if (mode == VOIDmode)
6939 temp = const0_rtx;
6940 else
6941 temp = assign_temp (build_qualified_type (type,
6942 (TYPE_QUALS (type)
6943 | TYPE_QUAL_CONST)),
6944 3, 0, 0);
1499e0a8 6945
bbf6f052 6946 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 6947 if (!optimize && GET_CODE (temp) == REG)
6948 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6949 save_expr_regs);
6950
6951 /* If the mode of TEMP does not match that of the expression, it
6952 must be a promoted value. We pass store_expr a SUBREG of the
6953 wanted mode but mark it so that we know that it was already
3ac1a319 6954 extended. */
6955
6956 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6957 {
ddef6bc7 6958 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
3ac1a319 6959 promote_mode (type, mode, &unsignedp, 0);
ff78f773 6960 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6961 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6962 }
6963
4c7a0be9 6964 if (temp == const0_rtx)
37a08a29 6965 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4c7a0be9 6966 else
6967 store_expr (TREE_OPERAND (exp, 0), temp,
6968 modifier == EXPAND_STACK_PARM ? 2 : 0);
6969
6970 TREE_USED (exp) = 1;
bbf6f052 6971 }
6972
6973 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6974 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 6975 but mark it so that we know that it was already extended. */
6976
6977 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6978 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6979 {
6980 /* Compute the signedness and make the proper SUBREG. */
6981 promote_mode (type, mode, &unsignedp, 0);
ddef6bc7 6982 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
1499e0a8 6983 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6984 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6985 return temp;
6986 }
6987
6988 return SAVE_EXPR_RTL (exp);
6989
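/* Editor's note (illustration, not part of the original source):
   a SAVE_EXPR guarantees single evaluation of a shared
   subexpression.  If a front end lowers, say, an absolute value as

       t = SAVE_EXPR (x);  result = t < 0 ? -t : t;

   the first expansion of T computes it into TEMP and records it in
   SAVE_EXPR_RTL; every later expansion reuses that RTL, wrapped in a
   promoted SUBREG when the modes differ, as handled above.  */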
6990 case UNSAVE_EXPR:
6991 {
6992 rtx temp;
6993 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6994 TREE_OPERAND (exp, 0)
6995 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
6996 return temp;
6997 }
6998
b50d17a1 6999 case PLACEHOLDER_EXPR:
e9a25f70 7000 {
f47e9b4e 7001 tree old_list = placeholder_list;
738cc472 7002 tree placeholder_expr = 0;
e9a25f70 7003
f47e9b4e 7004 exp = find_placeholder (exp, &placeholder_expr);
7005 if (exp == 0)
7006 abort ();
7007
f47e9b4e 7008 placeholder_list = TREE_CHAIN (placeholder_expr);
37a08a29 7009 temp = expand_expr (exp, original_target, tmode, modifier);
7010 placeholder_list = old_list;
7011 return temp;
e9a25f70 7012 }
b50d17a1 7013
7014 case WITH_RECORD_EXPR:
7015 /* Put the object on the placeholder list, expand our first operand,
7016 and pop the list. */
7017 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7018 placeholder_list);
7019 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7020 modifier);
7021 placeholder_list = TREE_CHAIN (placeholder_list);
7022 return target;
7023
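/* Editor's note (illustration, not part of the original source):
   the push/expand/pop above is what lets PLACEHOLDER_EXPRs inside
   operand 0 resolve to the record object in operand 1:

       placeholder_list:  (object . 0) -> older entries
       expand operand 0:  find_placeholder resolves any
                          PLACEHOLDER_EXPR to OBJECT
       placeholder_list:  older entries

   This arises for self-referential, variable-sized record types,
   e.g. in Ada.  */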
7024 case GOTO_EXPR:
7025 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7026 expand_goto (TREE_OPERAND (exp, 0));
7027 else
7028 expand_computed_goto (TREE_OPERAND (exp, 0));
7029 return const0_rtx;
7030
bbf6f052 7031 case EXIT_EXPR:
df4ae160 7032 expand_exit_loop_if_false (NULL,
e44842fe 7033 invert_truthvalue (TREE_OPERAND (exp, 0)));
7034 return const0_rtx;
7035
7036 case LABELED_BLOCK_EXPR:
7037 if (LABELED_BLOCK_BODY (exp))
b0832fe1 7038 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
30f7a378 7039 /* Should perhaps use expand_label, but this is simpler and safer. */
0a5fee32 7040 do_pending_stack_adjust ();
7041 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7042 return const0_rtx;
7043
7044 case EXIT_BLOCK_EXPR:
7045 if (EXIT_BLOCK_RETURN (exp))
ab87f8c8 7046 sorry ("returned value in block_exit_expr");
7047 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7048 return const0_rtx;
7049
bbf6f052 7050 case LOOP_EXPR:
0088fcb1 7051 push_temp_slots ();
bbf6f052 7052 expand_start_loop (1);
b0832fe1 7053 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
bbf6f052 7054 expand_end_loop ();
0088fcb1 7055 pop_temp_slots ();
7056
7057 return const0_rtx;
7058
7059 case BIND_EXPR:
7060 {
7061 tree vars = TREE_OPERAND (exp, 0);
7062
7063 /* Need to open a binding contour here because
e976b8b2 7064 if there are any cleanups they must be contained here. */
8e91754e 7065 expand_start_bindings (2);
bbf6f052 7066
7067 /* Mark the corresponding BLOCK for output in its proper place. */
7068 if (TREE_OPERAND (exp, 2) != 0
7069 && ! TREE_USED (TREE_OPERAND (exp, 2)))
43577e6b 7070 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7071
7072 /* If VARS have not yet been expanded, expand them now. */
7073 while (vars)
7074 {
19e7881c 7075 if (!DECL_RTL_SET_P (vars))
4977bab6 7076 expand_decl (vars);
7077 expand_decl_init (vars);
7078 vars = TREE_CHAIN (vars);
7079 }
7080
37a08a29 7081 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7082
7083 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7084
7085 return temp;
7086 }
7087
7088 case RTL_EXPR:
7089 if (RTL_EXPR_SEQUENCE (exp))
7090 {
7091 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7092 abort ();
2f937369 7093 emit_insn (RTL_EXPR_SEQUENCE (exp));
7094 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7095 }
7096 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7097 free_temps_for_rtl_expr (exp);
7098 return RTL_EXPR_RTL (exp);
7099
7100 case CONSTRUCTOR:
7101 /* If we don't need the result, just ensure we evaluate any
7102 subexpressions. */
7103 if (ignore)
7104 {
7105 tree elt;
37a08a29 7106
dd27116b 7107 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7108 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7109
7110 return const0_rtx;
7111 }
3207b172 7112
7113 /* All elts simple constants => refer to a constant in memory. But
7114 if this is a non-BLKmode mode, let it store a field at a time
7115 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 7116 fold. Likewise, if we have a target we can use, it is best to
7117 store directly into the target unless the type is large enough
7118 that memcpy will be used. If we are making an initializer and
7119 all operands are constant, put it in memory as well.
7120
7121 FIXME: Avoid trying to fill vector constructors piece-meal.
7122 Output them with output_constant_def below unless we're sure
7123 they're zeros. This should go away when vector initializers
7124 are treated like VECTOR_CST instead of arrays.
7125 */
dd27116b 7126 else if ((TREE_STATIC (exp)
3207b172 7127 && ((mode == BLKmode
e5e809f4 7128 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 7129 || TREE_ADDRESSABLE (exp)
19caa751 7130 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
3a94c984 7131 && (! MOVE_BY_PIECES_P
7132 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7133 TYPE_ALIGN (type)))
7134 && ((TREE_CODE (type) == VECTOR_TYPE
7135 && !is_zeros_p (exp))
7136 || ! mostly_zeros_p (exp)))))
dd27116b 7137 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052 7138 {
bd7cf17e 7139 rtx constructor = output_constant_def (exp, 1);
19caa751 7140
7141 if (modifier != EXPAND_CONST_ADDRESS
7142 && modifier != EXPAND_INITIALIZER
7143 && modifier != EXPAND_SUM)
7144 constructor = validize_mem (constructor);
7145
7146 return constructor;
7147 }
7148 else
7149 {
7150 /* Handle calls that pass values in multiple non-contiguous
7151 locations. The Irix 6 ABI has examples of this. */
e5e809f4 7152 if (target == 0 || ! safe_from_p (target, exp, 1)
7153 || GET_CODE (target) == PARALLEL
7154 || modifier == EXPAND_STACK_PARM)
7155 target
7156 = assign_temp (build_qualified_type (type,
7157 (TYPE_QUALS (type)
7158 | (TREE_READONLY (exp)
7159 * TYPE_QUAL_CONST))),
c24ae149 7160 0, TREE_ADDRESSABLE (exp), 1);
07604beb 7161
de8920be 7162 store_constructor (exp, target, 0, int_expr_size (exp));
7163 return target;
7164 }
7165
7166 case INDIRECT_REF:
7167 {
7168 tree exp1 = TREE_OPERAND (exp, 0);
7581a30f 7169 tree index;
7170 tree string = string_constant (exp1, &index);
7171
06eaa86f 7172 /* Try to optimize reads from const strings. */
7173 if (string
7174 && TREE_CODE (string) == STRING_CST
7175 && TREE_CODE (index) == INTEGER_CST
05bccae2 7176 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7177 && GET_MODE_CLASS (mode) == MODE_INT
7178 && GET_MODE_SIZE (mode) == 1
37a08a29 7179 && modifier != EXPAND_WRITE)
0fb7aeda 7180 return gen_int_mode (TREE_STRING_POINTER (string)
21ef78aa 7181 [TREE_INT_CST_LOW (index)], mode);
bbf6f052 7182
7183 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7184 op0 = memory_address (mode, op0);
38a448ca 7185 temp = gen_rtx_MEM (mode, op0);
3bdf5ad1 7186 set_mem_attributes (temp, exp, 0);
1125706f 7187
7188 /* If we are writing to this object and its type is a record with
7189 readonly fields, we must mark it as readonly so it will
7190 conflict with readonly references to those fields. */
37a08a29 7191 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7192 RTX_UNCHANGING_P (temp) = 1;
7193
7194 return temp;
7195 }
7196
7197 case ARRAY_REF:
7198 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7199 abort ();
bbf6f052 7200
bbf6f052 7201 {
7202 tree array = TREE_OPERAND (exp, 0);
7203 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7204 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
fed3cef0 7205 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
08293add 7206 HOST_WIDE_INT i;
b50d17a1 7207
7208 /* Optimize the special-case of a zero lower bound.
7209
7210 We convert the low_bound to sizetype to avoid some problems
7211 with constant folding. (E.g. suppose the lower bound is 1,
7212 and its mode is QI. Without the conversion, (ARRAY
7213 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fed3cef0 7214 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
d4c89139 7215
742920c7 7216 if (! integer_zerop (low_bound))
fed3cef0 7217 index = size_diffop (index, convert (sizetype, low_bound));
742920c7 7218
742920c7 7219 /* Fold an expression like: "foo"[2].
7220 This is not done in fold so it won't happen inside &.
7221 Don't fold if this is for wide characters since it's too
7222 difficult to do correctly and this is a very rare case. */
742920c7 7223
7224 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7225 && TREE_CODE (array) == STRING_CST
742920c7 7226 && TREE_CODE (index) == INTEGER_CST
05bccae2 7227 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7228 && GET_MODE_CLASS (mode) == MODE_INT
7229 && GET_MODE_SIZE (mode) == 1)
7230 return gen_int_mode (TREE_STRING_POINTER (array)
7231 [TREE_INT_CST_LOW (index)], mode);
bbf6f052 7232
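/* Editor's note (illustration, not part of the original source):
   the folding above turns a constant index into a constant byte.
   For the C expression

       "foo"[2]

   INDEX is 2, the bound check against TREE_STRING_LENGTH (4 here,
   counting the terminating nul) succeeds, and gen_int_mode returns
   'o' as a CONST_INT in MODE.  */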
7233 /* If this is a constant index into a constant array,
7234 just get the value from the array. Handle both the cases when
7235 we have an explicit constructor and when our operand is a variable
7236 that was declared const. */
4af3895e 7237
7238 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7239 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
05bccae2 7240 && TREE_CODE (index) == INTEGER_CST
3a94c984 7241 && 0 > compare_tree_int (index,
7242 list_length (CONSTRUCTOR_ELTS
7243 (TREE_OPERAND (exp, 0)))))
742920c7 7244 {
7245 tree elem;
7246
7247 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7248 i = TREE_INT_CST_LOW (index);
7249 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7250 ;
7251
7252 if (elem)
7253 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7254 modifier);
742920c7 7255 }
3a94c984 7256
742920c7 7257 else if (optimize >= 1
7258 && modifier != EXPAND_CONST_ADDRESS
7259 && modifier != EXPAND_INITIALIZER
7260 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7261 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7262 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7263 {
08293add 7264 if (TREE_CODE (index) == INTEGER_CST)
7265 {
7266 tree init = DECL_INITIAL (array);
7267
7268 if (TREE_CODE (init) == CONSTRUCTOR)
7269 {
665f2503 7270 tree elem;
742920c7 7271
05bccae2 7272 for (elem = CONSTRUCTOR_ELTS (init);
7273 (elem
7274 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7275 elem = TREE_CHAIN (elem))
7276 ;
7277
c54b0a5e 7278 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
742920c7 7279 return expand_expr (fold (TREE_VALUE (elem)), target,
37a08a29 7280 tmode, modifier);
7281 }
7282 else if (TREE_CODE (init) == STRING_CST
7283 && 0 > compare_tree_int (index,
7284 TREE_STRING_LENGTH (init)))
7285 {
7286 tree type = TREE_TYPE (TREE_TYPE (init));
7287 enum machine_mode mode = TYPE_MODE (type);
7288
7289 if (GET_MODE_CLASS (mode) == MODE_INT
7290 && GET_MODE_SIZE (mode) == 1)
7291 return gen_int_mode (TREE_STRING_POINTER (init)
7292 [TREE_INT_CST_LOW (index)], mode);
5c80f6e6 7293 }
7294 }
7295 }
7296 }
3a94c984 7297 /* Fall through. */
7298
7299 case COMPONENT_REF:
7300 case BIT_FIELD_REF:
b4e3fabb 7301 case ARRAY_RANGE_REF:
4af3895e 7302 /* If the operand is a CONSTRUCTOR, we can just extract the
7303 appropriate field if it is present. Don't do this if we have
7304 already written the data since we want to refer to that copy
7305 and varasm.c assumes that's what we'll do. */
b4e3fabb 7306 if (code == COMPONENT_REF
7307 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
7308 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
7309 {
7310 tree elt;
7311
7312 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7313 elt = TREE_CHAIN (elt))
7314 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7315 /* We can normally use the value of the field in the
7316 CONSTRUCTOR. However, if this is a bitfield in
7317 an integral mode that we can fit in a HOST_WIDE_INT,
7318 we must mask only the number of bits in the bitfield,
7319 since this is done implicitly by the constructor. If
7320 the bitfield does not meet either of those conditions,
7321 we can't do this optimization. */
7322 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7323 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7324 == MODE_INT)
7325 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7326 <= HOST_BITS_PER_WIDE_INT))))
7327 {
7328 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7329 && modifier == EXPAND_STACK_PARM)
7330 target = 0;
3a94c984 7331 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7332 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7333 {
7334 HOST_WIDE_INT bitsize
7335 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7336 enum machine_mode imode
7337 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
7338
7339 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7340 {
7341 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
22273300 7342 op0 = expand_and (imode, op0, op1, target);
7343 }
7344 else
7345 {
7346 tree count
e5e809f4
JL
7347 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7348 0);
7349
7350 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7351 target, 0);
7352 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7353 target, 0);
7354 }
7355 }
7356
7357 return op0;
7358 }
7359 }
7360
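/* Editor's note (illustration, not part of the original source):
   the masking above reproduces what storing through the constructor
   would have done.  For a hypothetical 3-bit field in a 32-bit
   integral mode:

       unsigned field:  op0 &= (1 << 3) - 1;
       signed field:    op0 = (op0 << 29) >> 29;

   where the shifts are arithmetic, so the second form propagates the
   field's sign bit through the upper 29 bits, matching the
   expand_shift pair above.  */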
7361 {
7362 enum machine_mode mode1;
770ae6cc 7363 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 7364 tree offset;
bbf6f052 7365 int volatilep = 0;
839c4796 7366 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
a06ef755 7367 &mode1, &unsignedp, &volatilep);
f47e9b4e 7368 rtx orig_op0;
bbf6f052 7369
7370 /* If we got back the original object, something is wrong. Perhaps
7371 we are evaluating an expression too early. In any event, don't
7372 infinitely recurse. */
7373 if (tem == exp)
7374 abort ();
7375
3d27140a 7376 /* If TEM's type is a union of variable size, pass TARGET to the inner
7377 computation, since it will need a temporary and TARGET is known
7378 to suffice. This occurs in unchecked conversion in Ada. */
3a94c984 7379
7380 orig_op0 = op0
7381 = expand_expr (tem,
7382 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7383 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7384 != INTEGER_CST)
8403445a 7385 && modifier != EXPAND_STACK_PARM
7386 ? target : NULL_RTX),
7387 VOIDmode,
7388 (modifier == EXPAND_INITIALIZER
7389 || modifier == EXPAND_CONST_ADDRESS
7390 || modifier == EXPAND_STACK_PARM)
f47e9b4e 7391 ? modifier : EXPAND_NORMAL);
bbf6f052 7392
8c8a8e34 7393 /* If this is a constant, put it into a register if it is a
14a774a9 7394 legitimate constant and OFFSET is 0 and memory if it isn't. */
7395 if (CONSTANT_P (op0))
7396 {
7397 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7398 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7399 && offset == 0)
7400 op0 = force_reg (mode, op0);
7401 else
7402 op0 = validize_mem (force_const_mem (mode, op0));
7403 }
7404
7405 if (offset != 0)
7406 {
7407 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7408 EXPAND_SUM);
7bb0943f 7409
a2725049 7410 /* If this object is in a register, put it into memory.
7411 This case can't occur in C, but can in Ada if we have
7412 unchecked conversion of an expression from a scalar type to
7413 an array or record type. */
7414 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7415 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7416 {
7417 /* If the operand is a SAVE_EXPR, we can deal with this by
7418 forcing the SAVE_EXPR into memory. */
7419 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
eeb35b45 7420 {
7421 put_var_into_stack (TREE_OPERAND (exp, 0),
7422 /*rescan=*/true);
7423 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7424 }
7425 else
7426 {
7427 tree nt
7428 = build_qualified_type (TREE_TYPE (tem),
7429 (TYPE_QUALS (TREE_TYPE (tem))
7430 | TYPE_QUAL_CONST));
7431 rtx memloc = assign_temp (nt, 1, 1, 1);
7432
7433 emit_move_insn (memloc, op0);
7434 op0 = memloc;
7435 }
7436 }
7437
7438 if (GET_CODE (op0) != MEM)
7439 abort ();
2d48c13d 7440
2d48c13d 7441 #ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 7442 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 7443 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7444 #else
7445 if (GET_MODE (offset_rtx) != ptr_mode)
7446 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7447 #endif
7448
14a774a9 7449 /* A constant address in OP0 can have VOIDmode; we must not try
efd07ca7 7450 to call force_reg for that case. Avoid that case. */
7451 if (GET_CODE (op0) == MEM
7452 && GET_MODE (op0) == BLKmode
efd07ca7 7453 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 7454 && bitsize != 0
3a94c984 7455 && (bitpos % bitsize) == 0
89752202 7456 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 7457 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
89752202 7458 {
e3c8ea67 7459 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7460 bitpos = 0;
7461 }
7462
7463 op0 = offset_address (op0, offset_rtx,
7464 highest_pow2_factor (offset));
7465 }
7466
7467 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7468 record its alignment as BIGGEST_ALIGNMENT. */
7469 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7470 && is_aligning_offset (offset, tem))
7471 set_mem_align (op0, BIGGEST_ALIGNMENT);
7472
7473 /* Don't forget about volatility even if this is a bitfield. */
7474 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7475 {
7476 if (op0 == orig_op0)
7477 op0 = copy_rtx (op0);
7478
7479 MEM_VOLATILE_P (op0) = 1;
7480 }
7481
7482 /* The following code doesn't handle CONCAT.
7483 Assume only bitpos == 0 can be used for CONCAT, due to
7484 one element arrays having the same mode as its element. */
7485 if (GET_CODE (op0) == CONCAT)
7486 {
7487 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7488 abort ();
7489 return op0;
7490 }
7491
7492 /* In cases where an aligned union has an unaligned object
7493 as a field, we might be extracting a BLKmode value from
7494 an integer-mode (e.g., SImode) object. Handle this case
7495 by doing the extract into an object as wide as the field
7496 (which we know to be the width of a basic mode), then
cb5fa0f8 7497 storing into memory, and changing the mode to BLKmode. */
bbf6f052 7498 if (mode1 == VOIDmode
ccc98036 7499 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7500 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7501 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7502 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7503 && modifier != EXPAND_CONST_ADDRESS
7504 && modifier != EXPAND_INITIALIZER)
7505 /* If the field isn't aligned enough to fetch as a memref,
7506 fetch it as a bit field. */
7507 || (mode1 != BLKmode
7508 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7509 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))
7510 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
7511 /* If the type and the field are a constant size and the
7512 size of the type isn't the same size as the bitfield,
7513 we must use bitfield operations. */
7514 || (bitsize >= 0
7515 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7516 == INTEGER_CST)
7517 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
a06ef755 7518 bitsize)))
bbf6f052 7519 {
7520 enum machine_mode ext_mode = mode;
7521
7522 if (ext_mode == BLKmode
7523 && ! (target != 0 && GET_CODE (op0) == MEM
7524 && GET_CODE (target) == MEM
7525 && bitpos % BITS_PER_UNIT == 0))
7526 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7527
7528 if (ext_mode == BLKmode)
7529 {
7530 /* In this case, BITPOS must start at a byte boundary and
7531 TARGET, if specified, must be a MEM. */
7532 if (GET_CODE (op0) != MEM
7533 || (target != 0 && GET_CODE (target) != MEM)
7534 || bitpos % BITS_PER_UNIT != 0)
7535 abort ();
7536
f4ef873c 7537 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7538 if (target == 0)
7539 target = assign_temp (type, 0, 1, 1);
7540
7541 emit_block_move (target, op0,
a06ef755 7542 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a 7543 / BITS_PER_UNIT),
7544 (modifier == EXPAND_STACK_PARM
7545 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3a94c984 7546
7547 return target;
7548 }
bbf6f052 7549
7550 op0 = validize_mem (op0);
7551
7552 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
04050c69 7553 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7554
7555 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7556 (modifier == EXPAND_STACK_PARM
7557 ? NULL_RTX : target),
7558 ext_mode, ext_mode,
bbf6f052 7559 int_size_in_bytes (TREE_TYPE (tem)));
7560
7561 /* If the result is a record type and BITSIZE is narrower than
7562 the mode of OP0, an integral mode, and this is a big endian
7563 machine, we must put the field into the high-order bits. */
7564 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7565 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
65a07688 7566 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7567 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7568 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7569 - bitsize),
7570 op0, 1);
7571
7572 if (mode == BLKmode)
7573 {
c3d32120 7574 rtx new = assign_temp (build_qualified_type
7575 ((*lang_hooks.types.type_for_mode)
7576 (ext_mode, 0),
c3d32120 7577 TYPE_QUAL_CONST), 0, 1, 1);
7578
7579 emit_move_insn (new, op0);
7580 op0 = copy_rtx (new);
7581 PUT_MODE (op0, BLKmode);
c3d32120 7582 set_mem_attributes (op0, exp, 1);
7583 }
7584
7585 return op0;
7586 }
7587
7588 /* If the result is BLKmode, use that to access the object
7589 now as well. */
7590 if (mode == BLKmode)
7591 mode1 = BLKmode;
7592
7593 /* Get a reference to just this component. */
7594 if (modifier == EXPAND_CONST_ADDRESS
7595 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
f1ec5147 7596 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
bbf6f052 7597 else
f4ef873c 7598 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
41472af8 7599
7600 if (op0 == orig_op0)
7601 op0 = copy_rtx (op0);
7602
3bdf5ad1 7603 set_mem_attributes (op0, exp, 0);
dc6d66b3 7604 if (GET_CODE (XEXP (op0, 0)) == REG)
a06ef755 7605 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7606
bbf6f052 7607 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 7608 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 7609 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 7610 || modifier == EXPAND_INITIALIZER)
bbf6f052 7611 return op0;
0d15e60c 7612 else if (target == 0)
bbf6f052 7613 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 7614
7615 convert_move (target, op0, unsignedp);
7616 return target;
7617 }
7618
7619 case VTABLE_REF:
7620 {
7621 rtx insn, before = get_last_insn (), vtbl_ref;
7622
7623 /* Evaluate the interior expression. */
7624 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7625 tmode, modifier);
7626
7627 /* Get or create an instruction off which to hang a note. */
7628 if (REG_P (subtarget))
7629 {
7630 target = subtarget;
7631 insn = get_last_insn ();
7632 if (insn == before)
7633 abort ();
7634 if (! INSN_P (insn))
7635 insn = prev_nonnote_insn (insn);
7636 }
7637 else
7638 {
7639 target = gen_reg_rtx (GET_MODE (subtarget));
7640 insn = emit_move_insn (target, subtarget);
7641 }
7642
7643 /* Collect the data for the note. */
7644 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7645 vtbl_ref = plus_constant (vtbl_ref,
7646 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7647 /* Discard the initial CONST that was added. */
7648 vtbl_ref = XEXP (vtbl_ref, 0);
7649
7650 REG_NOTES (insn)
7651 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7652
7653 return target;
7654 }
7655
7656 /* Intended for a reference to a buffer of a file-object in Pascal.
7657 But it's not certain that a special tree code will really be
7658 necessary for these. INDIRECT_REF might work for them. */
7659 case BUFFER_REF:
7660 abort ();
7661
7308a047 7662 case IN_EXPR:
7308a047 7663 {
7664 /* Pascal set IN expression.
7665
7666 Algorithm:
7667 rlo = set_low - (set_low%bits_per_word);
7668 the_word = set [ (index - rlo)/bits_per_word ];
7669 bit_index = index % bits_per_word;
7670 bitmask = 1 << bit_index;
7671 return !!(the_word & bitmask); */
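/* Editor's note (illustrative sketch, not part of the original
   source): the algorithm above, written as plain C over a byte array
   holding the set.  Note that the RTL emitted below actually works
   in BITS_PER_UNIT units rather than words:

       static int
       in_set (unsigned char *set, int set_low, int index)
       {
         int rlo = set_low - (set_low % BITS_PER_UNIT);
         unsigned char the_word = set[(index - rlo) / BITS_PER_UNIT];
         int bit_index = index % BITS_PER_UNIT;
         return (the_word >> bit_index) & 1;
       }

   plus the range checks against the set bounds emitted first.  */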
7672
7673 tree set = TREE_OPERAND (exp, 0);
7674 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 7675 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 7676 tree set_type = TREE_TYPE (set);
7677 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7678 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7679 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7680 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7681 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7682 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7683 rtx setaddr = XEXP (setval, 0);
7684 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7685 rtx rlow;
7686 rtx diff, quo, rem, addr, bit, result;
7308a047 7687
7688 /* If domain is empty, answer is no. Likewise if index is constant
7689 and out of bounds. */
51723711 7690 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 7691 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 7692 && tree_int_cst_lt (set_high_bound, set_low_bound))
7693 || (TREE_CODE (index) == INTEGER_CST
7694 && TREE_CODE (set_low_bound) == INTEGER_CST
7695 && tree_int_cst_lt (index, set_low_bound))
7696 || (TREE_CODE (set_high_bound) == INTEGER_CST
7697 && TREE_CODE (index) == INTEGER_CST
7698 && tree_int_cst_lt (set_high_bound, index))))
7699 return const0_rtx;
7700
7701 if (target == 0)
7702 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7703
7704 /* If we get here, we have to generate the code for both cases
7705 (in range and out of range). */
7706
7707 op0 = gen_label_rtx ();
7708 op1 = gen_label_rtx ();
7709
7710 if (! (GET_CODE (index_val) == CONST_INT
7711 && GET_CODE (lo_r) == CONST_INT))
7712 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7713 GET_MODE (index_val), iunsignedp, op1);
7714
7715 if (! (GET_CODE (index_val) == CONST_INT
7716 && GET_CODE (hi_r) == CONST_INT))
7717 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7718 GET_MODE (index_val), iunsignedp, op1);
7719
7720 /* Calculate the element number of bit zero in the first word
7721 of the set. */
7722 if (GET_CODE (lo_r) == CONST_INT)
17938e57 7723 rlow = GEN_INT (INTVAL (lo_r)
3a94c984 7724 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 7725 else
7726 rlow = expand_binop (index_mode, and_optab, lo_r,
7727 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 7728 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 7729
7730 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7731 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7732
7733 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 7734 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 7735 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7736 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7737
7308a047 7738 addr = memory_address (byte_mode,
7739 expand_binop (index_mode, add_optab, diff,
7740 setaddr, NULL_RTX, iunsignedp,
17938e57 7741 OPTAB_LIB_WIDEN));
d6a5ac33 7742
3a94c984 7743 /* Extract the bit we want to examine. */
7308a047 7744 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 7745 gen_rtx_MEM (byte_mode, addr),
7746 make_tree (TREE_TYPE (index), rem),
7747 NULL_RTX, 1);
7748 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7749 GET_MODE (target) == byte_mode ? target : 0,
7308a047 7750 1, OPTAB_LIB_WIDEN);
7751
7752 if (result != target)
7753 convert_move (target, result, 1);
7754
7755 /* Output the code to handle the out-of-range case. */
7756 emit_jump (op0);
7757 emit_label (op1);
7758 emit_move_insn (target, const0_rtx);
7759 emit_label (op0);
7760 return target;
7761 }
7762
bbf6f052 7763 case WITH_CLEANUP_EXPR:
6ad7895a 7764 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
bbf6f052 7765 {
6ad7895a 7766 WITH_CLEANUP_EXPR_RTL (exp)
37a08a29 7767 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7768 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7769 CLEANUP_EH_ONLY (exp));
e976b8b2 7770
bbf6f052 7771 /* That's it for this cleanup. */
6ad7895a 7772 TREE_OPERAND (exp, 1) = 0;
bbf6f052 7773 }
6ad7895a 7774 return WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052 7775
7776 case CLEANUP_POINT_EXPR:
7777 {
7778 /* Start a new binding layer that will keep track of all cleanup
7779 actions to be performed. */
8e91754e 7780 expand_start_bindings (2);
e976b8b2 7781
d93d4205 7782 target_temp_slot_level = temp_slot_level;
e976b8b2 7783
37a08a29 7784 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7785 /* If we're going to use this value, load it up now. */
7786 if (! ignore)
7787 op0 = force_not_mem (op0);
d93d4205 7788 preserve_temp_slots (op0);
e976b8b2 7789 expand_end_bindings (NULL_TREE, 0, 0);
7790 }
7791 return op0;
7792
7793 case CALL_EXPR:
7794 /* Check for a built-in function. */
7795 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7796 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7797 == FUNCTION_DECL)
bbf6f052 7798 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
0fb7aeda 7799 {
7800 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7801 == BUILT_IN_FRONTEND)
7802 return (*lang_hooks.expand_expr) (exp, original_target,
7803 tmode, modifier);
7804 else
7805 return expand_builtin (exp, target, subtarget, tmode, ignore);
7806 }
d6a5ac33 7807
8129842c 7808 return expand_call (exp, target, ignore);
7809
7810 case NON_LVALUE_EXPR:
7811 case NOP_EXPR:
7812 case CONVERT_EXPR:
7813 case REFERENCE_EXPR:
4a53008b 7814 if (TREE_OPERAND (exp, 0) == error_mark_node)
a592f288 7815 return const0_rtx;
4a53008b 7816
7817 if (TREE_CODE (type) == UNION_TYPE)
7818 {
7819 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9 7820
7821 /* If both input and output are BLKmode, this conversion isn't doing
7822 anything except possibly changing memory attribute. */
7823 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7824 {
7825 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7826 modifier);
7827
7828 result = copy_rtx (result);
7829 set_mem_attributes (result, exp, 0);
7830 return result;
7831 }
14a774a9 7832
bbf6f052 7833 if (target == 0)
1da68f56 7834 target = assign_temp (type, 0, 1, 1);
d6a5ac33 7835
7836 if (GET_CODE (target) == MEM)
7837 /* Store data into beginning of memory target. */
7838 store_expr (TREE_OPERAND (exp, 0),
8403445a
AM
7839 adjust_address (target, TYPE_MODE (valtype), 0),
7840 modifier == EXPAND_STACK_PARM ? 2 : 0);
1499e0a8 7841
7842 else if (GET_CODE (target) == REG)
7843 /* Store this field into a union of the proper type. */
7844 store_field (target,
7845 MIN ((int_size_in_bytes (TREE_TYPE
7846 (TREE_OPERAND (exp, 0)))
7847 * BITS_PER_UNIT),
8752c357 7848 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
14a774a9 7849 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
a06ef755 7850 VOIDmode, 0, type, 0);
7851 else
7852 abort ();
7853
7854 /* Return the entire union. */
7855 return target;
7856 }
d6a5ac33 7857
7858 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7859 {
7860 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
37a08a29 7861 modifier);
7862
7863 /* If the signedness of the conversion differs and OP0 is
7864 a promoted SUBREG, clear that indication since we now
7865 have to do the proper extension. */
7866 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7867 && GET_CODE (op0) == SUBREG)
7868 SUBREG_PROMOTED_VAR_P (op0) = 0;
7869
7870 return op0;
7871 }
7872
fdf473ae 7873 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7874 if (GET_MODE (op0) == mode)
7875 return op0;
12342f90 7876
7877 /* If OP0 is a constant, just convert it into the proper mode. */
7878 if (CONSTANT_P (op0))
7879 {
7880 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7881 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7882
0fb7aeda 7883 if (modifier == EXPAND_INITIALIZER)
7884 return simplify_gen_subreg (mode, op0, inner_mode,
7885 subreg_lowpart_offset (mode,
7886 inner_mode));
7887 else
7888 return convert_modes (mode, inner_mode, op0,
7889 TREE_UNSIGNED (inner_type));
7890 }
12342f90 7891
26fcb35a 7892 if (modifier == EXPAND_INITIALIZER)
38a448ca 7893 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 7894
bbf6f052 7895 if (target == 0)
7896 return
7897 convert_to_mode (mode, op0,
7898 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 7899 else
7900 convert_move (target, op0,
7901 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7902 return target;
7903
ed239f5a 7904 case VIEW_CONVERT_EXPR:
37a08a29 7905 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
7906
7907 /* If the input and output modes are both the same, we are done.
7908 Otherwise, if neither mode is BLKmode and both are within a word, we
7909 can use gen_lowpart. If neither is true, make sure the operand is
7910 in memory and convert the MEM to the new mode. */
7911 if (TYPE_MODE (type) == GET_MODE (op0))
7912 ;
7913 else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
7914 && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
7915 && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
7916 op0 = gen_lowpart (TYPE_MODE (type), op0);
c11c10d8 7917 else if (GET_CODE (op0) != MEM)
ed239f5a 7918 {
c11c10d8
RK
7919 /* If the operand is not a MEM, force it into memory. Since we
7920 are going to be be changing the mode of the MEM, don't call
7921 force_const_mem for constants because we don't allow pool
7922 constants to change mode. */
ed239f5a 7923 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
ed239f5a 7924
7925 if (TREE_ADDRESSABLE (exp))
7926 abort ();
ed239f5a 7927
7928 if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
7929 target
7930 = assign_stack_temp_for_type
7931 (TYPE_MODE (inner_type),
7932 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);
ed239f5a 7933
c11c10d8
RK
7934 emit_move_insn (target, op0);
7935 op0 = target;
ed239f5a
RK
7936 }
7937
c11c10d8
RK
7938 /* At this point, OP0 is in the correct mode. If the output type is such
7939 that the operand is known to be aligned, indicate that it is.
7940 Otherwise, we need only be concerned about alignment for non-BLKmode
7941 results. */
ed239f5a
RK
7942 if (GET_CODE (op0) == MEM)
7943 {
7944 op0 = copy_rtx (op0);
7945
ed239f5a
RK
7946 if (TYPE_ALIGN_OK (type))
7947 set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
7948 else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
7949 && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
7950 {
7951 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
65a07688
RK
7952 HOST_WIDE_INT temp_size
7953 = MAX (int_size_in_bytes (inner_type),
7954 (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
ed239f5a
RK
7955 rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
7956 temp_size, 0, type);
c4e59f51 7957 rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);
ed239f5a 7958
c11c10d8
RK
7959 if (TREE_ADDRESSABLE (exp))
7960 abort ();
7961
ed239f5a
RK
7962 if (GET_MODE (op0) == BLKmode)
7963 emit_block_move (new_with_op0_mode, op0,
44bb111a 7964 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
8403445a
AM
7965 (modifier == EXPAND_STACK_PARM
7966 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
ed239f5a
RK
7967 else
7968 emit_move_insn (new_with_op0_mode, op0);
7969
7970 op0 = new;
7971 }
0fb7aeda 7972
c4e59f51 7973 op0 = adjust_address (op0, TYPE_MODE (type), 0);
ed239f5a
RK
7974 }
7975
7976 return op0;
7977
    case PLUS_EXPR:
      this_optab = ! unsignedp && flag_trapv
                   && (GET_MODE_CLASS (mode) == MODE_INT)
                   ? addv_optab : add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
         something else, make sure we add the register to the constant and
         then to the other thing.  This case can occur during strength
         reduction and doing it this way will produce better code if the
         frame pointer or argument pointer is eliminated.

         fold-const.c will ensure that the constant is always in the inner
         PLUS_EXPR, so the only case we need to do anything about is if
         sp, ap, or fp is our second argument, in which case we must swap
         the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
          && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
          && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
              || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
        {
          tree t = TREE_OPERAND (exp, 1);

          TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
          TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
        }

      /* If the result is to be ptr_mode and we are adding an integer to
         something, we might be forming a constant.  So try to use
         plus_constant.  If it produces a sum and we can't accept it,
         use force_operand.  This allows P = &ARR[const] to generate
         efficient code on machines where a SYMBOL_REF is not a valid
         address.

         If this is an EXPAND_SUM call, always return the sum.  */
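      /* For example, if ARR is a global array of 4-byte ints, P = &ARR[4]
         can fold via plus_constant to
         (const (plus (symbol_ref "ARR") (const_int 16)))
         rather than emitting a run-time addition.  */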
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
          || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
        {
          if (modifier == EXPAND_STACK_PARM)
            target = 0;
          if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
              && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
              && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
            {
              rtx constant_part;

              op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
                                 EXPAND_SUM);
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
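              /* For example, with a 64-bit HOST_WIDE_INT and a 32-bit
                 target, the SImode constant 0x80000000 must become the
                 canonical (const_int -2147483648), since CONST_INTs are
                 stored sign-extended from their mode.  */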
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
              op1 = plus_constant (op1, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op1 = force_operand (op1, target);
              return op1;
            }

          else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
                   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
                   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
            {
              rtx constant_part;

              op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                                 (modifier == EXPAND_INITIALIZER
                                  ? EXPAND_INITIALIZER : EXPAND_SUM));
              if (! CONSTANT_P (op0))
                {
                  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                     VOIDmode, modifier);
                  /* Don't go to both_summands if modifier
                     says it's not right to return a PLUS.  */
                  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                    goto binop2;
                  goto both_summands;
                }
              /* Use immed_double_const to ensure that the constant is
                 truncated according to the mode of OP1, then sign extended
                 to a HOST_WIDE_INT.  Using the constant directly can result
                 in non-canonical RTL in a 64x32 cross compile.  */
              constant_part
                = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
                                      (HOST_WIDE_INT) 0,
                                      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
              op0 = plus_constant (op0, INTVAL (constant_part));
              if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
                op0 = force_operand (op0, target);
              return op0;
            }
        }

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        {
          op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
          op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
          if (op0 == const0_rtx)
            return op1;
          if (op1 == const0_rtx)
            return op0;
          goto binop2;
        }

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

      /* We come here from MINUS_EXPR when the second operand is a
         constant.  */
    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
          && CONSTANT_P (XEXP (op0, 1)))
        {
          temp = op0;
          op0 = op1;
          op1 = temp;
        }
      /* If adding to a sum including a constant,
         associate it to put the constant outside.  */
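      /* For example, (X + 4) + Y is reassociated here to (Y + X) + 4, so
         that a single constant displacement survives for address
         formation.  */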
      if (GET_CODE (op1) == PLUS
          && CONSTANT_P (XEXP (op1, 1)))
        {
          rtx constant_term = const0_rtx;

          temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
          if (temp != 0)
            op0 = temp;
          /* Ensure that MULT comes first if there is one.  */
          else if (GET_CODE (op0) == MULT)
            op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
          else
            op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

          /* Let's also eliminate constants from op0 if possible.  */
          op0 = eliminate_constant_term (op0, &constant_term);

          /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
             their sum should be a constant.  Form it into OP1, since the
             result we want will then be OP0 + OP1.  */

          temp = simplify_binary_operation (PLUS, mode, constant_term,
                                            XEXP (op1, 1));
          if (temp != 0)
            op1 = temp;
          else
            op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
        }

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
        temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);

    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
         symbolic constants.  Here we handle all cases when both operands
         are constant.  */
      /* Handle difference of two symbolic constants,
         for the sake of an initializer.  */
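      /* For instance, an initializer such as
           static long d = (char *) &b - (char *) &a;
         reaches here with two symbolic constants and can be emitted as a
         MINUS of SYMBOL_REFs (or folded by plus_constant when one side is
         a CONST_INT).  */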
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
          && really_constant_p (TREE_OPERAND (exp, 0))
          && really_constant_p (TREE_OPERAND (exp, 1)))
        {
          rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
                                 modifier);
          rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
                                 modifier);

          /* If the last operand is a CONST_INT, use plus_constant of
             the negated constant.  Else make the MINUS.  */
          if (GET_CODE (op1) == CONST_INT)
            return plus_constant (op0, - INTVAL (op1));
          else
            return gen_rtx_MINUS (mode, op0, op1);
        }

      this_optab = ! unsignedp && flag_trapv
                   && (GET_MODE_CLASS (mode) == MODE_INT)
                   ? subv_optab : sub_optab;

      /* No sense saving up arithmetic to be done
         if it's all in the wrong mode to form part of an address.
         And force_operand won't know whether to sign-extend or
         zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
          || mode != ptr_mode)
        goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

      /* Convert A - const to A + (-const).  */
      if (GET_CODE (op1) == CONST_INT)
        {
          op1 = negate_rtx (mode, op1);
          goto both_summands;
        }

      goto binop2;

    case MULT_EXPR:
      /* If first operand is constant, swap them.
         Thus the following special case checks need only
         check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
        {
          tree t1 = TREE_OPERAND (exp, 0);
          TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
          TREE_OPERAND (exp, 1) = t1;
        }

      /* Attempt to return something suitable for generating an
         indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
          && host_integerp (TREE_OPERAND (exp, 1), 0))
        {
          tree exp1 = TREE_OPERAND (exp, 1);

          op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
                             EXPAND_SUM);

          /* If we knew for certain that this is arithmetic for an array
             reference, and we knew the bounds of the array, then we could
             apply the distributive law across (PLUS X C) for constant C.
             Without such knowledge, we risk overflowing the computation
             when both X and C are large, but X+C isn't.  */
          /* ??? Could perhaps special-case EXP being unsigned and C being
             positive.  In that case we are certain that X+C is no smaller
             than X and so the transformed expression will overflow iff the
             original would have.  */

          if (GET_CODE (op0) != REG)
            op0 = force_operand (op0, NULL_RTX);
          if (GET_CODE (op0) != REG)
            op0 = copy_to_mode_reg (mode, op0);

          return gen_rtx_MULT (mode, op0,
                               gen_int_mode (tree_low_cst (exp1, 0),
                                             TYPE_MODE (TREE_TYPE (exp1))));
        }

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;

      if (modifier == EXPAND_STACK_PARM)
        target = 0;

      /* Check for multiplying things that have been extended
         from a narrower type.  If this machine supports multiplying
         in that narrower type with a result in the desired type,
         do it that way, and avoid the explicit type-conversion.  */
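      /* For example, (int) h1 * (int) h2 with HImode operands can use a
         widening pattern such as mulhisi3, where the machine provides
         one, producing the SImode product directly from the narrow
         inputs.  */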
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
          && TREE_CODE (type) == INTEGER_TYPE
          && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
              < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
          && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
               && int_fits_type_p (TREE_OPERAND (exp, 1),
                                   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
               /* Don't use a widening multiply if a shift will do.  */
               && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                    > HOST_BITS_PER_WIDE_INT)
                   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
              ||
              (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
               && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
               /* If both operands are extended, they must either both
                  be zero-extended or both be sign-extended.  */
               && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
                   ==
                   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
        {
          enum machine_mode innermode
            = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
          optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                               ? smul_widen_optab : umul_widen_optab);
          this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
                        ? umul_widen_optab : smul_widen_optab);
          if (mode == GET_MODE_WIDER_MODE (innermode))
            {
              if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
                {
                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     NULL_RTX, VOIDmode, 0);
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
                                       VOIDmode, 0);
                  else
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                       NULL_RTX, VOIDmode, 0);
                  goto binop2;
                }
              else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
                       && innermode == word_mode)
                {
                  rtx htem;
                  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     NULL_RTX, VOIDmode, 0);
                  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
                    op1 = convert_modes (innermode, mode,
                                         expand_expr (TREE_OPERAND (exp, 1),
                                                      NULL_RTX, VOIDmode, 0),
                                         unsignedp);
                  else
                    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
                                       NULL_RTX, VOIDmode, 0);
                  temp = expand_binop (mode, other_optab, op0, op1, target,
                                       unsignedp, OPTAB_LIB_WIDEN);
                  htem = expand_mult_highpart_adjust (innermode,
                                                      gen_highpart (innermode, temp),
                                                      op0, op1,
                                                      gen_highpart (innermode, temp),
                                                      unsignedp);
                  emit_move_insn (gen_highpart (innermode, temp), htem);
                  return temp;
                }
            }
        }
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM;
         then, if the divisor is constant, optimize the case where some
         terms of the dividend have coefficients divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      /* Emit a/b as a*(1/b).  Later we may be able to CSE the reciprocal,
         saving an expensive divide.  If not, combine will rebuild the
         original computation.  */
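      /* For example, x/d + y/d becomes x*(1/d) + y*(1/d), and CSE can
         then compute 1/d once, trading two divides for one divide and two
         multiplies.  This is only done under -funsafe-math-optimizations,
         since it can change rounding.  */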
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
          && TREE_CODE (type) == REAL_TYPE
          && !real_onep (TREE_OPERAND (exp, 0)))
        return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
                                   build (RDIV_EXPR, type,
                                          build_real (type, dconst1),
                                          TREE_OPERAND (exp, 1))),
                            target, tmode, modifier);
      this_optab = sdiv_optab;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
        target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
         So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
        op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                op0);
      expand_float (target, op0,
                    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      temp = expand_unop (mode,
                          ! unsignedp && flag_trapv
                          && (GET_MODE_CLASS (mode) == MODE_INT)
                          ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
        abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
        return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
         risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
        return op0;

      return expand_abs (mode, op0, target, unsignedp,
                         safe_from_p (target, TREE_OPERAND (exp, 0), 1));

    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
          || modifier == EXPAND_STACK_PARM
          || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
          || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          || GET_MODE (target) != mode
          || (GET_CODE (target) == REG
              && REGNO (target) < FIRST_PSEUDO_REGISTER))
        target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
         If that does not win, use a conditional jump to select the proper
         value.  */
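      /* The fallback emits roughly:  target = op0;
         if (target >= op1) goto lab;  target = op1;  lab:
         (with >= for MAX_EXPR and <= for MIN_EXPR).  */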
      this_optab = (TREE_UNSIGNED (type)
                    ? (code == MIN_EXPR ? umin_optab : umax_optab)
                    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
                           OPTAB_WIDEN);
      if (temp != 0)
        return temp;

      /* At this point, a MEM target is no longer useful; we will get better
         code without it.  */

      if (GET_CODE (target) == MEM)
        target = gen_reg_rtx (mode);

      if (target != op0)
        emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
         compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        {
          if (code == MAX_EXPR)
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          target, op1, NULL_RTX, op0);
          else
            do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
                                          op1, target, NULL_RTX, op0);
        }
      else
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
          do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
                                   unsignedp, mode, NULL_RTX, NULL_RTX,
                                   op0);
        }
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;

    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

    case CLZ_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, clz_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

    case CTZ_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ctz_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

    case POPCOUNT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, popcount_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

    case PARITY_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, parity_optab, op0, target, 1);
      if (temp == 0)
        abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
         Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
         and (a bitwise1 b) bitwise2 b (etc)
         but that is probably not worthwhile.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
         boolean values when we want in all cases to compute both of them.  In
         general it is fastest to do TRUTH_AND_EXPR by computing both operands
         as actual zero-or-1 values and then bitwise anding.  In cases where
         there cannot be any side effects, better code would be made by
         treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
         how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
        subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
                           unsignedp);

      /* Could determine the answer when only additive constants differ.
         Also, the addition of one can be handled by changing the
         condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      temp = do_store_flag (exp,
                            modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
                            tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
        return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
          && original_target
          && GET_CODE (original_target) == REG
          && (GET_MODE (original_target)
              == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        {
          temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
                              VOIDmode, 0);

          /* If temp is constant, we can just compute the result.  */
          if (GET_CODE (temp) == CONST_INT)
            {
              if (INTVAL (temp) != 0)
                emit_move_insn (target, const1_rtx);
              else
                emit_move_insn (target, const0_rtx);

              return target;
            }

          if (temp != original_target)
            {
              enum machine_mode mode1 = GET_MODE (temp);
              if (mode1 == VOIDmode)
                mode1 = tmode != VOIDmode ? tmode : mode;

              temp = copy_to_mode_reg (mode1, temp);
            }

          op1 = gen_label_rtx ();
          emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
                                   GET_MODE (temp), unsignedp, op1);
          emit_move_insn (temp, const1_rtx);
          emit_label (op1);
          return temp;
        }

      /* If no set-flag instruction, must generate a conditional
         store into a temporary variable.  Drop through
         and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
          && (target == 0
              || modifier == EXPAND_STACK_PARM
              || ! safe_from_p (target, exp, 1)
              /* Make sure we don't have a hard reg (such as function's return
                 value) live across basic blocks, if not optimizing.  */
              || (!optimize && GET_CODE (target) == REG
                  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
        target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
        emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
        emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;

    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
         only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
                           target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
        abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
                          (ignore ? const0_rtx : target),
                          VOIDmode, modifier);

    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
         conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
          && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
          && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
              == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
        {
          tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
          tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

          if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
               && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
              || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
                  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
              || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
                  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
              || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
                  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
            return expand_expr (build1 (NOP_EXPR, type,
                                        build (COND_EXPR, TREE_TYPE (iftrue),
                                               TREE_OPERAND (exp, 0),
                                               iftrue, iffalse)),
                                target, tmode, modifier);
        }

      {
        /* Note that COND_EXPRs whose type is a structure or union
           are required to be constructed to contain assignments of
           a temporary variable, so that we can evaluate them here
           for side effect only.  If type is void, we must do likewise.  */

        /* If an arm of the branch requires a cleanup,
           only that cleanup is performed.  */

        tree singleton = 0;
        tree binary_op = 0, unary_op = 0;

        /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
           convert it to our mode, if necessary.  */
        if (integer_onep (TREE_OPERAND (exp, 1))
            && integer_zerop (TREE_OPERAND (exp, 2))
            && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
          {
            if (ignore)
              {
                expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                             modifier);
                return const0_rtx;
              }

            if (modifier == EXPAND_STACK_PARM)
              target = 0;
            op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
            if (GET_MODE (op0) == mode)
              return op0;

            if (target == 0)
              target = gen_reg_rtx (mode);
            convert_move (target, op0, unsignedp);
            return target;
          }

        /* Check for X ? A + B : A.  If we have this, we can copy A to the
           output and conditionally add B.  Similarly for unary operations.
           Don't do this if X has side-effects because those side effects
           might affect A or B and the "?" operation is a sequence point in
           ANSI.  (operand_equal_p tests for side effects.)  */
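        /* For example, in  y = x ? a + b : a;  A is the singleton: A is
           copied to the output first, and B is added only on the taken
           branch, so only one arm needs code.  */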

        if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
            && operand_equal_p (TREE_OPERAND (exp, 2),
                                TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
          singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
                 && operand_equal_p (TREE_OPERAND (exp, 1),
                                     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
          singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
                 && operand_equal_p (TREE_OPERAND (exp, 2),
                                     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
          singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
        else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
                 && operand_equal_p (TREE_OPERAND (exp, 1),
                                     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
          singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

        /* If we are not to produce a result, we have no target.  Otherwise,
           if a target was specified use it; it will not be used as an
           intermediate target unless it is safe.  If no target, use a
           temporary.  */

        if (ignore)
          temp = 0;
        else if (modifier == EXPAND_STACK_PARM)
          temp = assign_temp (type, 0, 0, 1);
        else if (original_target
                 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
                     || (singleton && GET_CODE (original_target) == REG
                         && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
                         && original_target == var_rtx (singleton)))
                 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
                 && (! can_conditionally_move_p (mode)
                     || GET_CODE (original_target) == REG
                     || TREE_ADDRESSABLE (type))
#endif
                 && (GET_CODE (original_target) != MEM
                     || TREE_ADDRESSABLE (type)))
          temp = original_target;
        else if (TREE_ADDRESSABLE (type))
          abort ();
        else
          temp = assign_temp (type, 0, 0, 1);

        /* If we had X ? A + C : A, with C a constant power of 2, and we can
           do the test of X as a store-flag operation, do this as
           A + ((X != 0) << log C).  Similarly for other simple binary
           operators.  Only do for C == 1 if BRANCH_COST is low.  */
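        /* For example,  x ? a + 4 : a  can be computed branch-free as
           a + ((x != 0) << 2): the store-flag result is shifted by
           log2 (4) and added in.  */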
        if (temp && singleton && binary_op
            && (TREE_CODE (binary_op) == PLUS_EXPR
                || TREE_CODE (binary_op) == MINUS_EXPR
                || TREE_CODE (binary_op) == BIT_IOR_EXPR
                || TREE_CODE (binary_op) == BIT_XOR_EXPR)
            && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
                : integer_onep (TREE_OPERAND (binary_op, 1)))
            && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
          {
            rtx result;
            tree cond;
            optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
                            ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
                               ? addv_optab : add_optab)
                            : TREE_CODE (binary_op) == MINUS_EXPR
                            ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
                               ? subv_optab : sub_optab)
                            : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
                            : xor_optab);

            /* If we had X ? A : A + 1, do this as A + (X == 0).  */
            if (singleton == TREE_OPERAND (exp, 1))
              cond = invert_truthvalue (TREE_OPERAND (exp, 0));
            else
              cond = TREE_OPERAND (exp, 0);

            result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
                                           ? temp : NULL_RTX),
                                    mode, BRANCH_COST <= 1);

            if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
              result = expand_shift (LSHIFT_EXPR, mode, result,
                                     build_int_2 (tree_log2
                                                  (TREE_OPERAND
                                                   (binary_op, 1)),
                                                  0),
                                     (safe_from_p (temp, singleton, 1)
                                      ? temp : NULL_RTX), 0);

            if (result)
              {
                op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
                return expand_binop (mode, boptab, op1, result, temp,
                                     unsignedp, OPTAB_LIB_WIDEN);
              }
          }

        do_pending_stack_adjust ();
        NO_DEFER_POP;
        op0 = gen_label_rtx ();

        if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
          {
            if (temp != 0)
              {
                /* If the target conflicts with the other operand of the
                   binary op, we can't use it.  Also, we can't use the target
                   if it is a hard register, because evaluating the condition
                   might clobber it.  */
                if ((binary_op
                     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
                    || (GET_CODE (temp) == REG
                        && REGNO (temp) < FIRST_PSEUDO_REGISTER))
                  temp = gen_reg_rtx (mode);
                store_expr (singleton, temp,
                            modifier == EXPAND_STACK_PARM ? 2 : 0);
              }
            else
              expand_expr (singleton,
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            if (singleton == TREE_OPERAND (exp, 1))
              jumpif (TREE_OPERAND (exp, 0), op0);
            else
              jumpifnot (TREE_OPERAND (exp, 0), op0);

            start_cleanup_deferral ();
            if (binary_op && temp == 0)
              /* Just touch the other operand.  */
              expand_expr (TREE_OPERAND (binary_op, 1),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            else if (binary_op)
              store_expr (build (TREE_CODE (binary_op), type,
                                 make_tree (type, temp),
                                 TREE_OPERAND (binary_op, 1)),
                          temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
            else
              store_expr (build1 (TREE_CODE (unary_op), type,
                                  make_tree (type, temp)),
                          temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
            op1 = op0;
          }
        /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
           comparison operator.  If we have one of these cases, set the
           output to A, branch on A (cse will merge these two references),
           then set the output to FOO.  */
        else if (temp
                 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
                 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
                 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 1), 0)
                 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
                     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
                 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
          {
            if (GET_CODE (temp) == REG
                && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 1), temp,
                        modifier == EXPAND_STACK_PARM ? 2 : 0);
            jumpif (TREE_OPERAND (exp, 0), op0);

            start_cleanup_deferral ();
            store_expr (TREE_OPERAND (exp, 2), temp,
                        modifier == EXPAND_STACK_PARM ? 2 : 0);
            op1 = op0;
          }
        else if (temp
                 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
                 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
                 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
                                     TREE_OPERAND (exp, 2), 0)
                 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
                     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
                 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
          {
            if (GET_CODE (temp) == REG
                && REGNO (temp) < FIRST_PSEUDO_REGISTER)
              temp = gen_reg_rtx (mode);
            store_expr (TREE_OPERAND (exp, 2), temp,
                        modifier == EXPAND_STACK_PARM ? 2 : 0);
            jumpifnot (TREE_OPERAND (exp, 0), op0);

            start_cleanup_deferral ();
            store_expr (TREE_OPERAND (exp, 1), temp,
                        modifier == EXPAND_STACK_PARM ? 2 : 0);
            op1 = op0;
          }
        else
          {
            op1 = gen_label_rtx ();
            jumpifnot (TREE_OPERAND (exp, 0), op0);

            start_cleanup_deferral ();

            /* One branch of the cond can be void, if it never returns.
               For example, A ? throw : E.  */
            if (temp != 0
                && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
              store_expr (TREE_OPERAND (exp, 1), temp,
                          modifier == EXPAND_STACK_PARM ? 2 : 0);
            else
              expand_expr (TREE_OPERAND (exp, 1),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
            end_cleanup_deferral ();
            emit_queue ();
            emit_jump_insn (gen_jump (op1));
            emit_barrier ();
            emit_label (op0);
            start_cleanup_deferral ();
            if (temp != 0
                && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
              store_expr (TREE_OPERAND (exp, 2), temp,
                          modifier == EXPAND_STACK_PARM ? 2 : 0);
            else
              expand_expr (TREE_OPERAND (exp, 2),
                           ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
          }

        end_cleanup_deferral ();

        emit_queue ();
        emit_label (op1);
        OK_DEFER_POP;

        return temp;
      }

    case TARGET_EXPR:
      {
        /* Something needs to be initialized, but we didn't know
           where that thing was when building the tree.  For example,
           it could be the return value of a function, or a parameter
           to a function which is laid out in the stack, or a temporary
           variable which must be passed by reference.

           We guarantee that the expression will either be constructed
           or copied into our original target.  */

        tree slot = TREE_OPERAND (exp, 0);
        tree cleanups = NULL_TREE;
        tree exp1;

        if (TREE_CODE (slot) != VAR_DECL)
          abort ();

        if (! ignore)
          target = original_target;

        /* Set this here so that if we get a target that refers to a
           register variable that's already been used, put_reg_into_stack
           knows that it should fix up those uses.  */
        TREE_USED (slot) = 1;

        if (target == 0)
          {
            if (DECL_RTL_SET_P (slot))
              {
                target = DECL_RTL (slot);
                /* If we have already expanded the slot, don't do
                   it again.  (mrs)  */
                if (TREE_OPERAND (exp, 1) == NULL_TREE)
                  return target;
              }
            else
              {
                target = assign_temp (type, 2, 0, 1);
                /* All temp slots at this level must not conflict.  */
                preserve_temp_slots (target);
                SET_DECL_RTL (slot, target);
                if (TREE_ADDRESSABLE (slot))
                  put_var_into_stack (slot, /*rescan=*/false);

                /* Since SLOT is not known to the called function
                   to belong to its stack frame, we must build an explicit
                   cleanup.  This case occurs when we must build up a reference
                   to pass the reference as an argument.  In this case,
                   it is very likely that such a reference need not be
                   built here.  */

                if (TREE_OPERAND (exp, 2) == 0)
                  TREE_OPERAND (exp, 2)
                    = (*lang_hooks.maybe_build_cleanup) (slot);
                cleanups = TREE_OPERAND (exp, 2);
              }
          }
        else
          {
            /* This case does occur, when expanding a parameter which
               needs to be constructed on the stack.  The target
               is the actual stack address that we want to initialize.
               The function we call will perform the cleanup in this case.  */

            /* If we have already assigned it space, use that space,
               not target that we were passed in, as our target
               parameter is only a hint.  */
            if (DECL_RTL_SET_P (slot))
              {
                target = DECL_RTL (slot);
                /* If we have already expanded the slot, don't do
                   it again.  (mrs)  */
                if (TREE_OPERAND (exp, 1) == NULL_TREE)
                  return target;
              }
            else
              {
                SET_DECL_RTL (slot, target);
                /* If we must have an addressable slot, then make sure that
                   the RTL that we just stored in slot is OK.  */
                if (TREE_ADDRESSABLE (slot))
                  put_var_into_stack (slot, /*rescan=*/true);
              }
          }

        exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
        /* Mark it as expanded.  */
        TREE_OPERAND (exp, 1) = NULL_TREE;

        store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);

        expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));

        return target;
      }

    case INIT_EXPR:
      {
        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);

        temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
        return temp;
      }

    case MODIFY_EXPR:
      {
        /* If lhs is complex, expand calls in rhs before computing it.
           That's so we don't compute a pointer and save it over a
           call.  If lhs is simple, compute it first so we can give it
           as a target if the rhs is just a call.  This avoids an
           extra temp and copy and that prevents a partial-subsumption
           which makes bad code.  Actually we could treat
           component_ref's of vars like vars.  */

        tree lhs = TREE_OPERAND (exp, 0);
        tree rhs = TREE_OPERAND (exp, 1);

        temp = 0;

        /* Check for |= or &= of a bitfield of size one into another bitfield
           of size 1.  In this case, (unless we need the result of the
           assignment) we can do this more efficiently with a
           test followed by an assignment, if necessary.

           ??? At this point, we can't get a BIT_FIELD_REF here.  But if
           things change so we do, this code should be enhanced to
           support it.  */
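        /* For example, with one-bit fields,  s.a |= s.b;  becomes roughly
           if (s.b) s.a = 1;  -- a jump on the source bit and a constant
           store, avoiding a read-modify-write of the destination.  */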
        if (ignore
            && TREE_CODE (lhs) == COMPONENT_REF
            && (TREE_CODE (rhs) == BIT_IOR_EXPR
                || TREE_CODE (rhs) == BIT_AND_EXPR)
            && TREE_OPERAND (rhs, 0) == lhs
            && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
            && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
            && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
          {
            rtx label = gen_label_rtx ();

            do_jump (TREE_OPERAND (rhs, 1),
                     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
                     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
            expand_assignment (lhs, convert (TREE_TYPE (rhs),
                                             (TREE_CODE (rhs) == BIT_IOR_EXPR
                                              ? integer_one_node
                                              : integer_zero_node)),
                               0, 0);
            do_pending_stack_adjust ();
            emit_label (label);
            return const0_rtx;
          }

        temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);

        return temp;
      }

    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
        expand_null_return ();
      else
        expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);

    case ADDR_EXPR:
      if (modifier == EXPAND_STACK_PARM)
        target = 0;
      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
          && decl_function_context (TREE_OPERAND (exp, 0)) != 0
          && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
          && ! TREE_STATIC (exp))
        {
          op0 = trampoline_address (TREE_OPERAND (exp, 0));
          op0 = force_operand (op0, target);
        }
      /* If we are taking the address of something erroneous, just
         return a zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
        return const0_rtx;
      /* If we are taking the address of a constant and are at the
         top level, we have to use output_constant_def since we can't
         call force_const_mem at top level.  */
      else if (cfun == 0
               && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
                   || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
                       == 'c')))
        op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
      else
        {
          /* We make sure to pass const0_rtx down if we came in with
             ignore set, to avoid doing the cleanups twice for something.  */
          op0 = expand_expr (TREE_OPERAND (exp, 0),
                             ignore ? const0_rtx : NULL_RTX, VOIDmode,
                             (modifier == EXPAND_INITIALIZER
                              ? modifier : EXPAND_CONST_ADDRESS));

          /* If we are going to ignore the result, OP0 will have been set
             to const0_rtx, so just return it.  Don't get confused and
             think we are taking the address of the constant.  */
          if (ignore)
            return op0;

          /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
             clever and return a REG when given a MEM.  */
          op0 = protect_from_queue (op0, 1);

          /* We would like the object in memory.  If it is a constant, we can
             have it be statically allocated into memory.  For a non-constant,
             we need to allocate some memory and store the value into it.  */

          if (CONSTANT_P (op0))
            op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
                                   op0);
          else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
                   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
                   || GET_CODE (op0) == PARALLEL)
            {
              /* If the operand is a SAVE_EXPR, we can deal with this by
                 forcing the SAVE_EXPR into memory.  */
              if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
                {
                  put_var_into_stack (TREE_OPERAND (exp, 0),
                                      /*rescan=*/true);
                  op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
                }
              else
                {
                  /* If this object is in a register, it can't be BLKmode.  */
                  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
                  rtx memloc = assign_temp (inner_type, 1, 1, 1);

                  if (GET_CODE (op0) == PARALLEL)
                    /* Handle calls that pass values in multiple
                       non-contiguous locations.  The Irix 6 ABI has examples
                       of this.  */
                    emit_group_store (memloc, op0,
                                      int_size_in_bytes (inner_type));
                  else
                    emit_move_insn (memloc, op0);

                  op0 = memloc;
                }
            }

          if (GET_CODE (op0) != MEM)
            abort ();

          mark_temp_addr_taken (op0);
          if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
            {
              op0 = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
              if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
                  && mode == ptr_mode)
                op0 = convert_memory_address (ptr_mode, op0);
#endif
              return op0;
            }

          /* If OP0 is not aligned at least as much as the type requires, we
             need to make a temporary, copy OP0 to it, and take the address
             of the temporary.  We want to use the alignment of the type, not
             of the operand.  Note that this is incorrect for FUNCTION_TYPE,
             but the test for BLKmode means that can't happen.  The test for
             BLKmode is because we never make mis-aligned MEMs with
             non-BLKmode.

             We don't need to do this at all if the machine doesn't have
             strict alignment.  */
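          /* For example, taking the address of a BLKmode member of a
             packed structure on a strict-alignment machine: the member is
             copied into a suitably aligned stack temporary below, and the
             temporary's address is returned instead.  */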
          if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
              && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
                  > MEM_ALIGN (op0))
              && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
            {
              tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
              rtx new;

              if (TYPE_ALIGN_OK (inner_type))
                abort ();

              if (TREE_ADDRESSABLE (inner_type))
                {
                  /* We can't make a bitwise copy of this object, so fail.  */
                  error ("cannot take the address of an unaligned member");
                  return const0_rtx;
                }

              new = assign_stack_temp_for_type
                (TYPE_MODE (inner_type),
                 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
                 : int_size_in_bytes (inner_type),
                 1, build_qualified_type (inner_type,
                                          (TYPE_QUALS (inner_type)
                                           | TYPE_QUAL_CONST)));

              emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
                               (modifier == EXPAND_STACK_PARM
                                ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

              op0 = new;
            }

          op0 = force_operand (XEXP (op0, 0), target);
        }

      if (flag_force_addr
          && GET_CODE (op0) != REG
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM)
        op0 = force_reg (Pmode, op0);

      if (GET_CODE (op0) == REG
          && ! REG_USERVAR_P (op0))
        mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
          && mode == ptr_mode)
        op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;

    case ENTRY_VALUE_EXPR:
      abort ();

7308a047
RS
9304 /* COMPLEX type for Extended Pascal & Fortran */
9305 case COMPLEX_EXPR:
9306 {
9307 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 9308 rtx insns;
7308a047
RS
9309
9310 /* Get the rtx code of the operands. */
9311 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9312 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9313
9314 if (! target)
9315 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9316
6551fa4d 9317 start_sequence ();
7308a047
RS
9318
9319 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
9320 emit_move_insn (gen_realpart (mode, target), op0);
9321 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 9322
6551fa4d
JW
9323 insns = get_insns ();
9324 end_sequence ();
9325
7308a047 9326 /* Complex construction should appear as a single unit. */
6551fa4d
JW
9327 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9328 each with a separate pseudo as destination.
9329 It's not correct for flow to treat them as a unit. */
6d6e61ce 9330 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
9331 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9332 else
2f937369 9333 emit_insn (insns);
7308a047
RS
9334
9335 return target;
9336 }
9337
9338 case REALPART_EXPR:
2d7050fd
RS
9339 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9340 return gen_realpart (mode, op0);
3a94c984 9341
7308a047 9342 case IMAGPART_EXPR:
2d7050fd
RS
9343 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9344 return gen_imagpart (mode, op0);
7308a047
RS
9345
9346 case CONJ_EXPR:
9347 {
62acb978 9348 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 9349 rtx imag_t;
6551fa4d 9350 rtx insns;
3a94c984
KH
9351
9352 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7308a047
RS
9353
9354 if (! target)
d6a5ac33 9355 target = gen_reg_rtx (mode);
3a94c984 9356
6551fa4d 9357 start_sequence ();
7308a047
RS
9358
9359 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
9360 emit_move_insn (gen_realpart (partmode, target),
9361 gen_realpart (partmode, op0));
7308a047 9362
62acb978 9363 imag_t = gen_imagpart (partmode, target);
91ce572a 9364 temp = expand_unop (partmode,
0fb7aeda
KH
9365 ! unsignedp && flag_trapv
9366 && (GET_MODE_CLASS(partmode) == MODE_INT)
9367 ? negv_optab : neg_optab,
3a94c984 9368 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
9369 if (temp != imag_t)
9370 emit_move_insn (imag_t, temp);
9371
6551fa4d
JW
9372 insns = get_insns ();
9373 end_sequence ();
9374
3a94c984 9375 /* Conjugate should appear as a single unit
d6a5ac33 9376 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
9377 each with a separate pseudo as destination.
9378 It's not correct for flow to treat them as a unit. */
6d6e61ce 9379 if (GET_CODE (target) != CONCAT)
9380 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9381 else
2f937369 9382 emit_insn (insns);
9383
9384 return target;
9385 }
9386
9387 case TRY_CATCH_EXPR:
9388 {
9389 tree handler = TREE_OPERAND (exp, 1);
9390
9391 expand_eh_region_start ();
9392
9393 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9394
52a11cbf 9395 expand_eh_region_end_cleanup (handler);
9396
9397 return op0;
9398 }
9399
9400 case TRY_FINALLY_EXPR:
9401 {
9402 tree try_block = TREE_OPERAND (exp, 0);
9403 tree finally_block = TREE_OPERAND (exp, 1);
b335b813 9404
8ad8135a 9405 if (!optimize || unsafe_for_reeval (finally_block) > 1)
9406 {
9407 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9408 is not sufficient, so we cannot expand the block twice.
9409 So we play games with GOTO_SUBROUTINE_EXPR to let us
9410 expand the thing only once. */
9411 /* When not optimizing, we go ahead with this form since
9412 (1) user breakpoints operate more predictably without
9413 code duplication, and
9414 (2) we're not running any of the global optimizers
9415 that would explode in time/space with the highly
9416 connected CFG created by the indirect branching. */
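	    /* A sketch (GNU C, illustration only -- not literal RTL) of
	       the control flow this expansion produces:

	           <try_block>        // cleanup: return_link = &&resume;
	                              //          goto finally;
	         resume:
	           goto done;
	         finally:
	           <finally_block>
	           goto *return_link; // indirect branch back to resume point
	         done: ;

	       so FINALLY_BLOCK is expanded exactly once and is entered
	       through GOTO_SUBROUTINE_EXPR from every path that needs it. */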
9417
9418 rtx finally_label = gen_label_rtx ();
9419 rtx done_label = gen_label_rtx ();
9420 rtx return_link = gen_reg_rtx (Pmode);
9421 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9422 (tree) finally_label, (tree) return_link);
9423 TREE_SIDE_EFFECTS (cleanup) = 1;
9424
9425 /* Start a new binding layer that will keep track of all cleanup
9426 actions to be performed. */
9427 expand_start_bindings (2);
9428 target_temp_slot_level = temp_slot_level;
9429
9430 expand_decl_cleanup (NULL_TREE, cleanup);
9431 op0 = expand_expr (try_block, target, tmode, modifier);
9432
9433 preserve_temp_slots (op0);
9434 expand_end_bindings (NULL_TREE, 0, 0);
9435 emit_jump (done_label);
9436 emit_label (finally_label);
9437 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9438 emit_indirect_jump (return_link);
9439 emit_label (done_label);
9440 }
9441 else
9442 {
9443 expand_start_bindings (2);
9444 target_temp_slot_level = temp_slot_level;
b335b813 9445
9446 expand_decl_cleanup (NULL_TREE, finally_block);
9447 op0 = expand_expr (try_block, target, tmode, modifier);
b335b813 9448
9449 preserve_temp_slots (op0);
9450 expand_end_bindings (NULL_TREE, 0, 0);
9451 }
b335b813 9452
9453 return op0;
9454 }
9455
3a94c984 9456 case GOTO_SUBROUTINE_EXPR:
9457 {
9458 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9459 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9460 rtx return_address = gen_label_rtx ();
9461 emit_move_insn (return_link,
9462 gen_rtx_LABEL_REF (Pmode, return_address));
9463 emit_jump (subr);
9464 emit_label (return_address);
9465 return const0_rtx;
9466 }
9467
9468 case VA_ARG_EXPR:
9469 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9470
52a11cbf 9471 case EXC_PTR_EXPR:
86c99549 9472 return get_exception_pointer (cfun);
52a11cbf 9473
9474 case FDESC_EXPR:
9475 /* Function descriptors are not valid except for as
9476 initialization constants, and should not be expanded. */
9477 abort ();
9478
bbf6f052 9479 default:
c9d892a8 9480 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
9481 }
9482
9483 /* Here to do an ordinary binary operator, generating an instruction
9484 from the optab already placed in `this_optab'. */
9485 binop:
e5e809f4 9486 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
9487 subtarget = 0;
9488 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 9489 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052 9490 binop2:
9491 if (modifier == EXPAND_STACK_PARM)
9492 target = 0;
9493 temp = expand_binop (mode, this_optab, op0, op1, target,
9494 unsignedp, OPTAB_LIB_WIDEN);
9495 if (temp == 0)
9496 abort ();
9497 return temp;
9498}
b93a436e 9499\f
9500/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9501 when applied to the address of EXP produces an address known to be
9502 aligned more than BIGGEST_ALIGNMENT. */
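/* A hypothetical example of an offset this recognizes, assuming a
   BIGGEST_ALIGNMENT of 64 (bits): the tree for

       (- (intptr_t) &exp) & (128 - 1)

   i.e. a BIT_AND_EXPR whose second operand is 127 (one less than a
   power of 2 and larger than BIGGEST_ALIGNMENT) and whose first
   operand is a NEGATE_EXPR of EXP's address.  */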
9503
9504static int
9505is_aligning_offset (offset, exp)
9506 tree offset;
9507 tree exp;
9508{
9509 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9510 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9511 || TREE_CODE (offset) == NOP_EXPR
9512 || TREE_CODE (offset) == CONVERT_EXPR
9513 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9514 offset = TREE_OPERAND (offset, 0);
9515
 9516	  /* We must now have a BIT_AND_EXPR with a constant that is one less than
 9517	     a power of 2 and which is larger than BIGGEST_ALIGNMENT.  */
9518 if (TREE_CODE (offset) != BIT_AND_EXPR
9519 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9520 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
 9521	      || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0)
9522 return 0;
9523
9524 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9525 It must be NEGATE_EXPR. Then strip any more conversions. */
9526 offset = TREE_OPERAND (offset, 0);
9527 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9528 || TREE_CODE (offset) == NOP_EXPR
9529 || TREE_CODE (offset) == CONVERT_EXPR)
9530 offset = TREE_OPERAND (offset, 0);
9531
9532 if (TREE_CODE (offset) != NEGATE_EXPR)
9533 return 0;
9534
9535 offset = TREE_OPERAND (offset, 0);
9536 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9537 || TREE_CODE (offset) == NOP_EXPR
9538 || TREE_CODE (offset) == CONVERT_EXPR)
9539 offset = TREE_OPERAND (offset, 0);
9540
9541 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9542 whose type is the same as EXP. */
9543 return (TREE_CODE (offset) == ADDR_EXPR
9544 && (TREE_OPERAND (offset, 0) == exp
9545 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9546 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9547 == TREE_TYPE (exp)))));
9548}
9549\f
e0a2f705 9550/* Return the tree node if ARG corresponds to a string constant, or zero
cc2902df 9551   if it doesn't.  If we return nonzero, set *PTR_OFFSET to the offset
9552 in bytes within the string that ARG is accessing. The type of the
9553 offset will be `sizetype'. */
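/* For example (illustrative cases): for the tree of `&"hello"' this
   returns the STRING_CST "hello" with *PTR_OFFSET = size_zero_node,
   and for `&"hello" + n' it returns the same STRING_CST with
   *PTR_OFFSET set to `n' converted to `sizetype'.  */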
b93a436e 9554
28f4ec01 9555tree
9556string_constant (arg, ptr_offset)
9557 tree arg;
9558 tree *ptr_offset;
9559{
9560 STRIP_NOPS (arg);
9561
9562 if (TREE_CODE (arg) == ADDR_EXPR
9563 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9564 {
fed3cef0 9565 *ptr_offset = size_zero_node;
9566 return TREE_OPERAND (arg, 0);
9567 }
9568 else if (TREE_CODE (arg) == PLUS_EXPR)
9569 {
9570 tree arg0 = TREE_OPERAND (arg, 0);
9571 tree arg1 = TREE_OPERAND (arg, 1);
9572
9573 STRIP_NOPS (arg0);
9574 STRIP_NOPS (arg1);
9575
9576 if (TREE_CODE (arg0) == ADDR_EXPR
9577 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 9578 {
fed3cef0 9579 *ptr_offset = convert (sizetype, arg1);
b93a436e 9580 return TREE_OPERAND (arg0, 0);
bbf6f052 9581 }
9582 else if (TREE_CODE (arg1) == ADDR_EXPR
9583 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 9584 {
fed3cef0 9585 *ptr_offset = convert (sizetype, arg0);
b93a436e 9586 return TREE_OPERAND (arg1, 0);
bbf6f052 9587 }
b93a436e 9588 }
ca695ac9 9589
9590 return 0;
9591}
ca695ac9 9592\f
 9593/* Expand code for a post- or pre-increment or decrement
9594 and return the RTX for the result.
9595 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
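/* For example, for `i++' (POST == 1) the rtx returned holds the value
   of `i' before the increment, while for `++i' (POST == 0) it holds
   the incremented value.  (A sketch of the contract; the special
   cases below refine it.)  */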
1499e0a8 9596
9597static rtx
9598expand_increment (exp, post, ignore)
b3694847 9599 tree exp;
b93a436e 9600 int post, ignore;
ca695ac9 9601{
9602 rtx op0, op1;
9603 rtx temp, value;
9604 tree incremented = TREE_OPERAND (exp, 0);
9605 optab this_optab = add_optab;
9606 int icode;
9607 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9608 int op0_is_copy = 0;
9609 int single_insn = 0;
9610 /* 1 means we can't store into OP0 directly,
9611 because it is a subreg narrower than a word,
9612 and we don't dare clobber the rest of the word. */
9613 int bad_subreg = 0;
1499e0a8 9614
9615 /* Stabilize any component ref that might need to be
9616 evaluated more than once below. */
9617 if (!post
9618 || TREE_CODE (incremented) == BIT_FIELD_REF
9619 || (TREE_CODE (incremented) == COMPONENT_REF
9620 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9621 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9622 incremented = stabilize_reference (incremented);
9623 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9624 ones into save exprs so that they don't accidentally get evaluated
9625 more than once by the code below. */
9626 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9627 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9628 incremented = save_expr (incremented);
e9a25f70 9629
9630 /* Compute the operands as RTX.
9631 Note whether OP0 is the actual lvalue or a copy of it:
9632 I believe it is a copy iff it is a register or subreg
6d2f8887 9633 and insns were generated in computing it. */
e9a25f70 9634
b93a436e 9635 temp = get_last_insn ();
37a08a29 9636 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
e9a25f70 9637
9638 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9639 in place but instead must do sign- or zero-extension during assignment,
9640 so we copy it into a new register and let the code below use it as
9641 a copy.
e9a25f70 9642
 9643	     Note that we can safely modify this SUBREG since it is known not to be
9644 shared (it was made by the expand_expr call above). */
9645
9646 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9647 {
9648 if (post)
9649 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9650 else
9651 bad_subreg = 1;
9652 }
9653 else if (GET_CODE (op0) == SUBREG
9654 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9655 {
9656 /* We cannot increment this SUBREG in place. If we are
9657 post-incrementing, get a copy of the old value. Otherwise,
9658 just mark that we cannot increment in place. */
9659 if (post)
9660 op0 = copy_to_reg (op0);
9661 else
9662 bad_subreg = 1;
9663 }
9664
9665 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9666 && temp != get_last_insn ());
37a08a29 9667 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
1499e0a8 9668
9669 /* Decide whether incrementing or decrementing. */
9670 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9671 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9672 this_optab = sub_optab;
9673
9674 /* Convert decrement by a constant into a negative increment. */
9675 if (this_optab == sub_optab
9676 && GET_CODE (op1) == CONST_INT)
ca695ac9 9677 {
3a94c984 9678 op1 = GEN_INT (-INTVAL (op1));
b93a436e 9679 this_optab = add_optab;
ca695ac9 9680 }
1499e0a8 9681
91ce572a 9682 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
505ddab6 9683 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
91ce572a 9684
9685 /* For a preincrement, see if we can do this with a single instruction. */
9686 if (!post)
9687 {
9688 icode = (int) this_optab->handlers[(int) mode].insn_code;
9689 if (icode != (int) CODE_FOR_nothing
9690 /* Make sure that OP0 is valid for operands 0 and 1
9691 of the insn we want to queue. */
9692 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9693 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9694 && (*insn_data[icode].operand[2].predicate) (op1, mode))
9695 single_insn = 1;
9696 }
bbf6f052 9697
9698 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9699 then we cannot just increment OP0. We must therefore contrive to
9700 increment the original value. Then, for postincrement, we can return
9701 OP0 since it is a copy of the old value. For preincrement, expand here
9702 unless we can do it with a single insn.
bbf6f052 9703
9704 Likewise if storing directly into OP0 would clobber high bits
9705 we need to preserve (bad_subreg). */
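  /* Illustrative case: if `short s' is promoted into an SImode pseudo,
     `s++' cannot just add 1 to the SUBREG in place; instead we build
     the tree `s = s + 1' below and expand the assignment, which
     performs the required sign- or zero-extension.  */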
9706 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 9707 {
9708 /* This is the easiest way to increment the value wherever it is.
9709 Problems with multiple evaluation of INCREMENTED are prevented
9710 because either (1) it is a component_ref or preincrement,
9711 in which case it was stabilized above, or (2) it is an array_ref
9712 with constant index in an array in a register, which is
9713 safe to reevaluate. */
9714 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9715 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9716 ? MINUS_EXPR : PLUS_EXPR),
9717 TREE_TYPE (exp),
9718 incremented,
9719 TREE_OPERAND (exp, 1));
a358cee0 9720
9721 while (TREE_CODE (incremented) == NOP_EXPR
9722 || TREE_CODE (incremented) == CONVERT_EXPR)
9723 {
9724 newexp = convert (TREE_TYPE (incremented), newexp);
9725 incremented = TREE_OPERAND (incremented, 0);
9726 }
bbf6f052 9727
 9728	      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9729 return post ? op0 : temp;
9730 }
bbf6f052 9731
9732 if (post)
9733 {
9734 /* We have a true reference to the value in OP0.
9735 If there is an insn to add or subtract in this mode, queue it.
9736 Queueing the increment insn avoids the register shuffling
9737 that often results if we must increment now and first save
9738 the old value for subsequent use. */
bbf6f052 9739
9740#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9741 op0 = stabilize (op0);
9742#endif
41dfd40c 9743
9744 icode = (int) this_optab->handlers[(int) mode].insn_code;
9745 if (icode != (int) CODE_FOR_nothing
9746 /* Make sure that OP0 is valid for operands 0 and 1
9747 of the insn we want to queue. */
9748 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9749 && (*insn_data[icode].operand[1].predicate) (op0, mode))
b93a436e 9750 {
a995e389 9751 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9752 op1 = force_reg (mode, op1);
bbf6f052 9753
9754 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9755 }
9756 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9757 {
9758 rtx addr = (general_operand (XEXP (op0, 0), mode)
9759 ? force_reg (Pmode, XEXP (op0, 0))
9760 : copy_to_reg (XEXP (op0, 0)));
9761 rtx temp, result;
ca695ac9 9762
792760b9 9763 op0 = replace_equiv_address (op0, addr);
b93a436e 9764 temp = force_reg (GET_MODE (op0), op0);
a995e389 9765 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9766 op1 = force_reg (mode, op1);
ca695ac9 9767
9768 /* The increment queue is LIFO, thus we have to `queue'
9769 the instructions in reverse order. */
9770 enqueue_insn (op0, gen_move_insn (op0, temp));
9771 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9772 return result;
9773 }
9774 }
ca695ac9 9775
9776 /* Preincrement, or we can't increment with one simple insn. */
9777 if (post)
9778 /* Save a copy of the value before inc or dec, to return it later. */
9779 temp = value = copy_to_reg (op0);
9780 else
9781 /* Arrange to return the incremented value. */
9782 /* Copy the rtx because expand_binop will protect from the queue,
9783 and the results of that would be invalid for us to return
9784 if our caller does emit_queue before using our result. */
9785 temp = copy_rtx (value = op0);
bbf6f052 9786
b93a436e 9787 /* Increment however we can. */
37a08a29 9788 op1 = expand_binop (mode, this_optab, value, op1, op0,
b93a436e 9789 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
37a08a29 9790
9791 /* Make sure the value is stored into OP0. */
9792 if (op1 != op0)
9793 emit_move_insn (op0, op1);
5718612f 9794
9795 return temp;
9796}
9797\f
9798/* Generate code to calculate EXP using a store-flag instruction
9799 and return an rtx for the result. EXP is either a comparison
9800 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 9801
b93a436e 9802 If TARGET is nonzero, store the result there if convenient.
ca695ac9 9803
cc2902df 9804 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
b93a436e 9805 cheap.
ca695ac9 9806
9807 Return zero if there is no suitable set-flag instruction
9808 available on this machine.
ca695ac9 9809
9810 Once expand_expr has been called on the arguments of the comparison,
9811 we are committed to doing the store flag, since it is not safe to
9812 re-evaluate the expression. We emit the store-flag insn by calling
9813 emit_store_flag, but only expand the arguments if we have a reason
9814 to believe that emit_store_flag will be successful. If we think that
9815 it will, but it isn't, we have to simulate the store-flag with a
9816 set/jump/set sequence. */
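/* For instance, on a machine with an scc pattern, `r = (x < y)' can be
   emitted as a compare followed by a single set-on-condition insn
   (e.g. cmp + setl on the i386) instead of a conditional branch.  The
   exact insns are target-dependent; this is only an illustration.  */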
ca695ac9 9817
9818static rtx
9819do_store_flag (exp, target, mode, only_cheap)
9820 tree exp;
9821 rtx target;
9822 enum machine_mode mode;
9823 int only_cheap;
9824{
9825 enum rtx_code code;
9826 tree arg0, arg1, type;
9827 tree tem;
9828 enum machine_mode operand_mode;
9829 int invert = 0;
9830 int unsignedp;
9831 rtx op0, op1;
9832 enum insn_code icode;
9833 rtx subtarget = target;
381127e8 9834 rtx result, label;
ca695ac9 9835
9836 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9837 result at the end. We can't simply invert the test since it would
9838 have already been inverted if it were valid. This case occurs for
9839 some floating-point comparisons. */
ca695ac9 9840
9841 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9842 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 9843
9844 arg0 = TREE_OPERAND (exp, 0);
9845 arg1 = TREE_OPERAND (exp, 1);
9846
9847 /* Don't crash if the comparison was erroneous. */
9848 if (arg0 == error_mark_node || arg1 == error_mark_node)
9849 return const0_rtx;
9850
9851 type = TREE_TYPE (arg0);
9852 operand_mode = TYPE_MODE (type);
9853 unsignedp = TREE_UNSIGNED (type);
ca695ac9 9854
9855 /* We won't bother with BLKmode store-flag operations because it would mean
9856 passing a lot of information to emit_store_flag. */
9857 if (operand_mode == BLKmode)
9858 return 0;
ca695ac9 9859
9860 /* We won't bother with store-flag operations involving function pointers
9861 when function pointers must be canonicalized before comparisons. */
9862#ifdef HAVE_canonicalize_funcptr_for_compare
9863 if (HAVE_canonicalize_funcptr_for_compare
9864 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9865 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9866 == FUNCTION_TYPE))
9867 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9868 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9869 == FUNCTION_TYPE))))
9870 return 0;
9871#endif
9872
9873 STRIP_NOPS (arg0);
9874 STRIP_NOPS (arg1);
ca695ac9 9875
9876 /* Get the rtx comparison code to use. We know that EXP is a comparison
9877 operation of some type. Some comparisons against 1 and -1 can be
9878 converted to comparisons with zero. Do so here so that the tests
9879 below will be aware that we have a comparison with zero. These
9880 tests will not catch constants in the first operand, but constants
9881 are rarely passed as the first operand. */
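  /* For example, for signed X, `X < 1' is rewritten below as `X <= 0'
     and `X > -1' as `X >= 0', so the zero-comparison special cases
     later in this function also apply to them.  */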
ca695ac9 9882
9883 switch (TREE_CODE (exp))
9884 {
9885 case EQ_EXPR:
9886 code = EQ;
bbf6f052 9887 break;
9888 case NE_EXPR:
9889 code = NE;
bbf6f052 9890 break;
9891 case LT_EXPR:
9892 if (integer_onep (arg1))
9893 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
9894 else
9895 code = unsignedp ? LTU : LT;
ca695ac9 9896 break;
9897 case LE_EXPR:
9898 if (! unsignedp && integer_all_onesp (arg1))
9899 arg1 = integer_zero_node, code = LT;
9900 else
9901 code = unsignedp ? LEU : LE;
ca695ac9 9902 break;
9903 case GT_EXPR:
9904 if (! unsignedp && integer_all_onesp (arg1))
9905 arg1 = integer_zero_node, code = GE;
9906 else
9907 code = unsignedp ? GTU : GT;
9908 break;
9909 case GE_EXPR:
9910 if (integer_onep (arg1))
9911 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
9912 else
9913 code = unsignedp ? GEU : GE;
ca695ac9 9914 break;
9915
9916 case UNORDERED_EXPR:
9917 code = UNORDERED;
9918 break;
9919 case ORDERED_EXPR:
9920 code = ORDERED;
9921 break;
9922 case UNLT_EXPR:
9923 code = UNLT;
9924 break;
9925 case UNLE_EXPR:
9926 code = UNLE;
9927 break;
9928 case UNGT_EXPR:
9929 code = UNGT;
9930 break;
9931 case UNGE_EXPR:
9932 code = UNGE;
9933 break;
9934 case UNEQ_EXPR:
9935 code = UNEQ;
9936 break;
1eb8759b 9937
ca695ac9 9938 default:
b93a436e 9939 abort ();
bbf6f052 9940 }
bbf6f052 9941
9942 /* Put a constant second. */
9943 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
9944 {
9945 tem = arg0; arg0 = arg1; arg1 = tem;
9946 code = swap_condition (code);
ca695ac9 9947 }
bbf6f052 9948
9949 /* If this is an equality or inequality test of a single bit, we can
9950 do this by shifting the bit being tested to the low-order bit and
9951 masking the result with the constant 1. If the condition was EQ,
9952 we xor it with 1. This does not require an scc insn and is faster
9953 than an scc insn even if we have it. */
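  /* A sketch, assuming a 32-bit int: `(x & 8) != 0' becomes
     `(x >> 3) & 1', and `(x & 8) == 0' becomes `((x >> 3) ^ 1) & 1'.  */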
d39985fa 9954
9955 if ((code == NE || code == EQ)
9956 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
9957 && integer_pow2p (TREE_OPERAND (arg0, 1)))
9958 {
9959 tree inner = TREE_OPERAND (arg0, 0);
9960 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
9961 int ops_unsignedp;
bbf6f052 9962
9963 /* If INNER is a right shift of a constant and it plus BITNUM does
9964 not overflow, adjust BITNUM and INNER. */
ca695ac9 9965
9966 if (TREE_CODE (inner) == RSHIFT_EXPR
9967 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
9968 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
9969 && bitnum < TYPE_PRECISION (type)
9970 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
9971 bitnum - TYPE_PRECISION (type)))
ca695ac9 9972 {
9973 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
9974 inner = TREE_OPERAND (inner, 0);
ca695ac9 9975 }
ca695ac9 9976
9977 /* If we are going to be able to omit the AND below, we must do our
9978 operations as unsigned. If we must use the AND, we have a choice.
9979 Normally unsigned is faster, but for some machines signed is. */
9980 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
9981#ifdef LOAD_EXTEND_OP
9982 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
9983#else
9984 : 1
9985#endif
9986 );
bbf6f052 9987
296b4ed9 9988 if (! get_subtarget (subtarget)
a47fed55 9989 || GET_MODE (subtarget) != operand_mode
e5e809f4 9990 || ! safe_from_p (subtarget, inner, 1))
b93a436e 9991 subtarget = 0;
bbf6f052 9992
b93a436e 9993 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
bbf6f052 9994
b93a436e 9995 if (bitnum != 0)
681cb233 9996 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
b93a436e 9997 size_int (bitnum), subtarget, ops_unsignedp);
bbf6f052 9998
9999 if (GET_MODE (op0) != mode)
10000 op0 = convert_to_mode (mode, op0, ops_unsignedp);
bbf6f052 10001
10002 if ((code == EQ && ! invert) || (code == NE && invert))
10003 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10004 ops_unsignedp, OPTAB_LIB_WIDEN);
bbf6f052 10005
10006 /* Put the AND last so it can combine with more things. */
10007 if (bitnum != TYPE_PRECISION (type) - 1)
22273300 10008 op0 = expand_and (mode, op0, const1_rtx, subtarget);
bbf6f052 10009
10010 return op0;
10011 }
bbf6f052 10012
b93a436e 10013 /* Now see if we are likely to be able to do this. Return if not. */
1eb8759b 10014 if (! can_compare_p (code, operand_mode, ccp_store_flag))
b93a436e 10015 return 0;
1eb8759b 10016
10017 icode = setcc_gen_code[(int) code];
10018 if (icode == CODE_FOR_nothing
a995e389 10019 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
ca695ac9 10020 {
10021 /* We can only do this if it is one of the special cases that
10022 can be handled without an scc insn. */
10023 if ((code == LT && integer_zerop (arg1))
10024 || (! only_cheap && code == GE && integer_zerop (arg1)))
10025 ;
10026 else if (BRANCH_COST >= 0
10027 && ! only_cheap && (code == NE || code == EQ)
10028 && TREE_CODE (type) != REAL_TYPE
10029 && ((abs_optab->handlers[(int) operand_mode].insn_code
10030 != CODE_FOR_nothing)
10031 || (ffs_optab->handlers[(int) operand_mode].insn_code
10032 != CODE_FOR_nothing)))
10033 ;
10034 else
10035 return 0;
ca695ac9 10036 }
3a94c984 10037
296b4ed9 10038 if (! get_subtarget (target)
a47fed55 10039 || GET_MODE (subtarget) != operand_mode
e5e809f4 10040 || ! safe_from_p (subtarget, arg1, 1))
10041 subtarget = 0;
10042
10043 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10044 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10045
10046 if (target == 0)
10047 target = gen_reg_rtx (mode);
10048
 10049	  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
 10050	     because, if emit_store_flag does anything, it will succeed and
 10051	     OP0 and OP1 will not be used subsequently.  */
ca695ac9 10052
10053 result = emit_store_flag (target, code,
10054 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10055 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10056 operand_mode, unsignedp, 1);
ca695ac9 10057
10058 if (result)
10059 {
10060 if (invert)
10061 result = expand_binop (mode, xor_optab, result, const1_rtx,
10062 result, 0, OPTAB_LIB_WIDEN);
10063 return result;
ca695ac9 10064 }
bbf6f052 10065
10066 /* If this failed, we have to do this with set/compare/jump/set code. */
10067 if (GET_CODE (target) != REG
10068 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10069 target = gen_reg_rtx (GET_MODE (target));
10070
10071 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10072 result = compare_from_rtx (op0, op1, code, unsignedp,
a06ef755 10073 operand_mode, NULL_RTX);
10074 if (GET_CODE (result) == CONST_INT)
10075 return (((result == const0_rtx && ! invert)
10076 || (result != const0_rtx && invert))
10077 ? const0_rtx : const1_rtx);
ca695ac9 10078
10079 /* The code of RESULT may not match CODE if compare_from_rtx
10080 decided to swap its operands and reverse the original code.
10081
10082 We know that compare_from_rtx returns either a CONST_INT or
10083 a new comparison code, so it is safe to just extract the
10084 code from RESULT. */
10085 code = GET_CODE (result);
10086
10087 label = gen_label_rtx ();
10088 if (bcc_gen_fctn[(int) code] == 0)
10089 abort ();
0f41302f 10090
10091 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10092 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10093 emit_label (label);
bbf6f052 10094
b93a436e 10095 return target;
ca695ac9 10096}
b93a436e 10097\f
b93a436e 10098
10099/* Stubs in case we haven't got a casesi insn. */
10100#ifndef HAVE_casesi
10101# define HAVE_casesi 0
10102# define gen_casesi(a, b, c, d, e) (0)
10103# define CODE_FOR_casesi CODE_FOR_nothing
10104#endif
10105
10106/* If the machine does not have a case insn that compares the bounds,
10107 this means extra overhead for dispatch tables, which raises the
10108 threshold for using them. */
10109#ifndef CASE_VALUES_THRESHOLD
10110#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10111#endif /* CASE_VALUES_THRESHOLD */
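/* Roughly: with a casesi insn, a switch is considered for a dispatch
   table once it has at least 4 case labels; without one, at least 5,
   since the bounds check must then be synthesized separately.  (These
   are the defaults; targets may override CASE_VALUES_THRESHOLD.)  */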
10112
10113unsigned int
10114case_values_threshold ()
10115{
10116 return CASE_VALUES_THRESHOLD;
10117}
10118
10119/* Attempt to generate a casesi instruction. Returns 1 if successful,
10120 0 otherwise (i.e. if there is no casesi instruction). */
10121int
10122try_casesi (index_type, index_expr, minval, range,
10123 table_label, default_label)
10124 tree index_type, index_expr, minval, range;
10125 rtx table_label ATTRIBUTE_UNUSED;
10126 rtx default_label;
10127{
10128 enum machine_mode index_mode = SImode;
10129 int index_bits = GET_MODE_BITSIZE (index_mode);
10130 rtx op1, op2, index;
10131 enum machine_mode op_mode;
10132
10133 if (! HAVE_casesi)
10134 return 0;
10135
10136 /* Convert the index to SImode. */
10137 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10138 {
10139 enum machine_mode omode = TYPE_MODE (index_type);
10140 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10141
10142 /* We must handle the endpoints in the original mode. */
10143 index_expr = build (MINUS_EXPR, index_type,
10144 index_expr, minval);
10145 minval = integer_zero_node;
10146 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10147 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
a06ef755 10148 omode, 1, default_label);
10149 /* Now we can safely truncate. */
10150 index = convert_to_mode (index_mode, index, 0);
10151 }
10152 else
10153 {
10154 if (TYPE_MODE (index_type) != index_mode)
10155 {
10156 index_expr = convert ((*lang_hooks.types.type_for_size)
10157 (index_bits, 0), index_expr);
10158 index_type = TREE_TYPE (index_expr);
10159 }
10160
10161 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10162 }
10163 emit_queue ();
10164 index = protect_from_queue (index, 0);
10165 do_pending_stack_adjust ();
10166
10167 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10168 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10169 (index, op_mode))
10170 index = copy_to_mode_reg (op_mode, index);
e87b4f3f 10171
10172 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10173
10174 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10175 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10176 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10177 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10178 (op1, op_mode))
10179 op1 = copy_to_mode_reg (op_mode, op1);
10180
10181 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10182
10183 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10184 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10185 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10186 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10187 (op2, op_mode))
10188 op2 = copy_to_mode_reg (op_mode, op2);
10189
10190 emit_jump_insn (gen_casesi (index, op1, op2,
10191 table_label, default_label));
10192 return 1;
10193}
10194
10195/* Attempt to generate a tablejump instruction; same concept. */
10196#ifndef HAVE_tablejump
10197#define HAVE_tablejump 0
10198#define gen_tablejump(x, y) (0)
10199#endif
10200
10201/* Subroutine of the next function.
10202
10203 INDEX is the value being switched on, with the lowest value
10204 in the table already subtracted.
10205 MODE is its expected mode (needed if INDEX is constant).
10206 RANGE is the length of the jump table.
10207 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
88d3b7f0 10208
10209 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10210 index value is out of range. */
0f41302f 10211
ad82abb8 10212static void
10213do_tablejump (index, mode, range, table_label, default_label)
10214 rtx index, range, table_label, default_label;
10215 enum machine_mode mode;
ca695ac9 10216{
b3694847 10217 rtx temp, vector;
88d3b7f0 10218
10219 if (INTVAL (range) > cfun->max_jumptable_ents)
10220 cfun->max_jumptable_ents = INTVAL (range);
1877be45 10221
10222 /* Do an unsigned comparison (in the proper mode) between the index
10223 expression and the value which represents the length of the range.
10224 Since we just finished subtracting the lower bound of the range
10225 from the index expression, this comparison allows us to simultaneously
10226 check that the original index expression value is both greater than
10227 or equal to the minimum value of the range and less than or equal to
10228 the maximum value of the range. */
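  /* In C terms: after `index -= minval', the single unsigned test
     `(unsigned) index > (unsigned) (maxval - minval)' catches both
     `index < minval' and `index > maxval' of the original value,
     because underflow wraps around to a large unsigned number.  */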
709f5be1 10229
c5d5d461 10230 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
a06ef755 10231 default_label);
bbf6f052 10232
10233 /* If index is in range, it must fit in Pmode.
10234 Convert to Pmode so we can index with it. */
10235 if (mode != Pmode)
10236 index = convert_to_mode (Pmode, index, 1);
bbf6f052 10237
10238 /* Don't let a MEM slip thru, because then INDEX that comes
10239 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10240 and break_out_memory_refs will go to work on it and mess it up. */
10241#ifdef PIC_CASE_VECTOR_ADDRESS
10242 if (flag_pic && GET_CODE (index) != REG)
10243 index = copy_to_mode_reg (Pmode, index);
10244#endif
ca695ac9 10245
10246 /* If flag_force_addr were to affect this address
10247 it could interfere with the tricky assumptions made
10248 about addresses that contain label-refs,
10249 which may be valid only very near the tablejump itself. */
10250 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10251 GET_MODE_SIZE, because this indicates how large insns are. The other
10252 uses should all be Pmode, because they are addresses. This code
10253 could fail if addresses and insns are not the same size. */
10254 index = gen_rtx_PLUS (Pmode,
10255 gen_rtx_MULT (Pmode, index,
10256 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10257 gen_rtx_LABEL_REF (Pmode, table_label));
10258#ifdef PIC_CASE_VECTOR_ADDRESS
10259 if (flag_pic)
10260 index = PIC_CASE_VECTOR_ADDRESS (index);
10261 else
bbf6f052 10262#endif
10263 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10264 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10265 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10266 RTX_UNCHANGING_P (vector) = 1;
10267 convert_move (temp, vector, 0);
10268
10269 emit_jump_insn (gen_tablejump (temp, table_label));
10270
10271 /* If we are generating PIC code or if the table is PC-relative, the
10272 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10273 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10274 emit_barrier ();
bbf6f052 10275}
b93a436e 10276
10277int
10278try_tablejump (index_type, index_expr, minval, range,
10279 table_label, default_label)
10280 tree index_type, index_expr, minval, range;
10281 rtx table_label, default_label;
10282{
10283 rtx index;
10284
10285 if (! HAVE_tablejump)
10286 return 0;
10287
10288 index_expr = fold (build (MINUS_EXPR, index_type,
10289 convert (index_type, index_expr),
10290 convert (index_type, minval)));
10291 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10292 emit_queue ();
10293 index = protect_from_queue (index, 0);
10294 do_pending_stack_adjust ();
10295
10296 do_tablejump (index, TYPE_MODE (index_type),
10297 convert_modes (TYPE_MODE (index_type),
10298 TYPE_MODE (TREE_TYPE (range)),
10299 expand_expr (range, NULL_RTX,
10300 VOIDmode, 0),
10301 TREE_UNSIGNED (TREE_TYPE (range))),
10302 table_label, default_label);
10303 return 1;
10304}
e2500fed 10305
10306/* Nonzero if the mode is a valid vector mode for this architecture.
10307 This returns nonzero even if there is no hardware support for the
10308 vector mode, but we can emulate with narrower modes. */
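/* For example (illustrative): a target with DImode moves but no V2DI
   vector unit still gets nonzero here for V2DImode, since a V2DI move
   can be emulated with a pair of DImode moves.  */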
10309
10310int
10311vector_mode_valid_p (mode)
10312 enum machine_mode mode;
10313{
10314 enum mode_class class = GET_MODE_CLASS (mode);
10315 enum machine_mode innermode;
10316
10317 /* Doh! What's going on? */
10318 if (class != MODE_VECTOR_INT
10319 && class != MODE_VECTOR_FLOAT)
10320 return 0;
10321
10322 /* Hardware support. Woo hoo! */
10323 if (VECTOR_MODE_SUPPORTED_P (mode))
10324 return 1;
10325
10326 innermode = GET_MODE_INNER (mode);
10327
 10328	  /* We should probably return 1 if requesting V4DI and we have no DI,
 10329	     but do have V2DI; however, this case is probably very unlikely.  */
10330
10331 /* If we have support for the inner mode, we can safely emulate it.
 10332	     We may not have V2DI, but we can emulate with a pair of DIs.  */
10333 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10334}
10335
10336/* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10337static rtx
10338const_vector_from_tree (exp)
10339 tree exp;
10340{
10341 rtvec v;
10342 int units, i;
10343 tree link, elt;
10344 enum machine_mode inner, mode;
10345
10346 mode = TYPE_MODE (TREE_TYPE (exp));
10347
10348 if (is_zeros_p (exp))
10349 return CONST0_RTX (mode);
10350
10351 units = GET_MODE_NUNITS (mode);
10352 inner = GET_MODE_INNER (mode);
10353
10354 v = rtvec_alloc (units);
10355
10356 link = TREE_VECTOR_CST_ELTS (exp);
10357 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10358 {
10359 elt = TREE_VALUE (link);
10360
10361 if (TREE_CODE (elt) == REAL_CST)
10362 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10363 inner);
10364 else
10365 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10366 TREE_INT_CST_HIGH (elt),
10367 inner);
10368 }
10369
10370 return gen_rtx_raw_CONST_VECTOR (mode, v);
10371}
10372
e2500fed 10373#include "gt-expr.h"