/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001, 2002, 2003 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "machmode.h"
#include "real.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-config.h"
#include "insn-attr.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "langhooks.h"
#include "intl.h"
#include "tm_p.h"
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#ifndef PUSH_ARGS_REVERSED
#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Convert defined/undefined to boolean.  */
#ifdef TARGET_MEM_FUNCTIONS
#undef TARGET_MEM_FUNCTIONS
#define TARGET_MEM_FUNCTIONS 1
#else
#define TARGET_MEM_FUNCTIONS 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
tree placeholder_list = 0;
/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the store to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};
static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static bool block_move_libcall_safe_for_call_parm PARAMS ((void));
static bool emit_block_move_via_movstr PARAMS ((rtx, rtx, rtx, unsigned));
static rtx emit_block_move_via_libcall PARAMS ((rtx, rtx, rtx));
static tree emit_block_move_libcall_fn PARAMS ((int));
static void emit_block_move_via_loop PARAMS ((rtx, rtx, rtx, unsigned));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static bool clear_storage_via_clrstr PARAMS ((rtx, rtx, unsigned));
static rtx clear_storage_via_libcall PARAMS ((rtx, rtx));
static tree clear_storage_libcall_fn PARAMS ((int));
static rtx compress_float_constant PARAMS ((rtx, rtx));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, int, int));
static void store_constructor	PARAMS ((tree, rtx, int, HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int, tree,
					 int));
static rtx var_rtx		PARAMS ((tree));

static unsigned HOST_WIDE_INT highest_pow2_factor PARAMS ((tree));
static unsigned HOST_WIDE_INT highest_pow2_factor_for_type PARAMS ((tree,
								    tree));

static int is_aligning_offset	PARAMS ((tree, tree));
static rtx expand_increment	PARAMS ((tree, int, int));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));
#ifdef PUSH_ROUNDING
static void emit_single_push_insn PARAMS ((enum machine_mode, rtx, tree));
#endif
static void do_tablejump PARAMS ((rtx, enum machine_mode, rtx, rtx, rtx));
static rtx const_vector_from_tree PARAMS ((tree));
/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* Record for each mode whether we can float-extend from memory.  */

static bool float_extend_from_mem[NUM_MACHINE_MODES][NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

/* If a clear memory operation would take CLEAR_RATIO or more simple
   move-instruction sequences, we will do a clrstr or libcall instead.  */

#ifndef CLEAR_RATIO
#if defined (HAVE_clrstrqi) || defined (HAVE_clrstrhi) || defined (HAVE_clrstrsi) || defined (HAVE_clrstrdi) || defined (HAVE_clrstrti)
#define CLEAR_RATIO 2
#else
/* If we are optimizing for space, cut down the default clear ratio.  */
#define CLEAR_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether clear_by_pieces should be
   called to clear storage.  */
#ifndef CLEAR_BY_PIECES_P
#define CLEAR_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) CLEAR_RATIO)
#endif

/* This macro is used to determine whether store_by_pieces should be
   called to "memset" storage with byte values other than zero, or
   to "memcpy" storage when the source is a constant string.  */
#ifndef STORE_BY_PIECES_P
#define STORE_BY_PIECES_P(SIZE, ALIGN) MOVE_BY_PIECES_P (SIZE, ALIGN)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is nonzero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
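/* Illustrative only (not part of the original source): a minimal sketch
   of how the by-pieces heuristic above is consulted, assuming a
   hypothetical 32-bit target where MOVE_MAX is 4 and MOVE_RATIO is 15.  */
#if 0
static void
example_by_pieces_decision (rtx dst, rtx src)
{
  /* A 16-byte copy at 32-bit alignment needs four SImode moves, which
     is below MOVE_RATIO, so the copy is expanded inline.  */
  if (MOVE_BY_PIECES_P (16, 32))
    move_by_pieces (dst, src, 16, 32, 0);
  else
    emit_block_move (dst, src, GEN_INT (16), BLOCK_OP_NORMAL);
}
#endif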
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  rtx reg;

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  /* A scratch register we can modify in-place below to avoid
     useless RTL allocations.  */
  reg = gen_rtx_REG (VOIDmode, -1);

  insn = rtx_alloc (INSN);
  pat = gen_rtx_SET (0, NULL_RTX, NULL_RTX);
  PATTERN (insn) = pat;

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);
      PUT_MODE (reg, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    REGNO (reg) = regno;

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  mem = gen_rtx_MEM (VOIDmode, gen_rtx_raw_REG (Pmode, 10000));

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      enum machine_mode srcmode;
      for (srcmode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT); srcmode != mode;
	   srcmode = GET_MODE_WIDER_MODE (srcmode))
	{
	  enum insn_code ic;

	  ic = can_extend_p (mode, srcmode, 0);
	  if (ic == CODE_FOR_nothing)
	    continue;

	  PUT_MODE (mem, srcmode);

	  if ((*insn_data[ic].operand[1].predicate) (mem, srcmode))
	    float_extend_from_mem[mode][srcmode] = true;
	}
    }
}
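/* Illustrative note (not from the original source): after the probing
   above, direct_load[(int) SImode] is nonzero iff some hard register can
   be loaded straight from an SImode MEM by a recognized move insn, so
   later field-access code may use SImode directly rather than a wider
   intermediate mode.  */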
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) ggc_alloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}
/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     rtx x;
     int modify;
{
  RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  rtx y = XEXP (x, 0);
	  rtx new = replace_equiv_address_nv (x, QUEUED_VAR (y));

	  if (QUEUED_INSN (y))
	    {
	      rtx temp = gen_reg_rtx (GET_MODE (x));

	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }

	  /* Copy the address into a pseudo, so that the returned value
	     remains correct across calls to emit_queue.  */
	  return replace_equiv_address (new, copy_to_reg (XEXP (new, 0)));
	}

      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  Copy it
     into a new pseudo so that the value remains correct across calls to
     emit_queue.  */
  if (QUEUED_INSN (x) == 0)
    return copy_to_reg (QUEUED_VAR (x));
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      switch (GET_CODE (body))
	{
	case INSN:
	case JUMP_INSN:
	case CALL_INSN:
	case CODE_LABEL:
	case BARRIER:
	case NOTE:
	  QUEUED_INSN (p) = body;
	  emit_insn (body);
	  break;

#ifdef ENABLE_CHECKING
	case SEQUENCE:
	  abort ();
	  break;
#endif

	default:
	  QUEUED_INSN (p) = emit_insn (body);
	  break;
	}

      pending_chain = QUEUED_NEXT (p);
    }
}
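/* Illustrative only (not part of the original source): a minimal sketch
   of the calling discipline for the increment queue, assuming VAR and
   INC are rtx values prepared by the caller.  */
#if 0
static void
example_queue_usage (rtx var, rtx inc)
{
  /* Defer "var = var + inc" until the queue is flushed ...  */
  rtx queued = enqueue_insn (var, gen_add2_insn (var, inc));
  /* ... so that uses in between see the pre-increment value.  */
  rtx safe = protect_from_queue (queued, 0);
  emit_move_insn (gen_reg_rtx (GET_MODE (var)), safe);
  emit_queue ();  /* Now the queued increment is actually emitted.  */
}
#endif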
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp < 0 ? UNKNOWN
			      : (unsignedp ? ZERO_EXTEND : SIGN_EXTEND));

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
	abort ();

      if (VECTOR_MODE_P (to_mode))
	from = simplify_gen_subreg (to_mode, from, GET_MODE (from), 0);
      else
	to = simplify_gen_subreg (from_mode, to, GET_MODE (to), 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
				       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
								    from));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (! unsignedp && HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
	  if (unsignedp && HAVE_zero_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_zero_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  if (flag_force_mem)
	    from = force_not_mem (from);

	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
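/* Illustrative only (not part of the original source): a minimal sketch
   of widening a QImode pseudo into an SImode pseudo via convert_move.  */
#if 0
static rtx
example_zero_extend_qi_to_si (rtx byte_reg)
{
  rtx wide = gen_reg_rtx (SImode);
  convert_move (wide, byte_reg, 1);  /* UNSIGNEDP == 1 => zero-extend.  */
  return wide;
}
#endif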
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}
/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && (! HARD_REGISTER_P (x)
			      || HARD_REGNO_MODE_OK (REGNO (x), mode))
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return gen_int_mode (val, mode);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
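/* Illustrative only (not part of the original source): a sketch of how
   convert_modes narrows a constant without emitting any insns.  */
#if 0
static rtx
example_narrow_constant (void)
{
  /* 0x12345678 narrowed to HImode yields (const_int 0x5678) directly
     through gen_lowpart; no move instruction is generated.  */
  return convert_modes (HImode, SImode, GEN_INT (0x12345678), 1);
}
#endif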
/* This macro is used to determine what the largest unit size that
   move_by_pieces can use is.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* STORE_MAX_PIECES is the number of bytes at a time that we can
   store efficiently.  Due to internal GCC limitations, this is
   MOVE_MAX_PIECES limited by the number of bytes GCC can represent
   for an immediate constant.  */

#define STORE_MAX_PIECES  MIN (MOVE_MAX_PIECES, 2 * sizeof (HOST_WIDE_INT))

/* Determine whether the LEN bytes can be moved by using several move
   instructions.  Return nonzero if a call to move_by_pieces should
   succeed.  */

int
can_move_by_pieces (len, align)
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  return MOVE_BY_PIECES_P (len, align);
}
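/* Illustrative only (not part of the original source): a hedged sketch of
   how a caller might consult the heuristic before expanding inline,
   assuming DST and SRC are BLKmode MEMs and SIZE is a CONST_INT.  */
#if 0
  if (can_move_by_pieces (INTVAL (size), MEM_ALIGN (dst)))
    move_by_pieces (dst, src, INTVAL (size), MEM_ALIGN (dst), 0);
#endif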
/* Generate several move instructions to copy LEN bytes from block FROM to
   block TO.  (These are MEM rtx's with BLKmode).  The caller must pass FROM
   and TO through protect_from_queue before calling.

   If PUSH_ROUNDING is defined and TO is NULL, emit_single_push_insn is
   used to push FROM to the stack.

   ALIGN is maximum stack alignment we can assume.

   If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
   mempcpy, and if ENDP is 2 return memory the end minus one byte ala
   stpcpy.  */

rtx
move_by_pieces (to, from, len, align, endp)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
     int endp;
{
  struct move_by_pieces data;
  rtx to_addr, from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  align = MIN (to ? MEM_ALIGN (to) : align, MEM_ALIGN (from));

  data.offset = 0;
  data.from_addr = from_addr;
  if (to)
    {
      to_addr = XEXP (to, 0);
      data.to = to;
      data.autinc_to
	= (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
	   || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
      data.reverse
	= (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
    }
  else
    {
      to_addr = NULL_RTX;
      data.to = NULL_RTX;
      data.autinc_to = 1;
#ifdef STACK_GROWS_DOWNWARD
      data.reverse = 1;
#else
      data.reverse = 0;
#endif
    }
  data.to_addr = to_addr;
  data.from = from;
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  if (data.reverse) data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();

  if (endp)
    {
      rtx to1;

      if (data.reverse)
	abort ();
      if (data.autinc_to)
	{
	  if (endp == 2)
	    {
	      if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
		emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
	      else
		data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
								-1));
	    }
	  to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
					   data.offset);
	}
      else
	{
	  if (endp == 2)
	    --data.offset;
	  to1 = adjust_address (data.to, QImode, data.offset);
	}
      return to1;
    }
  else
    return data.to;
}
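/* Illustrative only (not part of the original source): ENDP mirrors the
   C library trio.  A hedged sketch, assuming DST and SRC are 8-byte,
   64-bit-aligned MEMs.  */
#if 0
  rtx end, last;
  move_by_pieces (dst, src, 8, 64, 0);        /* memcpy: returns DST.  */
  end = move_by_pieces (dst, src, 8, 64, 1);  /* mempcpy: MEM past the end.  */
  last = move_by_pieces (dst, src, 8, 64, 2); /* stpcpy: MEM at last byte.  */
#endif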
/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
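/* Illustrative only (not part of the original source): a worked example
   of the insn count, assuming MOVE_MAX of 4 and 32-bit-aligned operands.  */
#if 0
  /* For l == 11 at 32-bit alignment the loop counts:
       SImode: 11 / 4 = 2 insns, 3 bytes left
       HImode:  3 / 2 = 1 insn,  1 byte left
       QImode:  1 / 1 = 1 insn,  0 left
     so move_by_pieces_ninsns (11, 32) == 4.  */
#endif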
/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1 = NULL_RTX, from1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      if (data->to)
	{
	  if (data->autinc_to)
	    to1 = adjust_automodify_address (data->to, mode, data->to_addr,
					     data->offset);
	  else
	    to1 = adjust_address (data->to, mode, data->offset);
	}

      if (data->autinc_from)
	from1 = adjust_automodify_address (data->from, mode, data->from_addr,
					   data->offset);
      else
	from1 = adjust_address (data->from, mode, data->offset);

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr,
				  GEN_INT (-(HOST_WIDE_INT)size)));

      if (data->to)
	emit_insn ((*genfun) (to1, from1));
      else
	{
#ifdef PUSH_ROUNDING
	  emit_single_push_insn (mode, from1, NULL);
#else
	  abort ();
#endif
	}

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
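/* Illustrative note (not from the original source): with data->reverse
   set, the offset is pre-decremented and the block is copied from the
   high end downward, which is the order required when pushing the block
   onto a downward-growing stack.  */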
4ca79136
RH
1738/* Emit code to move a block Y to a block X. This may be done with
1739 string-move instructions, with multiple scalar move instructions,
1740 or with a library call.
bbf6f052 1741
4ca79136 1742 Both X and Y must be MEM rtx's (perhaps inside VOLATILE) with mode BLKmode.
bbf6f052 1743 SIZE is an rtx that says how long they are.
19caa751 1744 ALIGN is the maximum alignment we can assume they have.
44bb111a 1745 METHOD describes what kind of copy this is, and what mechanisms may be used.
bbf6f052 1746
e9a25f70
JL
1747 Return the address of the new block, if memcpy is called and returns it,
1748 0 otherwise. */
1749
1750rtx
44bb111a 1751emit_block_move (x, y, size, method)
4ca79136 1752 rtx x, y, size;
44bb111a 1753 enum block_op_methods method;
bbf6f052 1754{
44bb111a 1755 bool may_use_call;
e9a25f70 1756 rtx retval = 0;
44bb111a
RH
1757 unsigned int align;
1758
1759 switch (method)
1760 {
1761 case BLOCK_OP_NORMAL:
1762 may_use_call = true;
1763 break;
1764
1765 case BLOCK_OP_CALL_PARM:
1766 may_use_call = block_move_libcall_safe_for_call_parm ();
1767
1768 /* Make inhibit_defer_pop nonzero around the library call
1769 to force it to pop the arguments right away. */
1770 NO_DEFER_POP;
1771 break;
1772
1773 case BLOCK_OP_NO_LIBCALL:
1774 may_use_call = false;
1775 break;
1776
1777 default:
1778 abort ();
1779 }
1780
1781 align = MIN (MEM_ALIGN (x), MEM_ALIGN (y));
1782
1783 if (GET_MODE (x) != BLKmode)
1784 abort ();
1785 if (GET_MODE (y) != BLKmode)
1786 abort ();
1787
1788 x = protect_from_queue (x, 1);
1789 y = protect_from_queue (y, 0);
1790  size = protect_from_queue (size, 0);
1791
1792 if (GET_CODE (x) != MEM)
1793 abort ();
1794 if (GET_CODE (y) != MEM)
1795 abort ();
1796 if (size == 0)
1797 abort ();
1798
1799 /* Set MEM_SIZE as appropriate for this block copy. The main place this
1800 can be incorrect is coming from __builtin_memcpy. */
1801 if (GET_CODE (size) == CONST_INT)
1802 {
1803 x = shallow_copy_rtx (x);
1804 y = shallow_copy_rtx (y);
1805 set_mem_size (x, size);
1806 set_mem_size (y, size);
1807 }
1808
1809  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
1810    move_by_pieces (x, y, INTVAL (size), align, 0);
1811 else if (emit_block_move_via_movstr (x, y, size, align))
1812 ;
1813  else if (may_use_call)
1814    retval = emit_block_move_via_libcall (x, y, size);
1815 else
1816 emit_block_move_via_loop (x, y, size, align);
1817
1818 if (method == BLOCK_OP_CALL_PARM)
1819 OK_DEFER_POP;
1820
1821 return retval;
1822}
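/* Expository sketch, not part of the original file: a typical use of
   emit_block_move is to copy a BLKmode SRC of SIZE bytes into a fresh stack
   temporary.  This helper is hypothetical and only illustrates the call.  */
#if 0
static rtx
copy_block_to_stack_temp (src, size)
     rtx src;
     HOST_WIDE_INT size;
{
  rtx temp = assign_stack_temp (BLKmode, size, 0);
  emit_block_move (temp, src, GEN_INT (size), BLOCK_OP_NORMAL);
  return temp;
}
#endif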
1823
1824/* A subroutine of emit_block_move. Returns true if calling the
1825 block move libcall will not clobber any parameters which may have
1826 already been placed on the stack. */
1827
1828static bool
1829block_move_libcall_safe_for_call_parm ()
1830{
1831 if (PUSH_ARGS)
1832 return true;
1833 else
1834 {
1835 /* Check to see whether memcpy takes all register arguments. */
1836 static enum {
1837 takes_regs_uninit, takes_regs_no, takes_regs_yes
1838 } takes_regs = takes_regs_uninit;
1839
1840 switch (takes_regs)
1841 {
1842 case takes_regs_uninit:
1843 {
1844 CUMULATIVE_ARGS args_so_far;
1845 tree fn, arg;
1846
1847 fn = emit_block_move_libcall_fn (false);
1848 INIT_CUMULATIVE_ARGS (args_so_far, TREE_TYPE (fn), NULL_RTX, 0);
1849
1850 arg = TYPE_ARG_TYPES (TREE_TYPE (fn));
1851 for ( ; arg != void_list_node ; arg = TREE_CHAIN (arg))
1852 {
1853          enum machine_mode mode = TYPE_MODE (TREE_VALUE (arg));
1854 rtx tmp = FUNCTION_ARG (args_so_far, mode, NULL_TREE, 1);
1855 if (!tmp || !REG_P (tmp))
1856 goto fail_takes_regs;
1857#ifdef FUNCTION_ARG_PARTIAL_NREGS
1858 if (FUNCTION_ARG_PARTIAL_NREGS (args_so_far, mode,
1859 NULL_TREE, 1))
1860 goto fail_takes_regs;
1861#endif
1862 FUNCTION_ARG_ADVANCE (args_so_far, mode, NULL_TREE, 1);
1863 }
1864 }
1865 takes_regs = takes_regs_yes;
1866 /* FALLTHRU */
1867
1868 case takes_regs_yes:
1869 return true;
1870
1871 fail_takes_regs:
1872 takes_regs = takes_regs_no;
1873 /* FALLTHRU */
1874 case takes_regs_no:
1875 return false;
1876
1877 default:
1878 abort ();
1879 }
1880 }
1881}
1882
1883/* A subroutine of emit_block_move. Expand a movstr pattern;
1884 return true if successful. */
1885
1886static bool
1887emit_block_move_via_movstr (x, y, size, align)
1888 rtx x, y, size;
1889 unsigned int align;
1890{
1891 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
1892 enum machine_mode mode;
1893
1894 /* Since this is a move insn, we don't care about volatility. */
1895 volatile_ok = 1;
1896
1897 /* Try the most limited insn first, because there's no point
1898 including more than one in the machine description unless
1899 the more limited one has some advantage. */
1900
1901 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1902 mode = GET_MODE_WIDER_MODE (mode))
1903 {
1904 enum insn_code code = movstr_optab[(int) mode];
1905 insn_operand_predicate_fn pred;
1906
1907 if (code != CODE_FOR_nothing
1908 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1909 here because if SIZE is less than the mode mask, as it is
1910 returned by the macro, it will definitely be less than the
1911 actual mode mask. */
1912 && ((GET_CODE (size) == CONST_INT
1913 && ((unsigned HOST_WIDE_INT) INTVAL (size)
1914 <= (GET_MODE_MASK (mode) >> 1)))
1915 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
1916 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1917 || (*pred) (x, BLKmode))
1918 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1919 || (*pred) (y, BLKmode))
1920 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1921 || (*pred) (opalign, VOIDmode)))
1922 {
1923 rtx op2;
1924 rtx last = get_last_insn ();
1925 rtx pat;
1926
1927 op2 = convert_to_mode (mode, size, 1);
1928 pred = insn_data[(int) code].operand[2].predicate;
1929 if (pred != 0 && ! (*pred) (op2, mode))
1930 op2 = copy_to_mode_reg (mode, op2);
1931
1932 /* ??? When called via emit_block_move_for_call, it'd be
1933 nice if there were some way to inform the backend, so
1934 that it doesn't fail the expansion because it thinks
1935 emitting the libcall would be more efficient. */
1936
1937 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1938 if (pat)
1939 {
1940 emit_insn (pat);
1941 volatile_ok = 0;
1942 return true;
1943            }
1944 else
1945 delete_insns_since (last);
1946        }
1947    }
1948
1949 volatile_ok = 0;
1950 return false;
1951}
1952
1953/* A subroutine of emit_block_move. Expand a call to memcpy or bcopy.
1954 Return the return value from memcpy, 0 otherwise. */
1955
1956static rtx
1957emit_block_move_via_libcall (dst, src, size)
1958 rtx dst, src, size;
1959{
1960  rtx dst_addr, src_addr;
1961 tree call_expr, arg_list, fn, src_tree, dst_tree, size_tree;
1962 enum machine_mode size_mode;
1963 rtx retval;
1964
1965  /* DST, SRC, or SIZE may have been passed through protect_from_queue.
1966
1967 It is unsafe to save the value generated by protect_from_queue and reuse
1968 it later. Consider what happens if emit_queue is called before the
1969 return value from protect_from_queue is used.
1970
1971 Expansion of the CALL_EXPR below will call emit_queue before we are
1972 finished emitting RTL for argument setup. So if we are not careful we
1973 could get the wrong value for an argument.
1974
1975 To avoid this problem we go ahead and emit code to copy the addresses of
1976 DST and SRC and SIZE into new pseudos. We can then place those new
1977 pseudos into an RTL_EXPR and use them later, even after a call to
1978     emit_queue.
1979
1980 Note this is not strictly needed for library calls since they do not call
1981 emit_queue before loading their arguments. However, we may need to have
1982 library calls call emit_queue in the future since failing to do so could
1983 cause problems for targets which define SMALL_REGISTER_CLASSES and pass
1984 arguments in registers. */
1985
1986 dst_addr = copy_to_mode_reg (Pmode, XEXP (dst, 0));
1987 src_addr = copy_to_mode_reg (Pmode, XEXP (src, 0));
1988
1989#ifdef POINTERS_EXTEND_UNSIGNED
1990 dst_addr = convert_memory_address (ptr_mode, dst_addr);
1991 src_addr = convert_memory_address (ptr_mode, src_addr);
1992#endif
1993
1994 dst_tree = make_tree (ptr_type_node, dst_addr);
1995 src_tree = make_tree (ptr_type_node, src_addr);
1996
1997 if (TARGET_MEM_FUNCTIONS)
1998 size_mode = TYPE_MODE (sizetype);
1999 else
2000 size_mode = TYPE_MODE (unsigned_type_node);
2001
2002 size = convert_to_mode (size_mode, size, 1);
2003 size = copy_to_mode_reg (size_mode, size);
2004
2005 /* It is incorrect to use the libcall calling conventions to call
2006 memcpy in this context. This could be a user call to memcpy and
2007 the user may wish to examine the return value from memcpy. For
2008 targets where libcalls and normal calls have different conventions
2009 for returning pointers, we could end up generating incorrect code.
2010
2011 For convenience, we generate the call to bcopy this way as well. */
2012
2013 if (TARGET_MEM_FUNCTIONS)
2014 size_tree = make_tree (sizetype, size);
2015 else
2016 size_tree = make_tree (unsigned_type_node, size);
2017
2018 fn = emit_block_move_libcall_fn (true);
2019 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
2020 if (TARGET_MEM_FUNCTIONS)
2021 {
2022 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
2023 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
2024 }
2025 else
2026 {
2027 arg_list = tree_cons (NULL_TREE, dst_tree, arg_list);
2028 arg_list = tree_cons (NULL_TREE, src_tree, arg_list);
2029 }
2030
2031 /* Now we have to build up the CALL_EXPR itself. */
2032 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2033 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2034 call_expr, arg_list, NULL_TREE);
2035 TREE_SIDE_EFFECTS (call_expr) = 1;
2036
2037 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2038
2039 /* If we are initializing a readonly value, show the above call clobbered
2040 it. Otherwise, a load from it may erroneously be hoisted from a loop, or
2041 the delay slot scheduler might overlook conflicts and take nasty
2042 decisions. */
2043  if (RTX_UNCHANGING_P (dst))
2044 add_function_usage_to
2045 (last_call_insn (), gen_rtx_EXPR_LIST (VOIDmode,
2046 gen_rtx_CLOBBER (VOIDmode, dst),
2047 NULL_RTX));
2048
2049  return TARGET_MEM_FUNCTIONS ? retval : NULL_RTX;
2050}
2051
2052/* A subroutine of emit_block_move_via_libcall. Create the tree node
2053 for the function we use for block copies. The first time FOR_CALL
2054 is true, we call assemble_external. */
2055
2056static GTY(()) tree block_move_fn;
2057
2058void
2059init_block_move_fn (asmspec)
2060 const char *asmspec;
2061{
2062  if (!block_move_fn)
2063    {
2064      tree args, fn;
2065
2066      if (TARGET_MEM_FUNCTIONS)
2067        {
2068 fn = get_identifier ("memcpy");
2069 args = build_function_type_list (ptr_type_node, ptr_type_node,
2070 const_ptr_type_node, sizetype,
2071 NULL_TREE);
2072 }
2073 else
2074 {
2075 fn = get_identifier ("bcopy");
2076 args = build_function_type_list (void_type_node, const_ptr_type_node,
2077 ptr_type_node, unsigned_type_node,
2078 NULL_TREE);
2079 }
2080
2081 fn = build_decl (FUNCTION_DECL, fn, args);
2082 DECL_EXTERNAL (fn) = 1;
2083 TREE_PUBLIC (fn) = 1;
2084 DECL_ARTIFICIAL (fn) = 1;
2085 TREE_NOTHROW (fn) = 1;
2086
2087      block_move_fn = fn;
2088    }
2089
2090 if (asmspec)
2091 {
2092 SET_DECL_RTL (block_move_fn, NULL_RTX);
2093 SET_DECL_ASSEMBLER_NAME (block_move_fn, get_identifier (asmspec));
2094 }
2095}
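/* For reference (expository comment, not in the original source): the two
   declarations built above correspond to the C prototypes

     void *memcpy (void *dst, const void *src, size_t n);
     void bcopy (const void *src, void *dst, unsigned n);

   memcpy returns its destination and takes it first, while bcopy returns
   nothing and takes the source first, which is why
   emit_block_move_via_libcall above conses its argument lists in
   different orders.  */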
2096
2097static tree
2098emit_block_move_libcall_fn (for_call)
2099 int for_call;
2100{
2101 static bool emitted_extern;
2102
2103 if (!block_move_fn)
2104 init_block_move_fn (NULL);
2105
2106 if (for_call && !emitted_extern)
2107 {
2108 emitted_extern = true;
2109 make_decl_rtl (block_move_fn, NULL);
2110 assemble_external (block_move_fn);
2111 }
2112
2113  return block_move_fn;
2114}
2115
2116/* A subroutine of emit_block_move. Copy the data via an explicit
2117 loop. This is used only when libcalls are forbidden. */
2118/* ??? It'd be nice to copy in hunks larger than QImode. */
2119
2120static void
2121emit_block_move_via_loop (x, y, size, align)
2122 rtx x, y, size;
2123 unsigned int align ATTRIBUTE_UNUSED;
2124{
2125 rtx cmp_label, top_label, iter, x_addr, y_addr, tmp;
2126 enum machine_mode iter_mode;
2127
2128 iter_mode = GET_MODE (size);
2129 if (iter_mode == VOIDmode)
2130 iter_mode = word_mode;
2131
2132 top_label = gen_label_rtx ();
2133 cmp_label = gen_label_rtx ();
2134 iter = gen_reg_rtx (iter_mode);
2135
2136 emit_move_insn (iter, const0_rtx);
2137
2138 x_addr = force_operand (XEXP (x, 0), NULL_RTX);
2139 y_addr = force_operand (XEXP (y, 0), NULL_RTX);
2140 do_pending_stack_adjust ();
2141
2142 emit_note (NULL, NOTE_INSN_LOOP_BEG);
2143
2144 emit_jump (cmp_label);
2145 emit_label (top_label);
2146
2147 tmp = convert_modes (Pmode, iter_mode, iter, true);
2148 x_addr = gen_rtx_PLUS (Pmode, x_addr, tmp);
2149 y_addr = gen_rtx_PLUS (Pmode, y_addr, tmp);
2150 x = change_address (x, QImode, x_addr);
2151 y = change_address (y, QImode, y_addr);
2152
2153 emit_move_insn (x, y);
2154
2155 tmp = expand_simple_binop (iter_mode, PLUS, iter, const1_rtx, iter,
2156 true, OPTAB_LIB_WIDEN);
2157 if (tmp != iter)
2158 emit_move_insn (iter, tmp);
2159
2160 emit_note (NULL, NOTE_INSN_LOOP_CONT);
2161 emit_label (cmp_label);
2162
2163 emit_cmp_and_jump_insns (iter, size, LT, NULL_RTX, iter_mode,
2164 true, top_label);
2165
2166 emit_note (NULL, NOTE_INSN_LOOP_END);
2167}
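/* Expository sketch, not part of the original file: treating X and Y as
   byte arrays, the RTL emitted above behaves like this loop, with the
   comparison placed after the body and entered first.  */
#if 0
  {
    unsigned HOST_WIDE_INT i = 0;
    goto cmp_label;
  top_label:
    x[i] = y[i];
    i++;
  cmp_label:
    if (i < size)
      goto top_label;
  }
#endif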
2168\f
2169/* Copy all or part of a value X into registers starting at REGNO.
2170 The number of registers to be filled is NREGS. */
2171
2172void
2173move_block_to_reg (regno, x, nregs, mode)
2174 int regno;
2175 rtx x;
2176 int nregs;
2177 enum machine_mode mode;
2178{
2179 int i;
2180#ifdef HAVE_load_multiple
2181  rtx pat;
2182 rtx last;
2183#endif
2184
2185 if (nregs == 0)
2186 return;
2187
2188 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2189 x = validize_mem (force_const_mem (mode, x));
2190
2191 /* See if the machine can do this with a load multiple insn. */
2192#ifdef HAVE_load_multiple
2193  if (HAVE_load_multiple)
2194    {
2195      last = get_last_insn ();
2196      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
2197 GEN_INT (nregs));
2198 if (pat)
2199 {
2200 emit_insn (pat);
2201 return;
2202 }
2203 else
2204 delete_insns_since (last);
2205    }
2206#endif
2207
2208 for (i = 0; i < nregs; i++)
2209    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
2210 operand_subword_force (x, i, mode));
2211}
2212
2213/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
2214   The number of registers to be filled is NREGS. */
2215
2216void
2217move_block_from_reg (regno, x, nregs)
2218 int regno;
2219 rtx x;
2220 int nregs;
2221{
2222 int i;
2223
2224 if (nregs == 0)
2225 return;
2226
2227 /* See if the machine can do this with a store multiple insn. */
2228#ifdef HAVE_store_multiple
c3a02afe 2229 if (HAVE_store_multiple)
2230    {
2231 rtx last = get_last_insn ();
2232 rtx pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
2233 GEN_INT (nregs));
2234 if (pat)
2235 {
2236 emit_insn (pat);
2237 return;
2238 }
2239 else
2240 delete_insns_since (last);
2241    }
2242#endif
2243
2244 for (i = 0; i < nregs; i++)
2245 {
2246 rtx tem = operand_subword (x, i, 1, BLKmode);
2247
2248 if (tem == 0)
2249 abort ();
2250
2251      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
2252 }
2253}
2254
2255/* Generate a PARALLEL rtx for a new non-consecutive group of registers from
2256 ORIG, where ORIG is a non-consecutive group of registers represented by
2257 a PARALLEL. The clone is identical to the original except in that the
2258 original set of registers is replaced by a new set of pseudo registers.
2259 The new set has the same modes as the original set. */
2260
2261rtx
2262gen_group_rtx (orig)
2263 rtx orig;
2264{
2265 int i, length;
2266 rtx *tmps;
2267
2268 if (GET_CODE (orig) != PARALLEL)
2269 abort ();
2270
2271 length = XVECLEN (orig, 0);
2272 tmps = (rtx *) alloca (sizeof (rtx) * length);
2273
2274 /* Skip a NULL entry in first slot. */
2275 i = XEXP (XVECEXP (orig, 0, 0), 0) ? 0 : 1;
2276
2277 if (i)
2278 tmps[0] = 0;
2279
2280 for (; i < length; i++)
2281 {
2282 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (orig, 0, i), 0));
2283 rtx offset = XEXP (XVECEXP (orig, 0, i), 1);
2284
2285 tmps[i] = gen_rtx_EXPR_LIST (VOIDmode, gen_reg_rtx (mode), offset);
2286 }
2287
2288 return gen_rtx_PARALLEL (GET_MODE (orig), gen_rtvec_v (length, tmps));
2289}
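/* Expository example, not in the original source: for a 16-byte value
   passed in two DImode registers, ORIG might look like

     (parallel [(expr_list (reg:DI 4) (const_int 0))
                (expr_list (reg:DI 5) (const_int 8))])

   and gen_group_rtx returns the same shape with each hard register
   replaced by a fresh pseudo of the same mode, byte offsets unchanged.  */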
2290
2291/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
2292 registers represented by a PARALLEL. SSIZE represents the total size of
2293   block SRC in bytes, or -1 if not known. */
2294/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
2295 the balance will be in what would be the low-order memory addresses, i.e.
2296 left justified for big endian, right justified for little endian. This
2297 happens to be true for the targets currently using this support. If this
2298 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
2299 would be needed. */
2300
2301void
2302emit_group_load (dst, orig_src, ssize)
2303     rtx dst, orig_src;
2304     int ssize;
2305{
2306 rtx *tmps, src;
2307 int start, i;
2308
2309  if (GET_CODE (dst) != PARALLEL)
2310 abort ();
2311
2312 /* Check for a NULL entry, used to indicate that the parameter goes
2313 both on the stack and in registers. */
2314 if (XEXP (XVECEXP (dst, 0, 0), 0))
2315 start = 0;
2316  else
2317 start = 1;
2318
2319  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
2320
2321 /* Process the pieces. */
2322 for (i = start; i < XVECLEN (dst, 0); i++)
2323 {
2324 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
2325 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
2326 unsigned int bytelen = GET_MODE_SIZE (mode);
2327 int shift = 0;
2328
2329 /* Handle trailing fragments that run over the size of the struct. */
2330      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2331 {
2332 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2333 bytelen = ssize - bytepos;
2334 if (bytelen <= 0)
2335            abort ();
2336 }
2337
2338 /* If we won't be loading directly from memory, protect the real source
2339 from strange tricks we might play; but make sure that the source can
2340 be loaded directly into the destination. */
2341 src = orig_src;
2342 if (GET_CODE (orig_src) != MEM
2343 && (!CONSTANT_P (orig_src)
2344 || (GET_MODE (orig_src) != mode
2345 && GET_MODE (orig_src) != VOIDmode)))
2346 {
2347 if (GET_MODE (orig_src) == VOIDmode)
2348 src = gen_reg_rtx (mode);
2349 else
2350 src = gen_reg_rtx (GET_MODE (orig_src));
2351
2352 emit_move_insn (src, orig_src);
2353 }
2354
2355 /* Optimize the access just a bit. */
2356 if (GET_CODE (src) == MEM
2357          && MEM_ALIGN (src) >= GET_MODE_ALIGNMENT (mode)
2358          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2359 && bytelen == GET_MODE_SIZE (mode))
2360 {
2361 tmps[i] = gen_reg_rtx (mode);
2362          emit_move_insn (tmps[i], adjust_address (src, mode, bytepos));
2363        }
2364 else if (GET_CODE (src) == CONCAT)
2365 {
2366 unsigned int slen = GET_MODE_SIZE (GET_MODE (src));
2367 unsigned int slen0 = GET_MODE_SIZE (GET_MODE (XEXP (src, 0)));
2368
2369 if ((bytepos == 0 && bytelen == slen0)
2370 || (bytepos != 0 && bytepos + bytelen <= slen))
2371            {
2372 /* The following assumes that the concatenated objects all
2373 have the same size. In this case, a simple calculation
2374 can be used to determine the object and the bit field
2375 to be extracted. */
2376 tmps[i] = XEXP (src, bytepos / slen0);
2377 if (! CONSTANT_P (tmps[i])
2378 && (GET_CODE (tmps[i]) != REG || GET_MODE (tmps[i]) != mode))
2379 tmps[i] = extract_bit_field (tmps[i], bytelen * BITS_PER_UNIT,
2380 (bytepos % slen0) * BITS_PER_UNIT,
2381 1, NULL_RTX, mode, mode, ssize);
cbb92744 2382 }
2383 else if (bytepos == 0)
2384 {
2385              rtx mem = assign_stack_temp (GET_MODE (src), slen, 0);
2386              emit_move_insn (mem, src);
2387              tmps[i] = adjust_address (mem, mode, 0);
2388            }
2389 else
2390 abort ();
2391 }
2392 /* FIXME: A SIMD parallel will eventually lead to a subreg of a
2393 SIMD register, which is currently broken. While we get GCC
2394 to emit proper RTL for these cases, let's dump to memory. */
2395 else if (VECTOR_MODE_P (GET_MODE (dst))
2396 && GET_CODE (src) == REG)
2397 {
2398 int slen = GET_MODE_SIZE (GET_MODE (src));
2399 rtx mem;
2400
2401 mem = assign_stack_temp (GET_MODE (src), slen, 0);
2402 emit_move_insn (mem, src);
2403 tmps[i] = adjust_address (mem, mode, (int) bytepos);
2404 }
2405      else if (CONSTANT_P (src)
2406 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2407 tmps[i] = src;
2408      else
2409 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2410 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2411                                     mode, mode, ssize);
2412
2413      if (BYTES_BIG_ENDIAN && shift)
2414 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2415 tmps[i], 0, OPTAB_WIDEN);
2416    }
2417
2418  emit_queue ();
2419
2420 /* Copy the extracted pieces into the proper (probable) hard regs. */
2421 for (i = start; i < XVECLEN (dst, 0); i++)
2422 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
2423}
2424
2425/* Emit code to move a block SRC to block DST, where SRC and DST are
2426 non-consecutive groups of registers, each represented by a PARALLEL. */
2427
2428void
2429emit_group_move (dst, src)
2430 rtx dst, src;
2431{
2432 int i;
2433
2434 if (GET_CODE (src) != PARALLEL
2435 || GET_CODE (dst) != PARALLEL
2436 || XVECLEN (src, 0) != XVECLEN (dst, 0))
2437 abort ();
2438
2439 /* Skip first entry if NULL. */
2440 for (i = XEXP (XVECEXP (src, 0, 0), 0) ? 0 : 1; i < XVECLEN (src, 0); i++)
2441 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0),
2442 XEXP (XVECEXP (src, 0, i), 0));
2443}
2444
2445/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2446 registers represented by a PARALLEL. SSIZE represents the total size of
2447   block DST, or -1 if not known. */
2448
2449void
2450emit_group_store (orig_dst, src, ssize)
2451     rtx orig_dst, src;
2452     int ssize;
2453{
2454 rtx *tmps, dst;
2455 int start, i;
2456
2457  if (GET_CODE (src) != PARALLEL)
2458 abort ();
2459
2460 /* Check for a NULL entry, used to indicate that the parameter goes
2461 both on the stack and in registers. */
2462 if (XEXP (XVECEXP (src, 0, 0), 0))
2463 start = 0;
2464  else
2465 start = 1;
2466
2467  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
2468
2469 /* Copy the (probable) hard regs into pseudos. */
2470 for (i = start; i < XVECLEN (src, 0); i++)
2471    {
2472 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2473 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2474 emit_move_insn (tmps[i], reg);
2475 }
2476  emit_queue ();
2477
2478 /* If we won't be storing directly into memory, protect the real destination
2479 from strange tricks we might play. */
2480 dst = orig_dst;
2481 if (GET_CODE (dst) == PARALLEL)
2482 {
2483 rtx temp;
2484
2485 /* We can get a PARALLEL dst if there is a conditional expression in
2486 a return statement. In that case, the dst and src are the same,
2487 so no action is necessary. */
2488 if (rtx_equal_p (dst, src))
2489 return;
2490
2491 /* It is unclear if we can ever reach here, but we may as well handle
2492 it. Allocate a temporary, and split this into a store/load to/from
2493 the temporary. */
2494
2495 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2496 emit_group_store (temp, src, ssize);
2497 emit_group_load (dst, temp, ssize);
2498 return;
2499 }
2500  else if (GET_CODE (dst) != MEM && GET_CODE (dst) != CONCAT)
2501 {
2502 dst = gen_reg_rtx (GET_MODE (orig_dst));
2503 /* Make life a bit easier for combine. */
2504      emit_move_insn (dst, CONST0_RTX (GET_MODE (orig_dst)));
2505    }
2506
2507 /* Process the pieces. */
2508 for (i = start; i < XVECLEN (src, 0); i++)
2509 {
2510      HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2511      enum machine_mode mode = GET_MODE (tmps[i]);
2512      unsigned int bytelen = GET_MODE_SIZE (mode);
2513      rtx dest = dst;
2514
2515 /* Handle trailing fragments that run over the size of the struct. */
2516      if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
2517        {
2518 if (BYTES_BIG_ENDIAN)
2519 {
2520 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2521 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2522 tmps[i], 0, OPTAB_WIDEN);
2523 }
2524 bytelen = ssize - bytepos;
2525        }
2526
2527 if (GET_CODE (dst) == CONCAT)
2528 {
2529 if (bytepos + bytelen <= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2530 dest = XEXP (dst, 0);
2531 else if (bytepos >= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0))))
2532 {
2533 bytepos -= GET_MODE_SIZE (GET_MODE (XEXP (dst, 0)));
2534 dest = XEXP (dst, 1);
2535 }
2536 else if (bytepos == 0 && XVECLEN (src, 0))
2537 {
2538 dest = assign_stack_temp (GET_MODE (dest),
2539 GET_MODE_SIZE (GET_MODE (dest)), 0);
2540 emit_move_insn (adjust_address (dest, GET_MODE (tmps[i]), bytepos),
2541 tmps[i]);
2542 dst = dest;
2543 break;
2544 }
2545 else
2546 abort ();
2547 }
2548
2549      /* Optimize the access just a bit. */
2550 if (GET_CODE (dest) == MEM
2551 && MEM_ALIGN (dest) >= GET_MODE_ALIGNMENT (mode)
2552          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2553          && bytelen == GET_MODE_SIZE (mode))
2554        emit_move_insn (adjust_address (dest, mode, bytepos), tmps[i]);
2555      else
2556        store_bit_field (dest, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
2557                         mode, tmps[i], ssize);
2558    }
2559
2560  emit_queue ();
2561
2562 /* Copy from the pseudo into the (probable) hard reg. */
2563  if (orig_dst != dst)
2564    emit_move_insn (orig_dst, dst);
2565}
2566
2567/* Generate code to copy a BLKmode object of TYPE out of a
2568 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2569 is null, a stack temporary is created. TGTBLK is returned.
2570
2571 The primary purpose of this routine is to handle functions
2572 that return BLKmode structures in registers. Some machines
2573 (the PA for example) want to return all small structures
2574   in registers regardless of the structure's alignment. */
2575
2576rtx
2577copy_blkmode_from_reg (tgtblk, srcreg, type)
2578 rtx tgtblk;
2579 rtx srcreg;
2580 tree type;
2581{
2582 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2583 rtx src = NULL, dst = NULL;
2584 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2585 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2586
2587 if (tgtblk == 0)
2588 {
2589 tgtblk = assign_temp (build_qualified_type (type,
2590 (TYPE_QUALS (type)
2591 | TYPE_QUAL_CONST)),
2592 0, 1, 1);
2593 preserve_temp_slots (tgtblk);
2594 }
2595
2596  /* This code assumes srcreg is at least a full word.  If it isn't, copy it
2597     into a new pseudo which is a full word. */
2598
2599 if (GET_MODE (srcreg) != BLKmode
2600 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2601    srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2602
2603 /* Structures whose size is not a multiple of a word are aligned
2604 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2605 machine, this means we must skip the empty high order bytes when
2606 calculating the bit offset. */
2607  if (BYTES_BIG_ENDIAN
2608      && bytes % UNITS_PER_WORD)
2609 big_endian_correction
2610 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2611
2612  /* Copy the structure BITSIZE bits at a time.
2613
2614 We could probably emit more efficient code for machines which do not use
2615 strict alignment, but it doesn't seem worth the effort at the current
2616 time. */
2617 for (bitpos = 0, xbitpos = big_endian_correction;
2618 bitpos < bytes * BITS_PER_UNIT;
2619 bitpos += bitsize, xbitpos += bitsize)
2620 {
2621      /* We need a new source operand each time xbitpos is on a
2622 word boundary and when xbitpos == big_endian_correction
2623 (the first time through). */
2624 if (xbitpos % BITS_PER_WORD == 0
2625 || xbitpos == big_endian_correction)
2626 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD,
2627 GET_MODE (srcreg));
2628
2629 /* We need a new destination operand each time bitpos is on
2630 a word boundary. */
2631 if (bitpos % BITS_PER_WORD == 0)
2632 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2633
2634      /* Use xbitpos for the source extraction (right justified) and
2635         bitpos for the destination store (left justified).  */
2636 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2637 extract_bit_field (src, bitsize,
2638 xbitpos % BITS_PER_WORD, 1,
2639 NULL_RTX, word_mode, word_mode,
2640 BITS_PER_WORD),
2641 BITS_PER_WORD);
2642 }
2643
2644 return tgtblk;
2645}
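/* Worked example (expository, not in the original source): with
   BITS_PER_WORD == 32, a 6-byte structure gives bytes % UNITS_PER_WORD
   == 2, so big_endian_correction == 32 - 2 * 8 == 16; on a big-endian
   target the first extraction therefore starts 16 bits into the source
   word, skipping the unused high-order bytes.  */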
2646
2647/* Add a USE expression for REG to the (possibly empty) list pointed
2648 to by CALL_FUSAGE. REG must denote a hard register. */
2649
2650void
2651use_reg (call_fusage, reg)
2652 rtx *call_fusage, reg;
2653{
2654 if (GET_CODE (reg) != REG
2655 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2656    abort ();
2657
2658 *call_fusage
2659 = gen_rtx_EXPR_LIST (VOIDmode,
2660 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2661}
2662
2663/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2664 starting at REGNO. All of these registers must be hard registers. */
2665
2666void
2667use_regs (call_fusage, regno, nregs)
2668 rtx *call_fusage;
2669 int regno;
2670 int nregs;
2671{
2672  int i;
2673
2674 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2675 abort ();
2676
2677 for (i = 0; i < nregs; i++)
2678    use_reg (call_fusage, regno_reg_rtx[regno + i]);
2679}
2680
2681/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2682 PARALLEL REGS. This is for calls that pass values in multiple
2683 non-contiguous locations. The Irix 6 ABI has examples of this. */
2684
2685void
2686use_group_regs (call_fusage, regs)
2687 rtx *call_fusage;
2688 rtx regs;
2689{
2690 int i;
2691
2692 for (i = 0; i < XVECLEN (regs, 0); i++)
2693 {
2694 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2695
2696 /* A NULL entry means the parameter goes both on the stack and in
2697 registers. This can also be a MEM for targets that pass values
2698 partially on the stack and partially in registers. */
2699      if (reg != 0 && GET_CODE (reg) == REG)
2700 use_reg (call_fusage, reg);
2701 }
2702}
2703\f
2704
2705/* Determine whether the LEN bytes generated by CONSTFUN can be
2706 stored to memory using several move instructions. CONSTFUNDATA is
2707 a pointer which will be passed as argument in every CONSTFUN call.
2708 ALIGN is maximum alignment we can assume. Return nonzero if a
2709 call to store_by_pieces should succeed. */
2710
57814e5e
JJ
2711int
2712can_store_by_pieces (len, constfun, constfundata, align)
2713 unsigned HOST_WIDE_INT len;
2714 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2715 PTR constfundata;
2716 unsigned int align;
2717{
98166639 2718 unsigned HOST_WIDE_INT max_size, l;
57814e5e
JJ
2719 HOST_WIDE_INT offset = 0;
2720 enum machine_mode mode, tmode;
2721 enum insn_code icode;
2722 int reverse;
2723 rtx cst;
2724
2725  if (! STORE_BY_PIECES_P (len, align))
2726 return 0;
2727
2728 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2729 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2730 align = MOVE_MAX * BITS_PER_UNIT;
2731
2732 /* We would first store what we can in the largest integer mode, then go to
2733 successively smaller modes. */
2734
2735 for (reverse = 0;
2736 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2737 reverse++)
2738 {
2739 l = len;
2740 mode = VOIDmode;
2741      max_size = STORE_MAX_PIECES + 1;
2742 while (max_size > 1)
2743 {
2744 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2745 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2746 if (GET_MODE_SIZE (tmode) < max_size)
2747 mode = tmode;
2748
2749 if (mode == VOIDmode)
2750 break;
2751
2752 icode = mov_optab->handlers[(int) mode].insn_code;
2753 if (icode != CODE_FOR_nothing
2754 && align >= GET_MODE_ALIGNMENT (mode))
2755 {
2756 unsigned int size = GET_MODE_SIZE (mode);
2757
2758 while (l >= size)
2759 {
2760 if (reverse)
2761 offset -= size;
2762
2763 cst = (*constfun) (constfundata, offset, mode);
2764 if (!LEGITIMATE_CONSTANT_P (cst))
2765 return 0;
2766
2767 if (!reverse)
2768 offset += size;
2769
2770 l -= size;
2771 }
2772 }
2773
2774 max_size = GET_MODE_SIZE (mode);
2775 }
2776
2777 /* The code above should have handled everything. */
2778 if (l != 0)
2779 abort ();
2780 }
2781
2782 return 1;
2783}
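/* Expository sketch, not part of the original file: a CONSTFUN suitable
   for the check above (and for store_by_pieces below) that describes a
   block filled with one repeated byte; the helper name is made up.  */
#if 0
static rtx
repeated_byte_cst (data, offset, mode)
     PTR data;
     HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
     enum machine_mode mode;
{
  unsigned HOST_WIDE_INT c = *(const unsigned char *) data;
  unsigned HOST_WIDE_INT val = 0;
  unsigned int i;

  /* Replicate the byte across the width of MODE.  */
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    val = (val << BITS_PER_UNIT) | c;

  return GEN_INT (trunc_int_for_mode (val, mode));
}
#endif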
2784
2785/* Generate several move instructions to store LEN bytes generated by
2786 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2787 pointer which will be passed as argument in every CONSTFUN call.
2788 ALIGN is maximum alignment we can assume.
2789 If ENDP is 0 return to, if ENDP is 1 return memory at the end ala
2790 mempcpy, and if ENDP is 2 return memory the end minus one byte ala
2791 stpcpy. */
2792
2793rtx
2794store_by_pieces (to, len, constfun, constfundata, align, endp)
2795 rtx to;
2796 unsigned HOST_WIDE_INT len;
2797 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2798 PTR constfundata;
2799 unsigned int align;
2800     int endp;
2801{
2802 struct store_by_pieces data;
2803
2804  if (! STORE_BY_PIECES_P (len, align))
2805 abort ();
2806 to = protect_from_queue (to, 1);
2807 data.constfun = constfun;
2808 data.constfundata = constfundata;
2809 data.len = len;
2810 data.to = to;
2811 store_by_pieces_1 (&data, align);
2812 if (endp)
2813 {
2814 rtx to1;
2815
2816 if (data.reverse)
2817 abort ();
2818 if (data.autinc_to)
2819 {
2820 if (endp == 2)
2821 {
2822 if (HAVE_POST_INCREMENT && data.explicit_inc_to > 0)
2823 emit_insn (gen_add2_insn (data.to_addr, constm1_rtx));
2824 else
2825 data.to_addr = copy_addr_to_reg (plus_constant (data.to_addr,
2826 -1));
2827 }
2828 to1 = adjust_automodify_address (data.to, QImode, data.to_addr,
2829 data.offset);
2830 }
2831 else
2832 {
2833 if (endp == 2)
2834 --data.offset;
2835 to1 = adjust_address (data.to, QImode, data.offset);
2836 }
2837 return to1;
2838 }
2839 else
2840 return data.to;
2841}
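/* Expository sketch, not part of the original file: using a CONSTFUN like
   the one sketched after can_store_by_pieces, an expander can open code a
   memset-like fill and, with ENDP == 1, obtain the past-the-end pointer
   ala mempcpy.  All names here are illustrative.  */
#if 0
  if (can_store_by_pieces (len, repeated_byte_cst, &c, align))
    end = store_by_pieces (to, len, repeated_byte_cst, &c, align, 1);
#endif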
2842
2843/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2844 rtx with BLKmode). The caller must pass TO through protect_from_queue
2845 before calling. ALIGN is maximum alignment we can assume. */
2846
2847static void
2848clear_by_pieces (to, len, align)
2849 rtx to;
2850     unsigned HOST_WIDE_INT len;
2851     unsigned int align;
2852{
2853 struct store_by_pieces data;
2854
2855 data.constfun = clear_by_pieces_1;
2856  data.constfundata = NULL;
2857 data.len = len;
2858 data.to = to;
2859 store_by_pieces_1 (&data, align);
2860}
2861
2862/* Callback routine for clear_by_pieces.
2863 Return const0_rtx unconditionally. */
2864
2865static rtx
2866clear_by_pieces_1 (data, offset, mode)
2867 PTR data ATTRIBUTE_UNUSED;
2868 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2869 enum machine_mode mode ATTRIBUTE_UNUSED;
2870{
2871 return const0_rtx;
2872}
2873
2874/* Subroutine of clear_by_pieces and store_by_pieces.
2875 Generate several move instructions to store LEN bytes of block TO. (A MEM
2876 rtx with BLKmode). The caller must pass TO through protect_from_queue
2877 before calling. ALIGN is maximum alignment we can assume. */
2878
2879static void
2880store_by_pieces_1 (data, align)
2881 struct store_by_pieces *data;
2882 unsigned int align;
2883{
2884 rtx to_addr = XEXP (data->to, 0);
2885  unsigned HOST_WIDE_INT max_size = STORE_MAX_PIECES + 1;
2886 enum machine_mode mode = VOIDmode, tmode;
2887 enum insn_code icode;
2888
2889 data->offset = 0;
2890 data->to_addr = to_addr;
2891 data->autinc_to
2892 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2893 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2894
2895 data->explicit_inc_to = 0;
2896 data->reverse
9de08200 2897 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
57814e5e
JJ
2898 if (data->reverse)
2899 data->offset = data->len;
9de08200 2900
57814e5e 2901 /* If storing requires more than two move insns,
9de08200
RK
2902 copy addresses to registers (to make displacements shorter)
2903 and use post-increment if available. */
57814e5e
JJ
2904 if (!data->autinc_to
2905 && move_by_pieces_ninsns (data->len, align) > 2)
9de08200 2906 {
2907      /* Determine the main mode we'll be using. */
2908 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2909 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2910 if (GET_MODE_SIZE (tmode) < max_size)
2911 mode = tmode;
2912
2913  if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
2914    {
2915 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2916 data->autinc_to = 1;
2917 data->explicit_inc_to = -1;
2918    }
2919
2920 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2921 && ! data->autinc_to)
2922    {
2923 data->to_addr = copy_addr_to_reg (to_addr);
2924 data->autinc_to = 1;
2925 data->explicit_inc_to = 1;
2926    }
2927
2928 if ( !data->autinc_to && CONSTANT_P (to_addr))
2929 data->to_addr = copy_addr_to_reg (to_addr);
2930 }
2931
2932  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2933      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2934    align = MOVE_MAX * BITS_PER_UNIT;
2935
2936  /* First store what we can in the largest integer mode, then go to
2937 successively smaller modes. */
2938
2939 while (max_size > 1)
2940 {
2941 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2942 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2943 if (GET_MODE_SIZE (tmode) < max_size)
2944 mode = tmode;
2945
2946 if (mode == VOIDmode)
2947 break;
2948
2949 icode = mov_optab->handlers[(int) mode].insn_code;
2950      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2951        store_by_pieces_2 (GEN_FCN (icode), mode, data);
2952
2953 max_size = GET_MODE_SIZE (mode);
2954 }
2955
2956 /* The code above should have handled everything. */
2957  if (data->len != 0)
2958 abort ();
2959}
2960
2961/* Subroutine of store_by_pieces_1.  Store as many bytes as appropriate
2962 with move instructions for mode MODE. GENFUN is the gen_... function
2963 to make a move insn for that mode. DATA has all the other info. */
2964
2965static void
2966store_by_pieces_2 (genfun, mode, data)
2967     rtx (*genfun) PARAMS ((rtx, ...));
2968     enum machine_mode mode;
2969     struct store_by_pieces *data;
2970{
2971  unsigned int size = GET_MODE_SIZE (mode);
2972  rtx to1, cst;
2973
2974 while (data->len >= size)
2975 {
2976 if (data->reverse)
2977 data->offset -= size;
2978
2979      if (data->autinc_to)
2980 to1 = adjust_automodify_address (data->to, mode, data->to_addr,
2981 data->offset);
2982      else
2983        to1 = adjust_address (data->to, mode, data->offset);
2984
2985      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2986 emit_insn (gen_add2_insn (data->to_addr,
2987 GEN_INT (-(HOST_WIDE_INT) size)));
2988
2989 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2990 emit_insn ((*genfun) (to1, cst));
2991
2992      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2993        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2994
2995 if (! data->reverse)
2996 data->offset += size;
2997
2998 data->len -= size;
2999 }
3000}
3001\f
3002/* Write zeros through the storage of OBJECT.  If OBJECT has BLKmode, SIZE is
3003   its length in bytes. */
3004
3005rtx
3006clear_storage (object, size)
3007     rtx object;
3008     rtx size;
3009{
3010  rtx retval = 0;
3011 unsigned int align = (GET_CODE (object) == MEM ? MEM_ALIGN (object)
3012 : GET_MODE_ALIGNMENT (GET_MODE (object)));
3013
3014 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
3015 just move a zero. Otherwise, do this a piece at a time. */
3016  if (GET_MODE (object) != BLKmode
3017      && GET_CODE (size) == CONST_INT
3018      && INTVAL (size) == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (object)))
3019 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
3020 else
3021    {
3022 object = protect_from_queue (object, 1);
3023 size = protect_from_queue (size, 0);
3024
3025 if (GET_CODE (size) == CONST_INT
3026          && CLEAR_BY_PIECES_P (INTVAL (size), align))
3027        clear_by_pieces (object, INTVAL (size), align);
3028 else if (clear_storage_via_clrstr (object, size, align))
3029 ;
3030      else
3031 retval = clear_storage_via_libcall (object, size);
3032 }
3033
3034 return retval;
3035}
3036
3037/* A subroutine of clear_storage. Expand a clrstr pattern;
3038 return true if successful. */
3039
3040static bool
3041clear_storage_via_clrstr (object, size, align)
3042 rtx object, size;
3043 unsigned int align;
3044{
3045 /* Try the most limited insn first, because there's no point
3046 including more than one in the machine description unless
3047 the more limited one has some advantage. */
3048
3049 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
3050 enum machine_mode mode;
3051
3052 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
3053 mode = GET_MODE_WIDER_MODE (mode))
3054 {
3055 enum insn_code code = clrstr_optab[(int) mode];
3056 insn_operand_predicate_fn pred;
3057
3058 if (code != CODE_FOR_nothing
3059 /* We don't need MODE to be narrower than
3060 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
3061 the mode mask, as it is returned by the macro, it will
3062 definitely be less than the actual mode mask. */
3063 && ((GET_CODE (size) == CONST_INT
3064 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3065 <= (GET_MODE_MASK (mode) >> 1)))
3066 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3067 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
3068 || (*pred) (object, BLKmode))
3069 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
3070 || (*pred) (opalign, VOIDmode)))
3071        {
3072 rtx op1;
3073 rtx last = get_last_insn ();
3074 rtx pat;
3075
3076 op1 = convert_to_mode (mode, size, 1);
3077 pred = insn_data[(int) code].operand[1].predicate;
3078 if (pred != 0 && ! (*pred) (op1, mode))
3079 op1 = copy_to_mode_reg (mode, op1);
3080
3081 pat = GEN_FCN ((int) code) (object, op1, opalign);
3082 if (pat)
9de08200 3083 {
3084 emit_insn (pat);
3085 return true;
3086 }
3087 else
3088 delete_insns_since (last);
3089 }
3090 }
3091
3092 return false;
3093}
3094
3095/* A subroutine of clear_storage. Expand a call to memset or bzero.
3096 Return the return value of memset, 0 otherwise. */
3097
3098static rtx
3099clear_storage_via_libcall (object, size)
3100 rtx object, size;
3101{
3102 tree call_expr, arg_list, fn, object_tree, size_tree;
3103 enum machine_mode size_mode;
3104 rtx retval;
9de08200 3105
4ca79136 3106 /* OBJECT or SIZE may have been passed through protect_from_queue.
52cf7115 3107
4ca79136
RH
3108 It is unsafe to save the value generated by protect_from_queue
3109 and reuse it later. Consider what happens if emit_queue is
3110 called before the return value from protect_from_queue is used.
52cf7115 3111
4ca79136
RH
3112 Expansion of the CALL_EXPR below will call emit_queue before
3113 we are finished emitting RTL for argument setup. So if we are
3114 not careful we could get the wrong value for an argument.
52cf7115 3115
4ca79136
RH
3116 To avoid this problem we go ahead and emit code to copy OBJECT
3117 and SIZE into new pseudos. We can then place those new pseudos
3118 into an RTL_EXPR and use them later, even after a call to
3119 emit_queue.
52cf7115 3120
4ca79136
RH
3121 Note this is not strictly needed for library calls since they
3122 do not call emit_queue before loading their arguments. However,
3123 we may need to have library calls call emit_queue in the future
3124 since failing to do so could cause problems for targets which
3125 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
52cf7115 3126
4ca79136 3127 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 3128
4ca79136
RH
3129 if (TARGET_MEM_FUNCTIONS)
3130 size_mode = TYPE_MODE (sizetype);
3131 else
3132 size_mode = TYPE_MODE (unsigned_type_node);
3133 size = convert_to_mode (size_mode, size, 1);
3134 size = copy_to_mode_reg (size_mode, size);
52cf7115 3135
4ca79136
RH
3136 /* It is incorrect to use the libcall calling conventions to call
3137 memset in this context. This could be a user call to memset and
3138 the user may wish to examine the return value from memset. For
3139 targets where libcalls and normal calls have different conventions
3140 for returning pointers, we could end up generating incorrect code.
3141
3142     For convenience, we generate the call to bzero this way as well. */
3143
3144 object_tree = make_tree (ptr_type_node, object);
3145 if (TARGET_MEM_FUNCTIONS)
3146 size_tree = make_tree (sizetype, size);
3147 else
3148 size_tree = make_tree (unsigned_type_node, size);
3149
3150 fn = clear_storage_libcall_fn (true);
3151 arg_list = tree_cons (NULL_TREE, size_tree, NULL_TREE);
3152 if (TARGET_MEM_FUNCTIONS)
3153 arg_list = tree_cons (NULL_TREE, integer_zero_node, arg_list);
3154 arg_list = tree_cons (NULL_TREE, object_tree, arg_list);
3155
3156 /* Now we have to build up the CALL_EXPR itself. */
3157 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3158 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
3159 call_expr, arg_list, NULL_TREE);
3160 TREE_SIDE_EFFECTS (call_expr) = 1;
3161
3162 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
3163
3164 /* If we are initializing a readonly value, show the above call
3165 clobbered it. Otherwise, a load from it may erroneously be
3166 hoisted from a loop. */
3167 if (RTX_UNCHANGING_P (object))
3168 emit_insn (gen_rtx_CLOBBER (VOIDmode, object));
3169
3170 return (TARGET_MEM_FUNCTIONS ? retval : NULL_RTX);
3171}
3172
3173/* A subroutine of clear_storage_via_libcall. Create the tree node
3174 for the function we use for block clears. The first time FOR_CALL
3175 is true, we call assemble_external. */
3176
3177static GTY(()) tree block_clear_fn;
3178
3179void
3180init_block_clear_fn (asmspec)
3181 const char *asmspec;
3182{
3183  if (!block_clear_fn)
3184    {
3185 tree fn, args;
3186
3187 if (TARGET_MEM_FUNCTIONS)
3188 {
3189 fn = get_identifier ("memset");
3190 args = build_function_type_list (ptr_type_node, ptr_type_node,
3191 integer_type_node, sizetype,
3192 NULL_TREE);
3193 }
3194 else
3195 {
3196 fn = get_identifier ("bzero");
3197 args = build_function_type_list (void_type_node, ptr_type_node,
3198 unsigned_type_node, NULL_TREE);
3199        }
3200
3201 fn = build_decl (FUNCTION_DECL, fn, args);
3202 DECL_EXTERNAL (fn) = 1;
3203 TREE_PUBLIC (fn) = 1;
3204 DECL_ARTIFICIAL (fn) = 1;
3205 TREE_NOTHROW (fn) = 1;
3206
3207 block_clear_fn = fn;
3208    }
3209
3210 if (asmspec)
3211 {
3212 SET_DECL_RTL (block_clear_fn, NULL_RTX);
3213 SET_DECL_ASSEMBLER_NAME (block_clear_fn, get_identifier (asmspec));
3214 }
3215}
3216
3217static tree
3218clear_storage_libcall_fn (for_call)
3219 int for_call;
3220{
3221 static bool emitted_extern;
3222
3223 if (!block_clear_fn)
3224 init_block_clear_fn (NULL);
3225
3226 if (for_call && !emitted_extern)
3227 {
3228 emitted_extern = true;
3229 make_decl_rtl (block_clear_fn, NULL);
3230 assemble_external (block_clear_fn);
3231    }
3232
3233  return block_clear_fn;
3234}
3235\f
3236/* Generate code to copy Y into X.
3237 Both Y and X must have the same mode, except that
3238 Y can be a constant with VOIDmode.
3239 This mode cannot be BLKmode; use emit_block_move for that.
3240
3241 Return the last instruction emitted. */
3242
3243rtx
3244emit_move_insn (x, y)
3245 rtx x, y;
3246{
3247 enum machine_mode mode = GET_MODE (x);
3248  rtx y_cst = NULL_RTX;
3249  rtx last_insn, set;
3250
3251 x = protect_from_queue (x, 1);
3252 y = protect_from_queue (y, 0);
3253
3254 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
3255 abort ();
3256
3257 /* Never force constant_p_rtx to memory. */
3258 if (GET_CODE (y) == CONSTANT_P_RTX)
3259 ;
3260  else if (CONSTANT_P (y))
3261    {
3262      if (optimize
3263          && SCALAR_FLOAT_MODE_P (GET_MODE (x))
3264 && (last_insn = compress_float_constant (x, y)))
3265 return last_insn;
3266
3267 y_cst = y;
3268
3269 if (!LEGITIMATE_CONSTANT_P (y))
3270 {
3271          y = force_const_mem (mode, y);
3272
3273 /* If the target's cannot_force_const_mem prevented the spill,
3274 assume that the target's move expanders will also take care
3275 of the non-legitimate constant. */
3276 if (!y)
3277 y = y_cst;
51286de6 3278 }
de1b33dd 3279 }
bbf6f052
RK
3280
3281 /* If X or Y are memory references, verify that their addresses are valid
3282 for the machine. */
3283 if (GET_CODE (x) == MEM
3284 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
3285 && ! push_operand (x, GET_MODE (x)))
3286 || (flag_force_addr
3287 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
3288    x = validize_mem (x);
3289
3290 if (GET_CODE (y) == MEM
3291 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
3292 || (flag_force_addr
3293 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
3294    y = validize_mem (y);
3295
3296 if (mode == BLKmode)
3297 abort ();
3298
3299 last_insn = emit_move_insn_1 (x, y);
3300
3301 if (y_cst && GET_CODE (x) == REG
3302 && (set = single_set (last_insn)) != NULL_RTX
3303 && SET_DEST (set) == x
3304 && ! rtx_equal_p (y_cst, SET_SRC (set)))
3305    set_unique_reg_note (last_insn, REG_EQUAL, y_cst);
3306
3307 return last_insn;
3308}
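/* Expository note, not in the original source: when Y is a constant that
   had to be spilled (e.g. a CONST_DOUBLE forced into the constant pool),
   the code above still records the original value on the final insn as a
   REG_EQUAL note, so something like

     emit_move_insn (target, immed_double_const (lo, hi, DImode));

   leaves later passes able to see the constant even though the insn
   actually loads from memory.  */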
3309
3310/* Low level part of emit_move_insn.
3311 Called just like emit_move_insn, but assumes X and Y
3312 are basically valid. */
3313
3314rtx
3315emit_move_insn_1 (x, y)
3316 rtx x, y;
3317{
3318 enum machine_mode mode = GET_MODE (x);
3319 enum machine_mode submode;
3320 enum mode_class class = GET_MODE_CLASS (mode);
3321
3322  if ((unsigned int) mode >= (unsigned int) MAX_MACHINE_MODE)
3323    abort ();
3324
3325 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
3326 return
3327 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
3328
3329  /* Expand complex moves by moving real part and imag part, if possible. */
3330  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
3331           && BLKmode != (submode = GET_MODE_INNER (mode))
3332 && (mov_optab->handlers[(int) submode].insn_code
3333 != CODE_FOR_nothing))
3334 {
3335 /* Don't split destination if it is a stack push. */
3336 int stack = push_operand (x, GET_MODE (x));
3337
3338#ifdef PUSH_ROUNDING
3339 /* In case we output to the stack, but the size is smaller machine can
3340 push exactly, we need to use move instructions. */
3341 if (stack
3342 && (PUSH_ROUNDING (GET_MODE_SIZE (submode))
3343 != GET_MODE_SIZE (submode)))
3344 {
3345 rtx temp;
3346          HOST_WIDE_INT offset1, offset2;
3347
3348 /* Do not use anti_adjust_stack, since we don't want to update
3349 stack_pointer_delta. */
3350 temp = expand_binop (Pmode,
3351#ifdef STACK_GROWS_DOWNWARD
3352 sub_optab,
3353#else
3354 add_optab,
3355#endif
3356 stack_pointer_rtx,
3357 GEN_INT
3358 (PUSH_ROUNDING
3359 (GET_MODE_SIZE (GET_MODE (x)))),
3360 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
3361
1a06f5fe
JH
3362 if (temp != stack_pointer_rtx)
3363 emit_move_insn (stack_pointer_rtx, temp);
3364
3365#ifdef STACK_GROWS_DOWNWARD
3366 offset1 = 0;
3367 offset2 = GET_MODE_SIZE (submode);
3368#else
3369 offset1 = -PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)));
3370 offset2 = (-PUSH_ROUNDING (GET_MODE_SIZE (GET_MODE (x)))
3371 + GET_MODE_SIZE (submode));
3372#endif
3373
3374 emit_move_insn (change_address (x, submode,
3375 gen_rtx_PLUS (Pmode,
3376 stack_pointer_rtx,
3377 GEN_INT (offset1))),
3378 gen_realpart (submode, y));
3379 emit_move_insn (change_address (x, submode,
3380 gen_rtx_PLUS (Pmode,
3381 stack_pointer_rtx,
3382 GEN_INT (offset2))),
3383 gen_imagpart (submode, y));
3384 }
3385      else
3386#endif
3387 /* If this is a stack, push the highpart first, so it
3388 will be in the argument order.
3389
3390 In that case, change_address is used only to convert
3391 the mode, not to change the address. */
3392      if (stack)
3393        {
3394 /* Note that the real part always precedes the imag part in memory
3395 regardless of machine's endianness. */
3396#ifdef STACK_GROWS_DOWNWARD
3397 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3398                     (gen_rtx_MEM (submode, XEXP (x, 0)),
3399                      gen_imagpart (submode, y)));
3400          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3401                     (gen_rtx_MEM (submode, XEXP (x, 0)),
3402                      gen_realpart (submode, y)));
3403#else
3404 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3405                     (gen_rtx_MEM (submode, XEXP (x, 0)),
3406                      gen_realpart (submode, y)));
3407          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3408                     (gen_rtx_MEM (submode, XEXP (x, 0)),
3409                      gen_imagpart (submode, y)));
3410#endif
3411 }
3412 else
3413 {
3414 rtx realpart_x, realpart_y;
3415 rtx imagpart_x, imagpart_y;
3416
3417 /* If this is a complex value with each part being smaller than a
3418 word, the usual calling sequence will likely pack the pieces into
3419 a single register. Unfortunately, SUBREG of hard registers only
3420 deals in terms of words, so we have a problem converting input
3421 arguments to the CONCAT of two registers that is used elsewhere
3422 for complex values. If this is before reload, we can copy it into
3423 memory and reload. FIXME, we should see about using extract and
3424 insert on integer registers, but complex short and complex char
3425 variables should rarely be used. */
3a94c984 3426 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
3427 && (reload_in_progress | reload_completed) == 0)
3428 {
3429 int packed_dest_p
3430 = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
3431 int packed_src_p
3432 = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
3433
3434 if (packed_dest_p || packed_src_p)
3435 {
3436 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
3437 ? MODE_FLOAT : MODE_INT);
3438
3439 enum machine_mode reg_mode
3440 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
3441
3442 if (reg_mode != BLKmode)
3443 {
3444 rtx mem = assign_stack_temp (reg_mode,
3445 GET_MODE_SIZE (mode), 0);
f4ef873c 3446 rtx cmem = adjust_address (mem, mode, 0);
405f63da 3447
3448 cfun->cannot_inline
3449 = N_("function using short complex types cannot be inline");
3450
3451 if (packed_dest_p)
3452 {
3453 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
bb93b973 3454
3455 emit_move_insn_1 (cmem, y);
3456 return emit_move_insn_1 (sreg, mem);
3457 }
3458 else
3459 {
3460 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
bb93b973 3461
3462 emit_move_insn_1 (mem, sreg);
3463 return emit_move_insn_1 (x, cmem);
3464 }
3465 }
3466 }
3467 }
3468
3469 realpart_x = gen_realpart (submode, x);
3470 realpart_y = gen_realpart (submode, y);
3471 imagpart_x = gen_imagpart (submode, x);
3472 imagpart_y = gen_imagpart (submode, y);
3473
3474 /* Show the output dies here. This is necessary for SUBREGs
3475 of pseudos since we cannot track their lifetimes correctly;
3476 hard regs shouldn't appear here except as return values.
3477 We never want to emit such a clobber after reload. */
3478 if (x != y
3479 && ! (reload_in_progress || reload_completed)
3480 && (GET_CODE (realpart_x) == SUBREG
3481 || GET_CODE (imagpart_x) == SUBREG))
bb93b973 3482 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2638126a 3483
c937357e 3484 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
235ae7be 3485 (realpart_x, realpart_y));
c937357e 3486 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
235ae7be 3487 (imagpart_x, imagpart_y));
c937357e 3488 }
7308a047 3489
7a1ab50a 3490 return get_last_insn ();
3491 }
3492
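/* Illustrative sketch (not from the sources): when MODE is DCmode,
   SUBMODE is DFmode and X is not a stack push, the code above is
   roughly equivalent to

       emit_move_insn (gen_realpart (DFmode, x), gen_realpart (DFmode, y));
       emit_move_insn (gen_imagpart (DFmode, x), gen_imagpart (DFmode, y));

   except that the DFmode move pattern is invoked directly, and a
   CLOBBER of X is emitted first when X involves a SUBREG of a pseudo,
   so that life analysis does not treat the two partial stores as uses
   of an uninitialized register.  */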
3493 /* Handle MODE_CC modes: If we don't have a special move insn for this mode,
3494 find a mode to do it in. If we have a movcc, use it. Otherwise,
3495 find the MODE_INT mode of the same width. */
3496 else if (GET_MODE_CLASS (mode) == MODE_CC
3497 && mov_optab->handlers[(int) mode].insn_code == CODE_FOR_nothing)
3498 {
3499 enum insn_code insn_code;
3500 enum machine_mode tmode = VOIDmode;
3501 rtx x1 = x, y1 = y;
3502
3503 if (mode != CCmode
3504 && mov_optab->handlers[(int) CCmode].insn_code != CODE_FOR_nothing)
3505 tmode = CCmode;
3506 else
3507 for (tmode = QImode; tmode != VOIDmode;
3508 tmode = GET_MODE_WIDER_MODE (tmode))
3509 if (GET_MODE_SIZE (tmode) == GET_MODE_SIZE (mode))
3510 break;
3511
3512 if (tmode == VOIDmode)
3513 abort ();
3514
3515 /* Get X and Y in TMODE. We can't use gen_lowpart here because it
3516 may call change_address which is not appropriate if we were
3517 called when a reload was in progress. We don't have to worry
3518 about changing the address since the size in bytes is supposed to
3519 be the same. Copy the MEM to change the mode and move any
3520 substitutions from the old MEM to the new one. */
3521
3522 if (reload_in_progress)
3523 {
3524 x = gen_lowpart_common (tmode, x1);
3525 if (x == 0 && GET_CODE (x1) == MEM)
3526 {
3527 x = adjust_address_nv (x1, tmode, 0);
3528 copy_replacements (x1, x);
3529 }
3530
3531 y = gen_lowpart_common (tmode, y1);
3532 if (y == 0 && GET_CODE (y1) == MEM)
3533 {
3534 y = adjust_address_nv (y1, tmode, 0);
3535 copy_replacements (y1, y);
3536 }
3537 }
3538 else
3539 {
3540 x = gen_lowpart (tmode, x);
3541 y = gen_lowpart (tmode, y);
3542 }
3543
3544 insn_code = mov_optab->handlers[(int) tmode].insn_code;
3545 return emit_insn (GEN_FCN (insn_code) (x, y));
3546 }
3547
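/* Illustrative instance of the MODE_CC fallback above: if a 4-byte
   condition-code mode (say CC_NOOVmode, used here purely as an
   example) has no move pattern and there is no movcc either, both
   operands are lowered outside of reload with

       x = gen_lowpart (SImode, x);
       y = gen_lowpart (SImode, y);

   and the SImode move pattern does the copy, SImode being the first
   integer mode whose size matches.  */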
3548 /* This will handle any multi-word or full-word mode that lacks a move_insn
3549 pattern. However, you will get better code if you define such patterns,
bbf6f052 3550 even if they must turn into multiple assembler instructions. */
cffa2189 3551 else if (GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
3552 {
3553 rtx last_insn = 0;
3ef1eef4 3554 rtx seq, inner;
235ae7be 3555 int need_clobber;
bb93b973 3556 int i;
3a94c984 3557
3558#ifdef PUSH_ROUNDING
3559
3560 /* If X is a push on the stack, do the push now and replace
3561 X with a reference to the stack pointer. */
3562 if (push_operand (x, GET_MODE (x)))
3563 {
3564 rtx temp;
3565 enum rtx_code code;
0fb7aeda 3566
3567 /* Do not use anti_adjust_stack, since we don't want to update
3568 stack_pointer_delta. */
3569 temp = expand_binop (Pmode,
3570#ifdef STACK_GROWS_DOWNWARD
3571 sub_optab,
3572#else
3573 add_optab,
3574#endif
3575 stack_pointer_rtx,
3576 GEN_INT
3577 (PUSH_ROUNDING
3578 (GET_MODE_SIZE (GET_MODE (x)))),
a426c92e 3579 stack_pointer_rtx, 0, OPTAB_LIB_WIDEN);
bb93b973 3580
3581 if (temp != stack_pointer_rtx)
3582 emit_move_insn (stack_pointer_rtx, temp);
3583
3584 code = GET_CODE (XEXP (x, 0));
bb93b973 3585
3586 /* Just hope that small offsets off SP are OK. */
3587 if (code == POST_INC)
0fb7aeda 3588 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3589 GEN_INT (-((HOST_WIDE_INT)
3590 GET_MODE_SIZE (GET_MODE (x)))));
918a6124 3591 else if (code == POST_DEC)
0fb7aeda 3592 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3593 GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
3594 else
3595 temp = stack_pointer_rtx;
3596
3597 x = change_address (x, VOIDmode, temp);
3598 }
3599#endif
3a94c984 3600
3601 /* If we are in reload, see if either operand is a MEM whose address
3602 is scheduled for replacement. */
3603 if (reload_in_progress && GET_CODE (x) == MEM
3604 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
f1ec5147 3605 x = replace_equiv_address_nv (x, inner);
3606 if (reload_in_progress && GET_CODE (y) == MEM
3607 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
f1ec5147 3608 y = replace_equiv_address_nv (y, inner);
3ef1eef4 3609
235ae7be 3610 start_sequence ();
15a7a8ec 3611
235ae7be 3612 need_clobber = 0;
bbf6f052 3613 for (i = 0;
3a94c984 3614 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
3615 i++)
3616 {
3617 rtx xpart = operand_subword (x, i, 1, mode);
3618 rtx ypart = operand_subword (y, i, 1, mode);
3619
3620 /* If we can't get a part of Y, put Y into memory if it is a
3621 constant. Otherwise, force it into a register. If we still
3622 can't get a part of Y, abort. */
3623 if (ypart == 0 && CONSTANT_P (y))
3624 {
3625 y = force_const_mem (mode, y);
3626 ypart = operand_subword (y, i, 1, mode);
3627 }
3628 else if (ypart == 0)
3629 ypart = operand_subword_force (y, i, mode);
3630
3631 if (xpart == 0 || ypart == 0)
3632 abort ();
3633
3634 need_clobber |= (GET_CODE (xpart) == SUBREG);
3635
3636 last_insn = emit_move_insn (xpart, ypart);
3637 }
6551fa4d 3638
2f937369 3639 seq = get_insns ();
3640 end_sequence ();
3641
3642 /* Show the output dies here. This is necessary for SUBREGs
3643 of pseudos since we cannot track their lifetimes correctly;
3644 hard regs shouldn't appear here except as return values.
3645 We never want to emit such a clobber after reload. */
3646 if (x != y
3647 && ! (reload_in_progress || reload_completed)
3648 && need_clobber != 0)
bb93b973 3649 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
235ae7be
DM
3650
3651 emit_insn (seq);
3652
3653 return last_insn;
3654 }
3655 else
3656 abort ();
3657}
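/* Illustrative expansion of the multi-word fallback above: moving a
   DImode value on a 32-bit target that lacks a movdi pattern becomes
   two word moves, roughly

       emit_move_insn (operand_subword (x, 0, 1, DImode),
                       operand_subword (y, 0, 1, DImode));
       emit_move_insn (operand_subword (x, 1, 1, DImode),
                       operand_subword (y, 1, 1, DImode));

   collected in a sequence and preceded by a CLOBBER of X when some
   XPART is a SUBREG, for the same liveness reason as in the complex
   case above.  */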
3658
3659/* If Y is representable exactly in a narrower mode, and the target can
3660 perform the extension directly from constant or memory, then emit the
3661 move as an extension. */
3662
3663static rtx
3664compress_float_constant (x, y)
3665 rtx x, y;
3666{
3667 enum machine_mode dstmode = GET_MODE (x);
3668 enum machine_mode orig_srcmode = GET_MODE (y);
3669 enum machine_mode srcmode;
3670 REAL_VALUE_TYPE r;
3671
3672 REAL_VALUE_FROM_CONST_DOUBLE (r, y);
3673
3674 for (srcmode = GET_CLASS_NARROWEST_MODE (GET_MODE_CLASS (orig_srcmode));
3675 srcmode != orig_srcmode;
3676 srcmode = GET_MODE_WIDER_MODE (srcmode))
3677 {
3678 enum insn_code ic;
3679 rtx trunc_y, last_insn;
3680
3681 /* Skip if the target can't extend this way. */
3682 ic = can_extend_p (dstmode, srcmode, 0);
3683 if (ic == CODE_FOR_nothing)
3684 continue;
3685
3686 /* Skip if the narrowed value isn't exact. */
3687 if (! exact_real_truncate (srcmode, &r))
3688 continue;
3689
3690 trunc_y = CONST_DOUBLE_FROM_REAL_VALUE (r, srcmode);
3691
3692 if (LEGITIMATE_CONSTANT_P (trunc_y))
3693 {
3694 /* Skip if the target needs extra instructions to perform
3695 the extension. */
3696 if (! (*insn_data[ic].operand[1].predicate) (trunc_y, srcmode))
3697 continue;
3698 }
3699 else if (float_extend_from_mem[dstmode][srcmode])
3700 trunc_y = validize_mem (force_const_mem (srcmode, trunc_y));
3701 else
3702 continue;
3703
3704 emit_unop_insn (ic, x, trunc_y, UNKNOWN);
3705 last_insn = get_last_insn ();
3706
3707 if (GET_CODE (x) == REG)
0c19a26f 3708 set_unique_reg_note (last_insn, REG_EQUAL, y);
3709
3710 return last_insn;
3711 }
3712
3713 return NULL_RTX;
3714}
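/* Example of the effect of compress_float_constant (illustrative):
   on a target with an extendsfdf2 pattern, moving the DFmode
   constant 1.5 into a register can be emitted as roughly

       (set (reg:DF 100) (float_extend:DF (mem/u:SF (symbol_ref ...))))

   since 1.5 is exactly representable in SFmode, halving the size of
   the constant-pool entry.  A constant such as 0.1 fails the
   exact_real_truncate test and stays in DFmode.  */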
3715\f
3716/* Pushing data onto the stack. */
3717
3718/* Push a block of length SIZE (perhaps variable)
3719 and return an rtx to address the beginning of the block.
3720 Note that it is not possible for the value returned to be a QUEUED.
3721 The value may be virtual_outgoing_args_rtx.
3722
3723 EXTRA is the number of bytes of padding to push in addition to SIZE.
3724 BELOW nonzero means this padding comes at low addresses;
3725 otherwise, the padding comes at high addresses. */
3726
3727rtx
3728push_block (size, extra, below)
3729 rtx size;
3730 int extra, below;
3731{
b3694847 3732 rtx temp;
3733
3734 size = convert_modes (Pmode, ptr_mode, size, 1);
3735 if (CONSTANT_P (size))
3736 anti_adjust_stack (plus_constant (size, extra));
3737 else if (GET_CODE (size) == REG && extra == 0)
3738 anti_adjust_stack (size);
3739 else
3740 {
ce48579b 3741 temp = copy_to_mode_reg (Pmode, size);
bbf6f052 3742 if (extra != 0)
906c4e36 3743 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
3744 temp, 0, OPTAB_LIB_WIDEN);
3745 anti_adjust_stack (temp);
3746 }
3747
f73ad30e 3748#ifndef STACK_GROWS_DOWNWARD
f73ad30e 3749 if (0)
3750#else
3751 if (1)
bbf6f052 3752#endif
f73ad30e 3753 {
3754 temp = virtual_outgoing_args_rtx;
3755 if (extra != 0 && below)
3756 temp = plus_constant (temp, extra);
3757 }
3758 else
3759 {
3760 if (GET_CODE (size) == CONST_INT)
3761 temp = plus_constant (virtual_outgoing_args_rtx,
3a94c984 3762 -INTVAL (size) - (below ? 0 : extra));
3763 else if (extra != 0 && !below)
3764 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3bdf5ad1 3765 negate_rtx (Pmode, plus_constant (size, extra)));
3766 else
3767 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3768 negate_rtx (Pmode, size));
3769 }
3770
3771 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3772}
3773
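/* Hypothetical use of push_block (values invented for illustration):

       rtx block = push_block (GEN_INT (16), 4, 1);

   reserves 16 bytes of stack plus 4 bytes of padding at the low end
   (BELOW nonzero), and BLOCK then addresses the first byte of the
   new space whichever way the stack grows.  */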
3774#ifdef PUSH_ROUNDING
3775
566aa174 3776/* Emit single push insn. */
21d93687 3777
3778static void
3779emit_single_push_insn (mode, x, type)
3780 rtx x;
3781 enum machine_mode mode;
3782 tree type;
3783{
566aa174 3784 rtx dest_addr;
918a6124 3785 unsigned rounded_size = PUSH_ROUNDING (GET_MODE_SIZE (mode));
566aa174 3786 rtx dest;
3787 enum insn_code icode;
3788 insn_operand_predicate_fn pred;
566aa174 3789
3790 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3791 /* If there is a push pattern, use it. Otherwise try the old way of
3792 throwing a MEM representing the push operation to the move expander. */
3793 icode = push_optab->handlers[(int) mode].insn_code;
3794 if (icode != CODE_FOR_nothing)
3795 {
3796 if (((pred = insn_data[(int) icode].operand[0].predicate)
505ddab6 3797 && !((*pred) (x, mode))))
3798 x = force_reg (mode, x);
3799 emit_insn (GEN_FCN (icode) (x));
3800 return;
3801 }
3802 if (GET_MODE_SIZE (mode) == rounded_size)
3803 dest_addr = gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
3804 else
3805 {
3806#ifdef STACK_GROWS_DOWNWARD
3807 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
505ddab6 3808 GEN_INT (-(HOST_WIDE_INT) rounded_size));
3809#else
3810 dest_addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx,
3811 GEN_INT (rounded_size));
3812#endif
3813 dest_addr = gen_rtx_PRE_MODIFY (Pmode, stack_pointer_rtx, dest_addr);
3814 }
3815
3816 dest = gen_rtx_MEM (mode, dest_addr);
3817
3818 if (type != 0)
3819 {
3820 set_mem_attributes (dest, type, 1);
3821
3822 if (flag_optimize_sibling_calls)
3823 /* Function incoming arguments may overlap with sibling call
3824 outgoing arguments and we cannot allow reordering of reads
3825 from function arguments with stores to outgoing arguments
3826 of sibling calls. */
3827 set_mem_alias_set (dest, 0);
3828 }
3829 emit_move_insn (dest, x);
566aa174 3830}
21d93687 3831#endif
566aa174 3832
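/* Illustrative example: with PUSH_ROUNDING rounding every push up to
   4 bytes and a downward-growing stack, a 2-byte HImode push cannot
   use a plain PRE_DEC (that would adjust the stack pointer by only
   2), so the code above builds approximately

       (mem:HI (pre_modify (reg sp) (plus (reg sp) (const_int -4))))

   keeping the stack pointer rounded while storing just the two bytes
   of the operand.  */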
3833/* Generate code to push X onto the stack, assuming it has mode MODE and
3834 type TYPE.
3835 MODE is redundant except when X is a CONST_INT (since they don't
3836 carry mode info).
3837 SIZE is an rtx for the size of data to be copied (in bytes),
3838 needed only if X is BLKmode.
3839
f1eaaf73 3840 ALIGN (in bits) is maximum alignment we can assume.
bbf6f052 3841
3842 If PARTIAL and REG are both nonzero, then copy that many of the first
3843 words of X into registers starting with REG, and push the rest of X.
3844 The amount of space pushed is decreased by PARTIAL words,
3845 rounded *down* to a multiple of PARM_BOUNDARY.
3846 REG must be a hard register in this case.
3847 If REG is zero but PARTIAL is not, take all other actions for an
3848 argument partially in registers, but do not actually load any
3849 registers.
3850
3851 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 3852 This is ignored if an argument block has already been allocated.
3853
3854 On a machine that lacks real push insns, ARGS_ADDR is the address of
3855 the bottom of the argument block for this call. We use indexing off there
3856 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
3857 argument block has not been preallocated.
3858
3859 ARGS_SO_FAR is the size of args previously pushed for this call.
3860
3861 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3862 for arguments passed in registers. If nonzero, it will be the number
3863 of bytes required. */
3864
3865void
3866emit_push_insn (x, mode, type, size, align, partial, reg, extra,
4fc026cd 3867 args_addr, args_so_far, reg_parm_stack_space,
0fb7aeda 3868 alignment_pad)
b3694847 3869 rtx x;
3870 enum machine_mode mode;
3871 tree type;
3872 rtx size;
729a2125 3873 unsigned int align;
3874 int partial;
3875 rtx reg;
3876 int extra;
3877 rtx args_addr;
3878 rtx args_so_far;
e5e809f4 3879 int reg_parm_stack_space;
4fc026cd 3880 rtx alignment_pad;
3881{
3882 rtx xinner;
3883 enum direction stack_direction
3884#ifdef STACK_GROWS_DOWNWARD
3885 = downward;
3886#else
3887 = upward;
3888#endif
3889
3890 /* Decide where to pad the argument: `downward' for below,
3891 `upward' for above, or `none' for don't pad it.
3892 Default is below for small data on big-endian machines; else above. */
3893 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3894
0fb7aeda 3895 /* Invert direction if stack is post-decrement.
3896 FIXME: why? */
3897 if (STACK_PUSH_CODE == POST_DEC)
3898 if (where_pad != none)
3899 where_pad = (where_pad == downward ? upward : downward);
3900
3901 xinner = x = protect_from_queue (x, 0);
3902
3903 if (mode == BLKmode)
3904 {
3905 /* Copy a block into the stack, entirely or partially. */
3906
b3694847 3907 rtx temp;
3908 int used = partial * UNITS_PER_WORD;
3909 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3910 int skip;
3a94c984 3911
3912 if (size == 0)
3913 abort ();
3914
3915 used -= offset;
3916
3917 /* USED is now the # of bytes we need not copy to the stack
3918 because registers will take care of them. */
3919
3920 if (partial != 0)
f4ef873c 3921 xinner = adjust_address (xinner, BLKmode, used);
3922
3923 /* If the partial register-part of the arg counts in its stack size,
3924 skip the part of stack space corresponding to the registers.
3925 Otherwise, start copying to the beginning of the stack space,
3926 by setting SKIP to 0. */
e5e809f4 3927 skip = (reg_parm_stack_space == 0) ? 0 : used;
3928
3929#ifdef PUSH_ROUNDING
3930 /* Do it with several push insns if that doesn't take lots of insns
3931 and if there is no difficulty with push insns that skip bytes
3932 on the stack for alignment purposes. */
3933 if (args_addr == 0
f73ad30e 3934 && PUSH_ARGS
3935 && GET_CODE (size) == CONST_INT
3936 && skip == 0
f26aca6d 3937 && MEM_ALIGN (xinner) >= align
15914757 3938 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
3939 /* Here we avoid the case of a structure whose weak alignment
3940 forces many pushes of a small amount of data,
3941 and such small pushes do rounding that causes trouble. */
e1565e65 3942 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3943 || align >= BIGGEST_ALIGNMENT
3944 || (PUSH_ROUNDING (align / BITS_PER_UNIT)
3945 == (align / BITS_PER_UNIT)))
3946 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3947 {
3948 /* Push padding now if padding above and stack grows down,
3949 or if padding below and stack grows up.
3950 But if space already allocated, this has already been done. */
3951 if (extra && args_addr == 0
3952 && where_pad != none && where_pad != stack_direction)
906c4e36 3953 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3954
8fd3cf4e 3955 move_by_pieces (NULL, xinner, INTVAL (size) - used, align, 0);
3956 }
3957 else
3a94c984 3958#endif /* PUSH_ROUNDING */
bbf6f052 3959 {
3960 rtx target;
3961
3962 /* Otherwise make space on the stack and copy the data
3963 to the address of that space. */
3964
3965 /* Deduct words put into registers from the size we must copy. */
3966 if (partial != 0)
3967 {
3968 if (GET_CODE (size) == CONST_INT)
906c4e36 3969 size = GEN_INT (INTVAL (size) - used);
3970 else
3971 size = expand_binop (GET_MODE (size), sub_optab, size,
3972 GEN_INT (used), NULL_RTX, 0,
3973 OPTAB_LIB_WIDEN);
3974 }
3975
3976 /* Get the address of the stack space.
3977 In this case, we do not deal with EXTRA separately.
3978 A single stack adjust will do. */
3979 if (! args_addr)
3980 {
3981 temp = push_block (size, extra, where_pad == downward);
3982 extra = 0;
3983 }
3984 else if (GET_CODE (args_so_far) == CONST_INT)
3985 temp = memory_address (BLKmode,
3986 plus_constant (args_addr,
3987 skip + INTVAL (args_so_far)));
3988 else
3989 temp = memory_address (BLKmode,
3990 plus_constant (gen_rtx_PLUS (Pmode,
3991 args_addr,
3992 args_so_far),
bbf6f052 3993 skip));
3994
3995 if (!ACCUMULATE_OUTGOING_ARGS)
3996 {
3997 /* If the source is referenced relative to the stack pointer,
3998 copy it to another register to stabilize it. We do not need
3999 to do this if we know that we won't be changing sp. */
4000
4001 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
4002 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
4003 temp = copy_to_reg (temp);
4004 }
4005
3a94c984 4006 target = gen_rtx_MEM (BLKmode, temp);
7ab923cc 4007
4008 if (type != 0)
4009 {
4010 set_mem_attributes (target, type, 1);
4011 /* Function incoming arguments may overlap with sibling call
4012 outgoing arguments and we cannot allow reordering of reads
4013 from function arguments with stores to outgoing arguments
4014 of sibling calls. */
ba4828e0 4015 set_mem_alias_set (target, 0);
3a94c984 4016 }
4ca79136 4017
4018 /* ALIGN may well be better aligned than TYPE, e.g. due to
4019 PARM_BOUNDARY. Assume the caller isn't lying. */
4020 set_mem_align (target, align);
4ca79136 4021
44bb111a 4022 emit_block_move (target, xinner, size, BLOCK_OP_CALL_PARM);
4023 }
4024 }
4025 else if (partial > 0)
4026 {
4027 /* Scalar partly in registers. */
4028
4029 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
4030 int i;
4031 int not_stack;
4032 /* # words of start of argument
4033 that we must make space for but need not store. */
4034 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
4035 int args_offset = INTVAL (args_so_far);
4036 int skip;
4037
4038 /* Push padding now if padding above and stack grows down,
4039 or if padding below and stack grows up.
4040 But if space already allocated, this has already been done. */
4041 if (extra && args_addr == 0
4042 && where_pad != none && where_pad != stack_direction)
906c4e36 4043 anti_adjust_stack (GEN_INT (extra));
4044
4045 /* If we make space by pushing it, we might as well push
4046 the real data. Otherwise, we can leave OFFSET nonzero
4047 and leave the space uninitialized. */
4048 if (args_addr == 0)
4049 offset = 0;
4050
4051 /* Now NOT_STACK gets the number of words that we don't need to
4052 allocate on the stack. */
4053 not_stack = partial - offset;
4054
4055 /* If the partial register-part of the arg counts in its stack size,
4056 skip the part of stack space corresponding to the registers.
4057 Otherwise, start copying to the beginning of the stack space,
4058 by setting SKIP to 0. */
e5e809f4 4059 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
4060
4061 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
4062 x = validize_mem (force_const_mem (mode, x));
4063
4064 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
4065 SUBREGs of such registers are not allowed. */
4066 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
4067 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
4068 x = copy_to_reg (x);
4069
4070 /* Loop over all the words allocated on the stack for this arg. */
4071 /* We can do it by words, because any scalar bigger than a word
4072 has a size a multiple of a word. */
4073#ifndef PUSH_ARGS_REVERSED
4074 for (i = not_stack; i < size; i++)
4075#else
4076 for (i = size - 1; i >= not_stack; i--)
4077#endif
4078 if (i >= not_stack + offset)
4079 emit_push_insn (operand_subword_force (x, i, mode),
4080 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
4081 0, args_addr,
4082 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 4083 * UNITS_PER_WORD)),
4fc026cd 4084 reg_parm_stack_space, alignment_pad);
4085 }
4086 else
4087 {
4088 rtx addr;
3bdf5ad1 4089 rtx dest;
4090
4091 /* Push padding now if padding above and stack grows down,
4092 or if padding below and stack grows up.
4093 But if space already allocated, this has already been done. */
4094 if (extra && args_addr == 0
4095 && where_pad != none && where_pad != stack_direction)
906c4e36 4096 anti_adjust_stack (GEN_INT (extra));
4097
4098#ifdef PUSH_ROUNDING
f73ad30e 4099 if (args_addr == 0 && PUSH_ARGS)
566aa174 4100 emit_single_push_insn (mode, x, type);
4101 else
4102#endif
4103 {
4104 if (GET_CODE (args_so_far) == CONST_INT)
4105 addr
4106 = memory_address (mode,
3a94c984 4107 plus_constant (args_addr,
921b3427 4108 INTVAL (args_so_far)));
3a94c984 4109 else
4110 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
4111 args_so_far));
4112 dest = gen_rtx_MEM (mode, addr);
4113 if (type != 0)
4114 {
4115 set_mem_attributes (dest, type, 1);
4116 /* Function incoming arguments may overlap with sibling call
4117 outgoing arguments and we cannot allow reordering of reads
4118 from function arguments with stores to outgoing arguments
4119 of sibling calls. */
ba4828e0 4120 set_mem_alias_set (dest, 0);
566aa174 4121 }
bbf6f052 4122
566aa174 4123 emit_move_insn (dest, x);
566aa174 4124 }
4125 }
4126
4127 /* If part should go in registers, copy that part
4128 into the appropriate registers. Do this now, at the end,
4129 since mem-to-mem copies above may do function calls. */
cd048831 4130 if (partial > 0 && reg != 0)
4131 {
4132 /* Handle calls that pass values in multiple non-contiguous locations.
4133 The Irix 6 ABI has examples of this. */
4134 if (GET_CODE (reg) == PARALLEL)
04050c69 4135 emit_group_load (reg, x, -1); /* ??? size? */
4136 else
4137 move_block_to_reg (REGNO (reg), x, partial, mode);
4138 }
4139
4140 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 4141 anti_adjust_stack (GEN_INT (extra));
3a94c984 4142
3ea2292a 4143 if (alignment_pad && args_addr == 0)
4fc026cd 4144 anti_adjust_stack (alignment_pad);
4145}
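/* Hypothetical call (names and sizes invented for illustration):
   pushing a 24-byte BLKmode argument entirely on the stack, with no
   partial register part and no preallocated argument block, would
   look like

       emit_push_insn (arg, BLKmode, arg_type, GEN_INT (24),
                       PARM_BOUNDARY, 0, NULL_RTX, 0, NULL_RTX,
                       const0_rtx, 0, NULL_RTX);

   i.e. PARTIAL == 0 and REG == 0, so the whole value goes through
   the BLKmode block-copy path above.  */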
4146\f
4147/* Return X if X can be used as a subtarget in a sequence of arithmetic
4148 operations. */
4149
4150static rtx
4151get_subtarget (x)
4152 rtx x;
4153{
4154 return ((x == 0
4155 /* Only registers can be subtargets. */
4156 || GET_CODE (x) != REG
4157 /* If the register is readonly, it can't be set more than once. */
4158 || RTX_UNCHANGING_P (x)
4159 /* Don't use hard regs to avoid extending their life. */
4160 || REGNO (x) < FIRST_PSEUDO_REGISTER
4161 /* Avoid subtargets inside loops,
4162 since they hide some invariant expressions. */
4163 || preserve_subexpressions_p ())
4164 ? 0 : x);
4165}
4166
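/* Illustrative use (not from the sources): a caller expanding
   X = Y op Z might write

       rtx subtarget = get_subtarget (target);
       rtx op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget,
                              VOIDmode, 0);

   so that when TARGET is a safe pseudo the operand is computed
   directly into it, rather than into a fresh temporary that must
   then be copied.  */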
4167/* Expand an assignment that stores the value of FROM into TO.
4168 If WANT_VALUE is nonzero, return an rtx for the value of TO.
4169 (This may contain a QUEUED rtx;
4170 if the value is constant, this rtx is a constant.)
4171 Otherwise, the returned value is NULL_RTX.
4172
4173 SUGGEST_REG is no longer actually used.
4174 It used to mean, copy the value through a register
4175 and return that register, if that is possible.
709f5be1 4176 We now use WANT_VALUE to decide whether to do this. */
4177
4178rtx
4179expand_assignment (to, from, want_value, suggest_reg)
4180 tree to, from;
4181 int want_value;
c5c76735 4182 int suggest_reg ATTRIBUTE_UNUSED;
bbf6f052 4183{
b3694847 4184 rtx to_rtx = 0;
4185 rtx result;
4186
4187 /* Don't crash if the lhs of the assignment was erroneous. */
4188
4189 if (TREE_CODE (to) == ERROR_MARK)
4190 {
4191 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
4192 return want_value ? result : NULL_RTX;
4193 }
4194
4195 /* Assignment of a structure component needs special treatment
4196 if the structure component's rtx is not simply a MEM.
4197 Assignment of an array element at a constant index, and assignment of
4198 an array element in an unaligned packed structure field, have the same
4199 problem. */
bbf6f052 4200
08293add 4201 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
4202 || TREE_CODE (to) == ARRAY_REF || TREE_CODE (to) == ARRAY_RANGE_REF
4203 || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
4204 {
4205 enum machine_mode mode1;
770ae6cc 4206 HOST_WIDE_INT bitsize, bitpos;
a06ef755 4207 rtx orig_to_rtx;
7bb0943f 4208 tree offset;
4209 int unsignedp;
4210 int volatilep = 0;
4211 tree tem;
4212
4213 push_temp_slots ();
839c4796 4214 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
a06ef755 4215 &unsignedp, &volatilep);
4216
4217 /* If we are going to use store_bit_field and extract_bit_field,
4218 make sure to_rtx will be safe for multiple use. */
4219
4220 if (mode1 == VOIDmode && want_value)
4221 tem = stabilize_reference (tem);
4222
4223 orig_to_rtx = to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, 0);
4224
4225 if (offset != 0)
4226 {
e3c8ea67 4227 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, EXPAND_SUM);
4228
4229 if (GET_CODE (to_rtx) != MEM)
4230 abort ();
bd070e1a 4231
bd070e1a 4232#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 4233 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 4234 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
4235#else
4236 if (GET_MODE (offset_rtx) != ptr_mode)
4237 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
bd070e1a 4238#endif
bd070e1a 4239
4240 /* A constant address in TO_RTX can have VOIDmode, we must not try
4241 to call force_reg for that case. Avoid that case. */
4242 if (GET_CODE (to_rtx) == MEM
4243 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 4244 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
a06ef755 4245 && bitsize > 0
3a94c984 4246 && (bitpos % bitsize) == 0
89752202 4247 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 4248 && MEM_ALIGN (to_rtx) == GET_MODE_ALIGNMENT (mode1))
89752202 4249 {
e3c8ea67 4250 to_rtx = adjust_address (to_rtx, mode1, bitpos / BITS_PER_UNIT);
4251 bitpos = 0;
4252 }
4253
0d4903b8 4254 to_rtx = offset_address (to_rtx, offset_rtx,
4255 highest_pow2_factor_for_type (TREE_TYPE (to),
4256 offset));
7bb0943f 4257 }
c5c76735 4258
4259 if (GET_CODE (to_rtx) == MEM)
4260 {
4261 /* If the field is at offset zero, we could have been given the
4262 DECL_RTX of the parent struct. Don't munge it. */
4263 to_rtx = shallow_copy_rtx (to_rtx);
4264
6f1087be 4265 set_mem_attributes_minus_bitpos (to_rtx, to, 0, bitpos);
998d7deb 4266 }
effbcc6a 4267
4268 /* Deal with volatile and readonly fields. The former is only done
4269 for MEM. Also set MEM_KEEP_ALIAS_SET_P if needed. */
4270 if (volatilep && GET_CODE (to_rtx) == MEM)
4271 {
4272 if (to_rtx == orig_to_rtx)
4273 to_rtx = copy_rtx (to_rtx);
4274 MEM_VOLATILE_P (to_rtx) = 1;
4275 }
4276
4277 if (TREE_CODE (to) == COMPONENT_REF
4278 && TREE_READONLY (TREE_OPERAND (to, 1)))
4279 {
a06ef755 4280 if (to_rtx == orig_to_rtx)
956d6950 4281 to_rtx = copy_rtx (to_rtx);
4282 RTX_UNCHANGING_P (to_rtx) = 1;
4283 }
4284
a84b4898 4285 if (GET_CODE (to_rtx) == MEM && ! can_address_p (to))
4286 {
4287 if (to_rtx == orig_to_rtx)
4288 to_rtx = copy_rtx (to_rtx);
4289 MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
4290 }
4291
4292 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
4293 (want_value
4294 /* Spurious cast for HPUX compiler. */
4295 ? ((enum machine_mode)
4296 TYPE_MODE (TREE_TYPE (to)))
4297 : VOIDmode),
4298 unsignedp, TREE_TYPE (tem), get_alias_set (to));
a69beca1 4299
a06ef755
RK
4300 preserve_temp_slots (result);
4301 free_temp_slots ();
4302 pop_temp_slots ();
a69beca1 4303
4304 /* If the value is meaningful, convert RESULT to the proper mode.
4305 Otherwise, return nothing. */
4306 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
4307 TYPE_MODE (TREE_TYPE (from)),
4308 result,
4309 TREE_UNSIGNED (TREE_TYPE (to)))
4310 : NULL_RTX);
4311 }
4312
4313 /* If the rhs is a function call and its value is not an aggregate,
4314 call the function before we start to compute the lhs.
4315 This is needed for correct code for cases such as
4316 val = setjmp (buf) on machines where reference to val
4317 requires loading up part of an address in a separate insn.
4318
4319 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
4320 since it might be a promoted variable where the zero- or sign- extension
4321 needs to be done. Handling this in the normal way is safe because no
4322 computation is done before the call. */
1ad87b63 4323 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 4324 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
4325 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
4326 && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 4327 {
4328 rtx value;
4329
4330 push_temp_slots ();
4331 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 4332 if (to_rtx == 0)
37a08a29 4333 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
aaf87c45 4334
4335 /* Handle calls that return values in multiple non-contiguous locations.
4336 The Irix 6 ABI has examples of this. */
4337 if (GET_CODE (to_rtx) == PARALLEL)
04050c69 4338 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)));
fffa9c1d 4339 else if (GET_MODE (to_rtx) == BLKmode)
44bb111a 4340 emit_block_move (to_rtx, value, expr_size (from), BLOCK_OP_NORMAL);
aaf87c45 4341 else
4342 {
4343#ifdef POINTERS_EXTEND_UNSIGNED
4344 if (POINTER_TYPE_P (TREE_TYPE (to))
4345 && GET_MODE (to_rtx) != GET_MODE (value))
4346 value = convert_memory_address (GET_MODE (to_rtx), value);
4347#endif
4348 emit_move_insn (to_rtx, value);
4349 }
4350 preserve_temp_slots (to_rtx);
4351 free_temp_slots ();
0088fcb1 4352 pop_temp_slots ();
709f5be1 4353 return want_value ? to_rtx : NULL_RTX;
4354 }
4355
4356 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
4357 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
4358
4359 if (to_rtx == 0)
37a08a29 4360 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
bbf6f052 4361
86d38d25 4362 /* Don't move directly into a return register. */
4363 if (TREE_CODE (to) == RESULT_DECL
4364 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
86d38d25 4365 {
4366 rtx temp;
4367
4368 push_temp_slots ();
4369 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
4370
4371 if (GET_CODE (to_rtx) == PARALLEL)
04050c69 4372 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)));
4373 else
4374 emit_move_insn (to_rtx, temp);
4375
4376 preserve_temp_slots (to_rtx);
4377 free_temp_slots ();
0088fcb1 4378 pop_temp_slots ();
709f5be1 4379 return want_value ? to_rtx : NULL_RTX;
4380 }
4381
4382 /* In case we are returning the contents of an object which overlaps
4383 the place the value is being stored, use a safe function when copying
4384 a value through a pointer into a structure value return block. */
4385 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
4386 && current_function_returns_struct
4387 && !current_function_returns_pcc_struct)
4388 {
4389 rtx from_rtx, size;
4390
4391 push_temp_slots ();
33a20d10 4392 size = expr_size (from);
37a08a29 4393 from_rtx = expand_expr (from, NULL_RTX, VOIDmode, 0);
bbf6f052 4394
4395 if (TARGET_MEM_FUNCTIONS)
4396 emit_library_call (memmove_libfunc, LCT_NORMAL,
4397 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
4398 XEXP (from_rtx, 0), Pmode,
4399 convert_to_mode (TYPE_MODE (sizetype),
4400 size, TREE_UNSIGNED (sizetype)),
4401 TYPE_MODE (sizetype));
4402 else
4403 emit_library_call (bcopy_libfunc, LCT_NORMAL,
4404 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
4405 XEXP (to_rtx, 0), Pmode,
4406 convert_to_mode (TYPE_MODE (integer_type_node),
4407 size,
4408 TREE_UNSIGNED (integer_type_node)),
4409 TYPE_MODE (integer_type_node));
4410
4411 preserve_temp_slots (to_rtx);
4412 free_temp_slots ();
0088fcb1 4413 pop_temp_slots ();
709f5be1 4414 return want_value ? to_rtx : NULL_RTX;
4415 }
4416
4417 /* Compute FROM and store the value in the rtx we got. */
4418
0088fcb1 4419 push_temp_slots ();
bbf6f052
RK
4420 result = store_expr (from, to_rtx, want_value);
4421 preserve_temp_slots (result);
4422 free_temp_slots ();
0088fcb1 4423 pop_temp_slots ();
709f5be1 4424 return want_value ? result : NULL_RTX;
4425}
4426
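/* Worked example (hypothetical trees): for the C statement s.f = x,
   where s.f is a bit-field, the COMPONENT_REF branch above calls
   get_inner_reference for BITSIZE/BITPOS and finishes in
   store_field; for a plain scalar variable the function reduces to
   roughly

       to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_WRITE);
       result = store_expr (from, to_rtx, want_value);

   which is the "ordinary treatment" path.  */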
4427/* Generate code for computing expression EXP,
4428 and storing the value into TARGET.
4429 TARGET may contain a QUEUED rtx.
4430
8403445a 4431 If WANT_VALUE & 1 is nonzero, return a copy of the value
4432 not in TARGET, so that we can be sure to use the proper
4433 value in a containing expression even if TARGET has something
4434 else stored in it. If possible, we copy the value through a pseudo
4435 and return that pseudo. Or, if the value is constant, we try to
4436 return the constant. In some cases, we return a pseudo
4437 copied *from* TARGET.
4438
4439 If the mode is BLKmode then we may return TARGET itself.
4440 It turns out that in BLKmode it doesn't cause a problem,
4441 because C has no operators that could combine two different
4442 assignments into the same BLKmode object with different values
4443 with no sequence point. Will other languages need this to
4444 be more thorough?
4445
8403445a 4446 If WANT_VALUE & 1 is 0, we return NULL, to make sure
709f5be1 4447 to catch quickly any cases where the caller uses the value
4448 and fails to set WANT_VALUE.
4449
4450 If WANT_VALUE & 2 is set, this is a store into a call param on the
4451 stack, and block moves may need to be treated specially. */
4452
4453rtx
709f5be1 4454store_expr (exp, target, want_value)
4455 tree exp;
4456 rtx target;
709f5be1 4457 int want_value;
bbf6f052 4458{
b3694847 4459 rtx temp;
bbf6f052 4460 int dont_return_target = 0;
e5408e52 4461 int dont_store_target = 0;
bbf6f052 4462
4463 if (VOID_TYPE_P (TREE_TYPE (exp)))
4464 {
4465 /* C++ can generate ?: expressions with a throw expression in one
4466 branch and an rvalue in the other. Here, we resolve attempts to
4467 store the throw expression's nonexistent result. */
4468 if (want_value)
4469 abort ();
4470 expand_expr (exp, const0_rtx, VOIDmode, 0);
4471 return NULL_RTX;
4472 }
4473 if (TREE_CODE (exp) == COMPOUND_EXPR)
4474 {
4475 /* Perform first part of compound expression, then assign from second
4476 part. */
4477 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
4478 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
bbf6f052 4479 emit_queue ();
709f5be1 4480 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
4481 }
4482 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
4483 {
4484 /* For conditional expression, get safe form of the target. Then
4485 test the condition, doing the appropriate assignment on either
4486 side. This avoids the creation of unnecessary temporaries.
4487 For non-BLKmode, it is more efficient not to do this. */
4488
4489 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
4490
4491 emit_queue ();
4492 target = protect_from_queue (target, 1);
4493
dabf8373 4494 do_pending_stack_adjust ();
4495 NO_DEFER_POP;
4496 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 4497 start_cleanup_deferral ();
8403445a 4498 store_expr (TREE_OPERAND (exp, 1), target, want_value & 2);
956d6950 4499 end_cleanup_deferral ();
4500 emit_queue ();
4501 emit_jump_insn (gen_jump (lab2));
4502 emit_barrier ();
4503 emit_label (lab1);
956d6950 4504 start_cleanup_deferral ();
8403445a 4505 store_expr (TREE_OPERAND (exp, 2), target, want_value & 2);
956d6950 4506 end_cleanup_deferral ();
4507 emit_queue ();
4508 emit_label (lab2);
4509 OK_DEFER_POP;
a3a58acc 4510
8403445a 4511 return want_value & 1 ? target : NULL_RTX;
bbf6f052 4512 }
bbf6f052 4513 else if (queued_subexp_p (target))
4514 /* If target contains a postincrement, let's not risk
4515 using it as the place to generate the rhs. */
4516 {
4517 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
4518 {
4519 /* Expand EXP into a new pseudo. */
4520 temp = gen_reg_rtx (GET_MODE (target));
4521 temp = expand_expr (exp, temp, GET_MODE (target),
4522 (want_value & 2
4523 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4524 }
4525 else
4526 temp = expand_expr (exp, NULL_RTX, GET_MODE (target),
4527 (want_value & 2
4528 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4529
4530 /* If target is volatile, ANSI requires accessing the value
4531 *from* the target, if it is accessed. So make that happen.
4532 In no case return the target itself. */
8403445a 4533 if (! MEM_VOLATILE_P (target) && (want_value & 1) != 0)
709f5be1 4534 dont_return_target = 1;
bbf6f052 4535 }
4536 else if ((want_value & 1) != 0
4537 && GET_CODE (target) == MEM
4538 && ! MEM_VOLATILE_P (target)
4539 && GET_MODE (target) != BLKmode)
4540 /* If target is in memory and caller wants value in a register instead,
4541 arrange that. Pass TARGET as target for expand_expr so that,
4542 if EXP is another assignment, WANT_VALUE will be nonzero for it.
4543 We know expand_expr will not use the target in that case.
4544 Don't do this if TARGET is volatile because we are supposed
4545 to write it and then read it. */
4546 {
8403445a
AM
4547 temp = expand_expr (exp, target, GET_MODE (target),
4548 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
12f06d17 4549 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
4550 {
4551 /* If TEMP is already in the desired TARGET, only copy it from
4552 memory and don't store it there again. */
4553 if (temp == target
4554 || (rtx_equal_p (temp, target)
4555 && ! side_effects_p (temp) && ! side_effects_p (target)))
4556 dont_store_target = 1;
4557 temp = copy_to_reg (temp);
4558 }
4559 dont_return_target = 1;
4560 }
1499e0a8 4561 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
09da1532 4562 /* If this is a scalar in a register that is stored in a wider mode
4563 than the declared mode, compute the result into its declared mode
4564 and then convert to the wider mode. Our value is the computed
4565 expression. */
4566 {
4567 rtx inner_target = 0;
4568
5a32d038 4569 /* If we don't want a value, we can do the conversion inside EXP,
4570 which will often result in some optimizations. Do the conversion
4571 in two steps: first change the signedness, if needed, then
4572 the extend. But don't do this if the type of EXP is a subtype
4573 of something else since then the conversion might involve
4574 more than just converting modes. */
4575 if ((want_value & 1) == 0
4576 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
ab6c58f1 4577 && TREE_TYPE (TREE_TYPE (exp)) == 0)
4578 {
4579 if (TREE_UNSIGNED (TREE_TYPE (exp))
4580 != SUBREG_PROMOTED_UNSIGNED_P (target))
4581 exp = convert
4582 ((*lang_hooks.types.signed_or_unsigned_type)
4583 (SUBREG_PROMOTED_UNSIGNED_P (target), TREE_TYPE (exp)), exp);
f635a84d 4584
4585 exp = convert ((*lang_hooks.types.type_for_mode)
4586 (GET_MODE (SUBREG_REG (target)),
4587 SUBREG_PROMOTED_UNSIGNED_P (target)),
f635a84d 4588 exp);
4589
4590 inner_target = SUBREG_REG (target);
f635a84d 4591 }
3a94c984 4592
4593 temp = expand_expr (exp, inner_target, VOIDmode,
4594 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
b258707c 4595
4596 /* If TEMP is a MEM and we want a result value, make the access
4597 now so it gets done only once. Strictly speaking, this is
4598 only necessary if the MEM is volatile, or if the address
4599 overlaps TARGET. But not performing the load twice also
4600 reduces the amount of rtl we generate and then have to CSE. */
8403445a 4601 if (GET_CODE (temp) == MEM && (want_value & 1) != 0)
4602 temp = copy_to_reg (temp);
4603
4604 /* If TEMP is a VOIDmode constant, use convert_modes to make
4605 sure that we properly convert it. */
4606 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4607 {
4608 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4609 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4610 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4611 GET_MODE (target), temp,
4612 SUBREG_PROMOTED_UNSIGNED_P (target));
4613 }
b258707c 4614
1499e0a8
RK
4615 convert_move (SUBREG_REG (target), temp,
4616 SUBREG_PROMOTED_UNSIGNED_P (target));
4617
4618 /* If we promoted a constant, change the mode back down to match
4619 target. Otherwise, the caller might get confused by a result whose
4620 mode is larger than expected. */
4621
8403445a 4622 if ((want_value & 1) != 0 && GET_MODE (temp) != GET_MODE (target))
3dbecef9 4623 {
4624 if (GET_MODE (temp) != VOIDmode)
4625 {
4626 temp = gen_lowpart_SUBREG (GET_MODE (target), temp);
4627 SUBREG_PROMOTED_VAR_P (temp) = 1;
0fb7aeda 4628 SUBREG_PROMOTED_UNSIGNED_SET (temp,
7879b81e 4629 SUBREG_PROMOTED_UNSIGNED_P (target));
4630 }
4631 else
4632 temp = convert_modes (GET_MODE (target),
4633 GET_MODE (SUBREG_REG (target)),
4634 temp, SUBREG_PROMOTED_UNSIGNED_P (target));
4635 }
4636
8403445a 4637 return want_value & 1 ? temp : NULL_RTX;
1499e0a8 4638 }
4639 else
4640 {
4641 temp = expand_expr (exp, target, GET_MODE (target),
4642 want_value & 2 ? EXPAND_STACK_PARM : EXPAND_NORMAL);
766f36c7 4643 /* Return TARGET if it's a specified hardware register.
4644 If TARGET is a volatile mem ref, either return TARGET
4645 or return a reg copied *from* TARGET; ANSI requires this.
4646
4647 Otherwise, if TEMP is not TARGET, return TEMP
4648 if it is constant (for efficiency),
4649 or if we really want the correct value. */
4650 if (!(target && GET_CODE (target) == REG
4651 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 4652 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 4653 && ! rtx_equal_p (temp, target)
8403445a 4654 && (CONSTANT_P (temp) || (want_value & 1) != 0))
4655 dont_return_target = 1;
4656 }
4657
4658 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4659 the same as that of TARGET, adjust the constant. This is needed, for
4660 example, in case it is a CONST_DOUBLE and we want only a word-sized
4661 value. */
4662 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 4663 && TREE_CODE (exp) != ERROR_MARK
4664 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4665 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4666 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4667
bbf6f052 4668 /* If value was not generated in the target, store it there.
4669 Convert the value to TARGET's type first if necessary.
4670 If TEMP and TARGET compare equal according to rtx_equal_p, but
4671 one or both of them are volatile memory refs, we have to distinguish
4672 two cases:
4673 - expand_expr has used TARGET. In this case, we must not generate
4674 another copy. This can be detected by TARGET being equal according
4675 to == .
4676 - expand_expr has not used TARGET - that means that the source just
4677 happens to have the same RTX form. Since temp will have been created
4678 by expand_expr, it will compare unequal according to == .
4679 We must generate a copy in this case, to reach the correct number
4680 of volatile memory references. */
bbf6f052 4681
6036acbb 4682 if ((! rtx_equal_p (temp, target)
4683 || (temp != target && (side_effects_p (temp)
4684 || side_effects_p (target))))
e5408e52 4685 && TREE_CODE (exp) != ERROR_MARK
4686 && ! dont_store_target
4687 /* If store_expr stores a DECL whose DECL_RTL(exp) == TARGET,
4688 but TARGET is not valid memory reference, TEMP will differ
4689 from TARGET although it is really the same location. */
4690 && (TREE_CODE_CLASS (TREE_CODE (exp)) != 'd'
4691 || target != DECL_RTL_IF_SET (exp))
4692 /* If there's nothing to copy, don't bother. Don't call expr_size
4693 unless necessary, because some front-ends' (e.g. C++) expr_size hook
4694 aborts on objects that are not supposed to be bit-copied or
4695 bit-initialized. */
4696 && expr_size (exp) != const0_rtx)
4697 {
4698 target = protect_from_queue (target, 1);
4699 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 4700 && GET_MODE (temp) != VOIDmode)
4701 {
4702 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4703 if (dont_return_target)
4704 {
4705 /* In this case, we will return TEMP,
4706 so make sure it has the proper mode.
4707 But don't forget to store the value into TARGET. */
4708 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4709 emit_move_insn (target, temp);
4710 }
4711 else
4712 convert_move (target, temp, unsignedp);
4713 }
4714
4715 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4716 {
4717 /* Handle copying a string constant into an array. The string
4718 constant may be shorter than the array. So copy just the string's
4719 actual length, and clear the rest. First get the size of the data
4720 type of the string, which is actually the size of the target. */
4721 rtx size = expr_size (exp);
bbf6f052 4722
4723 if (GET_CODE (size) == CONST_INT
4724 && INTVAL (size) < TREE_STRING_LENGTH (exp))
4725 emit_block_move (target, temp, size,
4726 (want_value & 2
4727 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4728 else
bbf6f052 4729 {
4730 /* Compute the size of the data to copy from the string. */
4731 tree copy_size
c03b7665 4732 = size_binop (MIN_EXPR,
b50d17a1 4733 make_tree (sizetype, size),
fed3cef0 4734 size_int (TREE_STRING_LENGTH (exp)));
4735 rtx copy_size_rtx
4736 = expand_expr (copy_size, NULL_RTX, VOIDmode,
4737 (want_value & 2
4738 ? EXPAND_STACK_PARM : EXPAND_NORMAL));
4739 rtx label = 0;
4740
4741 /* Copy that much. */
4742 copy_size_rtx = convert_to_mode (ptr_mode, copy_size_rtx,
4743 TREE_UNSIGNED (sizetype));
4744 emit_block_move (target, temp, copy_size_rtx,
4745 (want_value & 2
4746 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
e87b4f3f 4747
4748 /* Figure out how much is left in TARGET that we have to clear.
4749 Do all calculations in ptr_mode. */
4750 if (GET_CODE (copy_size_rtx) == CONST_INT)
4751 {
4752 size = plus_constant (size, -INTVAL (copy_size_rtx));
4753 target = adjust_address (target, BLKmode,
4754 INTVAL (copy_size_rtx));
4755 }
4756 else
4757 {
fa06ab5c 4758 size = expand_binop (TYPE_MODE (sizetype), sub_optab, size,
906c4e36
RK
4759 copy_size_rtx, NULL_RTX, 0,
4760 OPTAB_LIB_WIDEN);
e87b4f3f 4761
4762#ifdef POINTERS_EXTEND_UNSIGNED
4763 if (GET_MODE (copy_size_rtx) != Pmode)
4764 copy_size_rtx = convert_to_mode (Pmode, copy_size_rtx,
4765 TREE_UNSIGNED (sizetype));
4766#endif
4767
4768 target = offset_address (target, copy_size_rtx,
4769 highest_pow2_factor (copy_size));
e87b4f3f 4770 label = gen_label_rtx ();
c5d5d461 4771 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
a06ef755 4772 GET_MODE (size), 0, label);
4773 }
4774
4775 if (size != const0_rtx)
37a08a29 4776 clear_storage (target, size);
22619c3f 4777
4778 if (label)
4779 emit_label (label);
4780 }
4781 }
4782 /* Handle calls that return values in multiple non-contiguous locations.
4783 The Irix 6 ABI has examples of this. */
4784 else if (GET_CODE (target) == PARALLEL)
04050c69 4785 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052 4786 else if (GET_MODE (temp) == BLKmode)
4787 emit_block_move (target, temp, expr_size (exp),
4788 (want_value & 2
4789 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
4790 else
4791 emit_move_insn (target, temp);
4792 }
709f5be1 4793
766f36c7 4794 /* If we don't want a value, return NULL_RTX. */
8403445a 4795 if ((want_value & 1) == 0)
4796 return NULL_RTX;
4797
4798 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4799 ??? The latter test doesn't seem to make sense. */
4800 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 4801 return temp;
4802
4803 /* Return TARGET itself if it is a hard register. */
4804 else if ((want_value & 1) != 0
4805 && GET_MODE (target) != BLKmode
4806 && ! (GET_CODE (target) == REG
4807 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 4808 return copy_to_reg (target);
3a94c984 4809
766f36c7 4810 else
709f5be1 4811 return target;
4812}
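/* Example of the STRING_CST path above (illustrative): for
   char buf[8] = "hi", expr_size is 8 while TREE_STRING_LENGTH is 3,
   so the store comes out roughly as

       emit_block_move (target, temp, GEN_INT (3), BLOCK_OP_NORMAL);
       clear_storage (adjust_address (target, BLKmode, 3), GEN_INT (5));

   copying the string bytes (including the terminating NUL) and
   zeroing the remaining five bytes of the array.  */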
4813\f
4814/* Return 1 if EXP just contains zeros. */
4815
4816static int
4817is_zeros_p (exp)
4818 tree exp;
4819{
4820 tree elt;
4821
4822 switch (TREE_CODE (exp))
4823 {
4824 case CONVERT_EXPR:
4825 case NOP_EXPR:
4826 case NON_LVALUE_EXPR:
ed239f5a 4827 case VIEW_CONVERT_EXPR:
4828 return is_zeros_p (TREE_OPERAND (exp, 0));
4829
4830 case INTEGER_CST:
05bccae2 4831 return integer_zerop (exp);
4832
4833 case COMPLEX_CST:
4834 return
4835 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4836
4837 case REAL_CST:
41c9120b 4838 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200 4839
4840 case VECTOR_CST:
4841 for (elt = TREE_VECTOR_CST_ELTS (exp); elt;
4842 elt = TREE_CHAIN (elt))
4843 if (!is_zeros_p (TREE_VALUE (elt)))
4844 return 0;
4845
4846 return 1;
4847
9de08200 4848 case CONSTRUCTOR:
e1a43f73
PB
4849 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4850 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
4851 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4852 if (! is_zeros_p (TREE_VALUE (elt)))
4853 return 0;
4854
4855 return 1;
3a94c984 4856
4857 default:
4858 return 0;
9de08200 4859 }
4860}
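/* Example (illustrative): is_zeros_p returns 1 for 0, (float) 0.0,
   a complex 0.0 + 0.0i, and the aggregate initializer { 0, 0 }, but
   0 for -0.0, because REAL_VALUES_IDENTICAL distinguishes the sign
   of zero.  */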
4861
4862/* Return 1 if EXP contains mostly (3/4) zeros. */
4863
4864static int
4865mostly_zeros_p (exp)
4866 tree exp;
4867{
4868 if (TREE_CODE (exp) == CONSTRUCTOR)
4869 {
4870 int elts = 0, zeros = 0;
4871 tree elt = CONSTRUCTOR_ELTS (exp);
4872 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4873 {
4874 /* If there are no ranges of true bits, it is all zero. */
4875 return elt == NULL_TREE;
4876 }
4877 for (; elt; elt = TREE_CHAIN (elt))
4878 {
4879 /* We do not handle the case where the index is a RANGE_EXPR,
4880 so the statistic will be somewhat inaccurate.
4881 We do make a more accurate count in store_constructor itself,
4882 and since this function is only used for nested array elements,
0f41302f 4883 this should be close enough. */
4884 if (mostly_zeros_p (TREE_VALUE (elt)))
4885 zeros++;
4886 elts++;
4887 }
4888
4889 return 4 * zeros >= 3 * elts;
4890 }
4891
4892 return is_zeros_p (exp);
4893}
4894\f
4895/* Helper function for store_constructor.
4896 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4897 TYPE is the type of the CONSTRUCTOR, not the element type.
04050c69 4898 CLEARED is as for store_constructor.
23cb1766 4899 ALIAS_SET is the alias set to use for any stores.
4900
4901 This provides a recursive shortcut back to store_constructor when it isn't
4902 necessary to go through store_field. This is so that we can pass through
4903 the cleared field to let store_constructor know that we may not have to
4904 clear a substructure if the outer structure has already been cleared. */
4905
4906static void
4907store_constructor_field (target, bitsize, bitpos, mode, exp, type, cleared,
4908 alias_set)
e1a43f73 4909 rtx target;
4910 unsigned HOST_WIDE_INT bitsize;
4911 HOST_WIDE_INT bitpos;
4912 enum machine_mode mode;
4913 tree exp, type;
4914 int cleared;
23cb1766 4915 int alias_set;
4916{
4917 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44 4918 && bitpos % BITS_PER_UNIT == 0
cc2902df 4919 /* If we have a nonzero bitpos for a register target, then we just
4920 let store_field do the bitfield handling. This is unlikely to
4921 generate unnecessary clear instructions anyway. */
4922 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 4923 {
4924 if (GET_CODE (target) == MEM)
4925 target
4926 = adjust_address (target,
4927 GET_MODE (target) == BLKmode
4928 || 0 != (bitpos
4929 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4930 ? BLKmode : VOIDmode, bitpos / BITS_PER_UNIT);
23cb1766 4931
e0339ef7 4932
04050c69 4933 /* Update the alias set, if required. */
4934 if (GET_CODE (target) == MEM && ! MEM_KEEP_ALIAS_SET_P (target)
4935 && MEM_ALIAS_SET (target) != 0)
70072ed9
RK
4936 {
4937 target = copy_rtx (target);
4938 set_mem_alias_set (target, alias_set);
4939 }
e0339ef7 4940
04050c69 4941 store_constructor (exp, target, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4942 }
4943 else
a06ef755
RK
4944 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
4945 alias_set);
e1a43f73
PB
4946}
4947
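/* Illustrative sketch, not part of the original source (kept under
   #if 0 so it cannot affect the build): the condition under which the
   helper above bypasses store_field and recurses directly.  */
#if 0
static int
sketch_can_recurse_directly (exp, target, bitpos)
     tree exp;
     rtx target;
     HOST_WIDE_INT bitpos;
{
  /* A nested CONSTRUCTOR that starts on a byte boundary can be stored
     by recursing into store_constructor; a nonzero bit offset into a
     register must go through store_field's bitfield path instead.  */
  return (TREE_CODE (exp) == CONSTRUCTOR
	  && bitpos % BITS_PER_UNIT == 0
	  && (bitpos == 0 || GET_CODE (target) == MEM));
}
#endif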
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM; we know it cannot conflict, since
   safe_from_p has been called.
   CLEARED is true if TARGET is known to have been zero'd.
   SIZE is the number of bytes of TARGET we are allowed to modify: this
   may not be the same as the size of EXP if we are assigning to a field
   which has been packed to exclude padding bits.  */

static void
store_constructor (exp, target, cleared, size)
     tree exp;
     rtx target;
     int cleared;
     HOST_WIDE_INT size;
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  HOST_WIDE_INT exp_size = int_size_in_bytes (type);
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      tree elt;

      /* We either clear the aggregate or indicate the value is dead.  */
      if ((TREE_CODE (type) == UNION_TYPE
	   || TREE_CODE (type) == QUAL_UNION_TYPE)
	  && ! cleared
	  && ! CONSTRUCTOR_ELTS (exp))
	/* If the constructor is empty, clear the union.  */
	{
	  clear_storage (target, expr_size (exp));
	  cleared = 1;
	}

      /* If we are building a static constructor into a register,
	 set the initial value as zero so we can fold the value into
	 a constant.  But if more than one register is involved,
	 this probably loses.  */
      else if (! cleared && GET_CODE (target) == REG && TREE_STATIC (exp)
	       && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
	{
	  emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	  cleared = 1;
	}

      /* If the constructor has fewer fields than the structure
	 or if we are initializing the structure to mostly zeros,
	 clear the whole structure first.  Don't do this if TARGET is a
	 register whose mode size isn't equal to SIZE since clear_storage
	 can't handle this case.  */
      else if (! cleared && size > 0
	       && ((list_length (CONSTRUCTOR_ELTS (exp))
		    != fields_length (type))
		   || mostly_zeros_p (exp))
	       && (GET_CODE (target) != REG
		   || ((HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target))
		       == size)))
	{
	  rtx xtarget = target;

	  if (readonly_fields_p (type))
	    {
	      xtarget = copy_rtx (xtarget);
	      RTX_UNCHANGING_P (xtarget) = 1;
	    }

	  clear_storage (xtarget, GEN_INT (size));
	  cleared = 1;
	}

      if (! cleared)
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	{
	  tree field = TREE_PURPOSE (elt);
	  tree value = TREE_VALUE (elt);
	  enum machine_mode mode;
	  HOST_WIDE_INT bitsize;
	  HOST_WIDE_INT bitpos = 0;
	  tree offset;
	  rtx to_rtx = target;

	  /* Just ignore missing fields.
	     We cleared the whole structure, above,
	     if any fields are missing.  */
	  if (field == 0)
	    continue;

	  if (cleared && is_zeros_p (value))
	    continue;

	  if (host_integerp (DECL_SIZE (field), 1))
	    bitsize = tree_low_cst (DECL_SIZE (field), 1);
	  else
	    bitsize = -1;

	  mode = DECL_MODE (field);
	  if (DECL_BIT_FIELD (field))
	    mode = VOIDmode;

	  offset = DECL_FIELD_OFFSET (field);
	  if (host_integerp (offset, 0)
	      && host_integerp (bit_position (field), 0))
	    {
	      bitpos = int_bit_position (field);
	      offset = 0;
	    }
	  else
	    bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);

	  if (offset)
	    {
	      rtx offset_rtx;

	      if (CONTAINS_PLACEHOLDER_P (offset))
		offset = build (WITH_RECORD_EXPR, sizetype,
				offset, make_tree (TREE_TYPE (exp), target));

	      offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
	      if (GET_CODE (to_rtx) != MEM)
		abort ();

#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (offset_rtx) != Pmode)
		offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
#else
	      if (GET_MODE (offset_rtx) != ptr_mode)
		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif

	      to_rtx = offset_address (to_rtx, offset_rtx,
				       highest_pow2_factor (offset));
	    }

	  if (TREE_READONLY (field))
	    {
	      if (GET_CODE (to_rtx) == MEM)
		to_rtx = copy_rtx (to_rtx);

	      RTX_UNCHANGING_P (to_rtx) = 1;
	    }

#ifdef WORD_REGISTER_OPERATIONS
	  /* If this initializes a field that is smaller than a word, at the
	     start of a word, try to widen it to a full word.
	     This special case allows us to output C++ member function
	     initializations in a form that the optimizers can understand.  */
	  if (GET_CODE (target) == REG
	      && bitsize < BITS_PER_WORD
	      && bitpos % BITS_PER_WORD == 0
	      && GET_MODE_CLASS (mode) == MODE_INT
	      && TREE_CODE (value) == INTEGER_CST
	      && exp_size >= 0
	      && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
	    {
	      tree type = TREE_TYPE (value);

	      if (TYPE_PRECISION (type) < BITS_PER_WORD)
		{
		  type = (*lang_hooks.types.type_for_size)
		    (BITS_PER_WORD, TREE_UNSIGNED (type));
		  value = convert (type, value);
		}

	      if (BYTES_BIG_ENDIAN)
		value
		  = fold (build (LSHIFT_EXPR, type, value,
				 build_int_2 (BITS_PER_WORD - bitsize, 0)));
	      bitsize = BITS_PER_WORD;
	      mode = word_mode;
	    }
#endif

	  if (GET_CODE (to_rtx) == MEM && !MEM_KEEP_ALIAS_SET_P (to_rtx)
	      && DECL_NONADDRESSABLE_P (field))
	    {
	      to_rtx = copy_rtx (to_rtx);
	      MEM_KEEP_ALIAS_SET_P (to_rtx) = 1;
	    }

	  store_constructor_field (to_rtx, bitsize, bitpos, mode,
				   value, type, cleared,
				   get_alias_set (TREE_TYPE (field)));
	}
    }
  else if (TREE_CODE (type) == ARRAY_TYPE
	   || TREE_CODE (type) == VECTOR_TYPE)
    {
      tree elt;
      int i;
      int need_to_clear;
      tree domain = TYPE_DOMAIN (type);
      tree elttype = TREE_TYPE (type);
      int const_bounds_p;
      HOST_WIDE_INT minelt = 0;
      HOST_WIDE_INT maxelt = 0;

      /* Vectors are like arrays, but the domain is stored via an array
	 type indirectly.  */
      if (TREE_CODE (type) == VECTOR_TYPE)
	{
	  /* Note that although TYPE_DEBUG_REPRESENTATION_TYPE uses
	     the same field as TYPE_DOMAIN, we are not guaranteed that
	     it always will.  */
	  domain = TYPE_DEBUG_REPRESENTATION_TYPE (type);
	  domain = TYPE_DOMAIN (TREE_TYPE (TYPE_FIELDS (domain)));
	}

      const_bounds_p = (TYPE_MIN_VALUE (domain)
			&& TYPE_MAX_VALUE (domain)
			&& host_integerp (TYPE_MIN_VALUE (domain), 0)
			&& host_integerp (TYPE_MAX_VALUE (domain), 0));

      /* If we have constant bounds for the range of the type, get them.  */
      if (const_bounds_p)
	{
	  minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
	  maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
	}

      /* If the constructor has fewer elements than the array,
	 clear the whole array first.  Similarly if this is a
	 static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
	need_to_clear = 1;
      else
	{
	  HOST_WIDE_INT count = 0, zero_count = 0;
	  need_to_clear = ! const_bounds_p;

	  /* This loop is a more accurate version of the loop in
	     mostly_zeros_p (it handles RANGE_EXPR in an index).
	     It is also needed to check for missing elements.  */
	  for (elt = CONSTRUCTOR_ELTS (exp);
	       elt != NULL_TREE && ! need_to_clear;
	       elt = TREE_CHAIN (elt))
	    {
	      tree index = TREE_PURPOSE (elt);
	      HOST_WIDE_INT this_node_count;

	      if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
		{
		  tree lo_index = TREE_OPERAND (index, 0);
		  tree hi_index = TREE_OPERAND (index, 1);

		  if (! host_integerp (lo_index, 1)
		      || ! host_integerp (hi_index, 1))
		    {
		      need_to_clear = 1;
		      break;
		    }

		  this_node_count = (tree_low_cst (hi_index, 1)
				     - tree_low_cst (lo_index, 1) + 1);
		}
	      else
		this_node_count = 1;

	      count += this_node_count;
	      if (mostly_zeros_p (TREE_VALUE (elt)))
		zero_count += this_node_count;
	    }

	  /* Clear the entire array first if there are any missing elements,
	     or if the incidence of zero elements is >= 75%.  */
	  if (! need_to_clear
	      && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
	    need_to_clear = 1;
	}
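      /* Worked example (editorial note, hypothetical values): for an
	 array of 8 elements whose constructor supplies all 8 but where
	 6 of the initializers are mostly zero, count = 8 and
	 zero_count = 6, so 4 * 6 >= 3 * 8 holds and the whole array is
	 cleared up front rather than storing six zeros piecemeal.  */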
      if (need_to_clear && size > 0)
	{
	  if (! cleared)
	    {
	      if (REG_P (target))
		emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
	      else
		clear_storage (target, GEN_INT (size));
	    }
	  cleared = 1;
	}
      else if (REG_P (target))
	/* Inform later passes that the old value is dead.  */
	emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
	 the corresponding element of TARGET, determined
	 by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
	   elt;
	   elt = TREE_CHAIN (elt), i++)
	{
	  enum machine_mode mode;
	  HOST_WIDE_INT bitsize;
	  HOST_WIDE_INT bitpos;
	  int unsignedp;
	  tree value = TREE_VALUE (elt);
	  tree index = TREE_PURPOSE (elt);
	  rtx xtarget = target;

	  if (cleared && is_zeros_p (value))
	    continue;

	  unsignedp = TREE_UNSIGNED (elttype);
	  mode = TYPE_MODE (elttype);
	  if (mode == BLKmode)
	    bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
		       ? tree_low_cst (TYPE_SIZE (elttype), 1)
		       : -1);
	  else
	    bitsize = GET_MODE_BITSIZE (mode);

	  if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
	    {
	      tree lo_index = TREE_OPERAND (index, 0);
	      tree hi_index = TREE_OPERAND (index, 1);
	      rtx index_r, pos_rtx, loop_end;
	      struct nesting *loop;
	      HOST_WIDE_INT lo, hi, count;
	      tree position;

	      /* If the range is constant and "small", unroll the loop.  */
	      if (const_bounds_p
		  && host_integerp (lo_index, 0)
		  && host_integerp (hi_index, 0)
		  && (lo = tree_low_cst (lo_index, 0),
		      hi = tree_low_cst (hi_index, 0),
		      count = hi - lo + 1,
		      (GET_CODE (target) != MEM
		       || count <= 2
		       || (host_integerp (TYPE_SIZE (elttype), 1)
			   && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
			       <= 40 * 8)))))
		{
		  lo -= minelt;  hi -= minelt;
		  for (; lo <= hi; lo++)
		    {
		      bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);

		      if (GET_CODE (target) == MEM
			  && !MEM_KEEP_ALIAS_SET_P (target)
			  && TREE_CODE (type) == ARRAY_TYPE
			  && TYPE_NONALIASED_COMPONENT (type))
			{
			  target = copy_rtx (target);
			  MEM_KEEP_ALIAS_SET_P (target) = 1;
			}

		      store_constructor_field
			(target, bitsize, bitpos, mode, value, type, cleared,
			 get_alias_set (elttype));
		    }
		}
	      else
		{
		  expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
		  loop_end = gen_label_rtx ();

		  unsignedp = TREE_UNSIGNED (domain);

		  index = build_decl (VAR_DECL, NULL_TREE, domain);

		  index_r
		    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
						 &unsignedp, 0));
		  SET_DECL_RTL (index, index_r);
		  if (TREE_CODE (value) == SAVE_EXPR
		      && SAVE_EXPR_RTL (value) == 0)
		    {
		      /* Make sure value gets expanded once before the
			 loop.  */
		      expand_expr (value, const0_rtx, VOIDmode, 0);
		      emit_queue ();
		    }
		  store_expr (lo_index, index_r, 0);
		  loop = expand_start_loop (0);

		  /* Assign value to element index.  */
		  position
		    = convert (ssizetype,
			       fold (build (MINUS_EXPR, TREE_TYPE (index),
					    index, TYPE_MIN_VALUE (domain))));
		  position = size_binop (MULT_EXPR, position,
					 convert (ssizetype,
						  TYPE_SIZE_UNIT (elttype)));

		  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
		  xtarget = offset_address (target, pos_rtx,
					    highest_pow2_factor (position));
		  xtarget = adjust_address (xtarget, mode, 0);
		  if (TREE_CODE (value) == CONSTRUCTOR)
		    store_constructor (value, xtarget, cleared,
				       bitsize / BITS_PER_UNIT);
		  else
		    store_expr (value, xtarget, 0);

		  expand_exit_loop_if_false (loop,
					     build (LT_EXPR, integer_type_node,
						    index, hi_index));

		  expand_increment (build (PREINCREMENT_EXPR,
					   TREE_TYPE (index),
					   index, integer_one_node), 0, 0);
		  expand_end_loop ();
		  emit_label (loop_end);
		}
	    }
	  else if ((index != 0 && ! host_integerp (index, 0))
		   || ! host_integerp (TYPE_SIZE (elttype), 1))
	    {
	      tree position;

	      if (index == 0)
		index = ssize_int (1);

	      if (minelt)
		index = convert (ssizetype,
				 fold (build (MINUS_EXPR, index,
					      TYPE_MIN_VALUE (domain))));

	      position = size_binop (MULT_EXPR, index,
				     convert (ssizetype,
					      TYPE_SIZE_UNIT (elttype)));
	      xtarget = offset_address (target,
					expand_expr (position, 0, VOIDmode, 0),
					highest_pow2_factor (position));
	      xtarget = adjust_address (xtarget, mode, 0);
	      store_expr (value, xtarget, 0);
	    }
	  else
	    {
	      if (index != 0)
		bitpos = ((tree_low_cst (index, 0) - minelt)
			  * tree_low_cst (TYPE_SIZE (elttype), 1));
	      else
		bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));

	      if (GET_CODE (target) == MEM && !MEM_KEEP_ALIAS_SET_P (target)
		  && TREE_CODE (type) == ARRAY_TYPE
		  && TYPE_NONALIASED_COMPONENT (type))
		{
		  target = copy_rtx (target);
		  MEM_KEEP_ALIAS_SET_P (target) = 1;
		}

	      store_constructor_field (target, bitsize, bitpos, mode, value,
				       type, cleared, get_alias_set (elttype));
	    }
	}
    }

  /* Set constructor assignments.  */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
	 parts of the constructor, use that to initialize the target,
	 and then "or" in whatever non-constant ranges we need in addition.

	 If a large set is all zero or all ones, it is
	 probably better to set it using memset (if available) or bzero.
	 Also, if a large set has just a single range, it may also be
	 better to first clear the whole set (using bzero/memset) and
	 then set the bits we want.  */

      /* Check for all zeros.  */
      if (elt == NULL_TREE && size > 0)
	{
	  if (!cleared)
	    clear_storage (target, GEN_INT (size));
	  return;
	}

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
			      size_diffop (domain_max, domain_min),
			      ssize_int (1));

      nbits = tree_low_cst (bitlength, 1);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
	 are "complicated" (more than one range), initialize (the
	 constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
	  || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
	{
	  unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
	  enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
	  char *bit_buffer = (char *) alloca (nbits);
	  HOST_WIDE_INT word = 0;
	  unsigned int bit_pos = 0;
	  unsigned int ibit = 0;
	  unsigned int offset = 0;  /* In bytes from beginning of set.  */

	  elt = get_set_constructor_bits (exp, bit_buffer, nbits);
	  for (;;)
	    {
	      if (bit_buffer[ibit])
		{
		  if (BYTES_BIG_ENDIAN)
		    word |= (1 << (set_word_size - 1 - bit_pos));
		  else
		    word |= 1 << bit_pos;
		}

	      bit_pos++;  ibit++;
	      if (bit_pos >= set_word_size || ibit == nbits)
		{
		  if (word != 0 || ! cleared)
		    {
		      rtx datum = GEN_INT (word);
		      rtx to_rtx;

		      /* The assumption here is that it is safe to use
			 XEXP if the set is multi-word, but not if
			 it's single-word.  */
		      if (GET_CODE (target) == MEM)
			to_rtx = adjust_address (target, mode, offset);
		      else if (offset == 0)
			to_rtx = target;
		      else
			abort ();
		      emit_move_insn (to_rtx, datum);
		    }

		  if (ibit == nbits)
		    break;
		  word = 0;
		  bit_pos = 0;
		  offset += set_word_size / BITS_PER_UNIT;
		}
	    }
	}
      else if (!cleared)
	/* Don't bother clearing storage if the set is all ones.  */
	if (TREE_CHAIN (elt) != NULL_TREE
	    || (TREE_PURPOSE (elt) == NULL_TREE
		? nbits != 1
		: (! host_integerp (TREE_VALUE (elt), 0)
		   || ! host_integerp (TREE_PURPOSE (elt), 0)
		   || (tree_low_cst (TREE_VALUE (elt), 0)
		       - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
		       != (HOST_WIDE_INT) nbits))))
	  clear_storage (target, expr_size (exp));

      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
	{
	  /* Start of range of element or NULL.  */
	  tree startbit = TREE_PURPOSE (elt);
	  /* End of range of element, or element value.  */
	  tree endbit = TREE_VALUE (elt);
	  HOST_WIDE_INT startb, endb;
	  rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

	  bitlength_rtx = expand_expr (bitlength,
				       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

	  /* Handle non-range tuple element like [ expr ].  */
	  if (startbit == NULL_TREE)
	    {
	      startbit = save_expr (endbit);
	      endbit = startbit;
	    }

	  startbit = convert (sizetype, startbit);
	  endbit = convert (sizetype, endbit);
	  if (! integer_zerop (domain_min))
	    {
	      startbit = size_binop (MINUS_EXPR, startbit, domain_min);
	      endbit = size_binop (MINUS_EXPR, endbit, domain_min);
	    }
	  startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
				      EXPAND_CONST_ADDRESS);
	  endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
				    EXPAND_CONST_ADDRESS);

	  if (REG_P (target))
	    {
	      targetx
		= assign_temp
		  ((build_qualified_type ((*lang_hooks.types.type_for_mode)
					  (GET_MODE (target), 0),
					  TYPE_QUAL_CONST)),
		   0, 1, 1);
	      emit_move_insn (targetx, target);
	    }

	  else if (GET_CODE (target) == MEM)
	    targetx = target;
	  else
	    abort ();

	  /* Optimization:  If startbit and endbit are constants divisible
	     by BITS_PER_UNIT, call memset instead.  */
	  if (TARGET_MEM_FUNCTIONS
	      && TREE_CODE (startbit) == INTEGER_CST
	      && TREE_CODE (endbit) == INTEGER_CST
	      && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
	      && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
	    {
	      emit_library_call (memset_libfunc, LCT_NORMAL,
				 VOIDmode, 3,
				 plus_constant (XEXP (targetx, 0),
						startb / BITS_PER_UNIT),
				 Pmode,
				 constm1_rtx, TYPE_MODE (integer_type_node),
				 GEN_INT ((endb - startb) / BITS_PER_UNIT),
				 TYPE_MODE (sizetype));
	    }
	  else
	    emit_library_call (setbits_libfunc, LCT_NORMAL,
			       VOIDmode, 4, XEXP (targetx, 0),
			       Pmode, bitlength_rtx, TYPE_MODE (sizetype),
			       startbit_rtx, TYPE_MODE (sizetype),
			       endbit_rtx, TYPE_MODE (sizetype));

	  if (REG_P (target))
	    emit_move_insn (target, targetx);
	}
    }

  else
    abort ();
}
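/* Illustrative sketch, not part of the original source (kept under
   #if 0 so it cannot affect the build): the bit-packing idiom used for
   SET_TYPE constructors above, reduced to plain C.  BIT_BUFFER holds
   NBITS boolean flags; each word of WORD_SIZE bits is handed to the
   hypothetical FLUSH callback as it is completed, mirroring the loop
   over bit_buffer above (big-endian bit numbering omitted).  */
#if 0
static void
sketch_pack_set_bits (bit_buffer, nbits, word_size, flush)
     const char *bit_buffer;
     unsigned int nbits, word_size;
     void (*flush) PARAMS ((unsigned long, unsigned int));
{
  unsigned long word = 0;
  unsigned int bit_pos = 0, ibit = 0, offset = 0;

  for (;;)
    {
      if (bit_buffer[ibit])
	word |= 1UL << bit_pos;	/* Little-endian bit numbering.  */

      bit_pos++;  ibit++;
      if (bit_pos >= word_size || ibit == nbits)
	{
	  (*flush) (word, offset);	/* Emit the completed word.  */
	  if (ibit == nbits)
	    break;
	  word = 0;
	  bit_pos = 0;
	  offset += word_size / 8;
	}
    }
}
#endif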

/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   TYPE is the type of the underlying object.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */

static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode, unsignedp, type,
	     alias_set)
     rtx target;
     HOST_WIDE_INT bitsize;
     HOST_WIDE_INT bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     tree type;
     int alias_set;
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  /* If we have nothing to store, do nothing unless the expression has
     side-effects.  */
  if (bitsize == 0)
    return expand_expr (exp, const0_rtx, VOIDmode, 0);
  else if (bitsize >= 0 && bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  It's tempting to make OBJECT readonly, but it's set
     twice, once with emit_move_insn and once via store_field.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_temp (type, 0, 1, 1);
      rtx blk_object = adjust_address (object, BLKmode, 0);

      if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
	emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0, type,
		   alias_set);

      emit_move_insn (target, object);

      /* We want to return the BLKmode version of the data.  */
      return blk_object;
    }

  if (GET_CODE (target) == CONCAT)
    {
      /* We're storing into a struct containing a single __complex.  */

      if (bitpos != 0)
	abort ();
      return store_expr (exp, target, 0);
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
	  && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
	 store it as a bit field.  */
      || (mode != BLKmode
	  && ((((MEM_ALIGN (target) < GET_MODE_ALIGNMENT (mode))
		|| bitpos % GET_MODE_ALIGNMENT (mode))
	       && SLOW_UNALIGNED_ACCESS (mode, MEM_ALIGN (target)))
	      || (bitpos % BITS_PER_UNIT != 0)))
      /* If the RHS and field are a constant size and the size of the
	 RHS isn't the same size as the bitfield, we must use bitfield
	 operations.  */
      || (bitsize >= 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
	  && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If BITSIZE is narrower than the size of the type of EXP
	 we will be narrowing TEMP.  Normally, what's wanted are the
	 low-order bits.  However, if EXP's type is a record and this is
	 a big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	  && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (temp))
	  && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
	temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
			     size_int (GET_MODE_BITSIZE (GET_MODE (temp))
				       - bitsize),
			     NULL_RTX, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
	 MODE.  */
      if (mode != VOIDmode && mode != BLKmode
	  && mode != TYPE_MODE (TREE_TYPE (exp)))
	temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
	 must be in memory and BITPOS must be aligned on a byte
	 boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
	{
	  if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
	      || bitpos % BITS_PER_UNIT != 0)
	    abort ();

	  target = adjust_address (target, VOIDmode, bitpos / BITS_PER_UNIT);
	  emit_block_move (target, temp,
			   GEN_INT ((bitsize + BITS_PER_UNIT - 1)
				    / BITS_PER_UNIT),
			   BLOCK_OP_NORMAL);

	  return value_mode == VOIDmode ? const0_rtx : target;
	}

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp,
		       int_size_in_bytes (type));

      if (value_mode != VOIDmode)
	{
	  /* The caller wants an rtx for the value.
	     If possible, avoid refetching from the bitfield itself.  */
	  if (width_mask != 0
	      && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
	    {
	      tree count;
	      enum machine_mode tmode;

	      tmode = GET_MODE (temp);
	      if (tmode == VOIDmode)
		tmode = value_mode;

	      if (unsignedp)
		return expand_and (tmode, temp,
				   gen_int_mode (width_mask, tmode),
				   NULL_RTX);

	      count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
	      temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
	      return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
	    }

	  return extract_bit_field (target, bitsize, bitpos, unsignedp,
				    NULL_RTX, value_mode, VOIDmode,
				    int_size_in_bytes (type));
	}
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx = target;

      /* If a value is wanted, it must be the lhs;
	 so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
	  && ! CONSTANT_ADDRESS_P (addr)
	  /* A frame-pointer reference is already stable.  */
	  && ! (GET_CODE (addr) == PLUS
		&& GET_CODE (XEXP (addr, 1)) == CONST_INT
		&& (XEXP (addr, 0) == virtual_incoming_args_rtx
		    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
	to_rtx = replace_equiv_address (to_rtx, copy_to_reg (addr));

      /* Now build a reference to just the desired component.  */

      to_rtx = adjust_address (target, mode, bitpos / BITS_PER_UNIT);

      if (to_rtx == target)
	to_rtx = copy_rtx (to_rtx);

      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      if (!MEM_KEEP_ALIAS_SET_P (to_rtx) && MEM_ALIAS_SET (to_rtx) != 0)
	set_mem_alias_set (to_rtx, alias_set);

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
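/* Illustrative sketch, not part of the original source (kept under
   #if 0 so it cannot affect the build): the two ways store_field above
   re-derives the stored value without re-reading the bitfield.  The
   function and parameter names are hypothetical; WIDTH is the bitfield
   width and HOST_BITS_PER_LONG is assumed to be the host long width.  */
#if 0
static long
sketch_refetch_value (value, width, is_unsigned)
     long value;
     int width;
     int is_unsigned;
{
  int excess = HOST_BITS_PER_LONG - width;

  if (is_unsigned)
    /* Mask off the low WIDTH bits, as expand_and does above.  */
    return value & (((long) 1 << width) - 1);

  /* Shift left then arithmetic-shift right to sign-extend the low
     WIDTH bits, matching the LSHIFT_EXPR/RSHIFT_EXPR pair above.  */
  return (value << excess) >> excess;
}
#endif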
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   an ARRAY_REF, or an ARRAY_RANGE_REF, look for nested operations of these
   codes and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep)
     tree exp;
     HOST_WIDE_INT *pbitsize;
     HOST_WIDE_INT *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
{
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = size_zero_node;
  tree bit_offset = bitsize_zero_node;
  tree placeholder_ptr = 0;
  tree tem;

  /* First get the mode, signedness, and size.  We do this from just the
     outermost expression.  */
  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));

      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));

      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));
      else
	*pbitsize = GET_MODE_BITSIZE (mode);
    }

  if (size_tree != 0)
    {
      if (! host_integerp (size_tree, 1))
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = tree_low_cst (size_tree, 1);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */
  while (1)
    {
      if (TREE_CODE (exp) == BIT_FIELD_REF)
	bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
      else if (TREE_CODE (exp) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (exp, 1);
	  tree this_offset = DECL_FIELD_OFFSET (field);

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (this_offset == 0)
	    break;
	  else if (CONTAINS_PLACEHOLDER_P (this_offset))
	    this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);

	  offset = size_binop (PLUS_EXPR, offset, this_offset);
	  bit_offset = size_binop (PLUS_EXPR, bit_offset,
				   DECL_FIELD_BIT_OFFSET (field));

	  /* ??? Right now we don't do anything with DECL_OFFSET_ALIGN.  */
	}

      else if (TREE_CODE (exp) == ARRAY_REF
	       || TREE_CODE (exp) == ARRAY_RANGE_REF)
	{
	  tree index = TREE_OPERAND (exp, 1);
	  tree array = TREE_OPERAND (exp, 0);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	  tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
	  tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (array)));

	  /* We assume all arrays have sizes that are a multiple of a byte.
	     First subtract the lower bound, if any, in the type of the
	     index, then convert to sizetype and multiply by the size of the
	     array element.  */
	  if (low_bound != 0 && ! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, TREE_TYPE (index),
				 index, low_bound));

	  /* If the index has a self-referential type, pass it to a
	     WITH_RECORD_EXPR; if the component size is, pass our
	     component to one.  */
	  if (CONTAINS_PLACEHOLDER_P (index))
	    index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
	  if (CONTAINS_PLACEHOLDER_P (unit_size))
	    unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size, array);

	  offset = size_binop (PLUS_EXPR, offset,
			       size_binop (MULT_EXPR,
					   convert (sizetype, index),
					   unit_size));
	}

      else if (TREE_CODE (exp) == PLACEHOLDER_EXPR)
	{
	  tree new = find_placeholder (exp, &placeholder_ptr);

	  /* If we couldn't find the replacement, return the PLACEHOLDER_EXPR.
	     We might have been called from tree optimization where we
	     haven't set up an object yet.  */
	  if (new == 0)
	    break;
	  else
	    exp = new;

	  continue;
	}

      /* We can go inside most conversions: all NON_VALUE_EXPRs, all normal
	 conversions that don't change the mode, and all view conversions
	 except those that need to "step up" the alignment.  */
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! (TREE_CODE (exp) == VIEW_CONVERT_EXPR
		     && ! ((TYPE_ALIGN (TREE_TYPE (exp))
			    > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
			   && STRICT_ALIGNMENT
			   && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
			       < BIGGEST_ALIGNMENT)
			   && (TYPE_ALIGN_OK (TREE_TYPE (exp))
			       || TYPE_ALIGN_OK (TREE_TYPE
						 (TREE_OPERAND (exp, 0))))))
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      exp = TREE_OPERAND (exp, 0);
    }

  /* If OFFSET is constant, see if we can return the whole thing as a
     constant bit position.  Otherwise, split it up.  */
  if (host_integerp (offset, 0)
      && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
				 bitsize_unit_node))
      && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
      && host_integerp (tem, 0))
    *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
  else
    *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;

  *pmode = mode;
  return exp;
}

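/* Illustrative sketch, not part of the original source (kept under
   #if 0 so it cannot affect the build): how a typical caller drives
   get_inner_reference above.  EXP here is a hypothetical reference
   tree such as a COMPONENT_REF.  */
#if 0
static tree
sketch_decompose_reference (exp)
     tree exp;
{
  HOST_WIDE_INT bitsize, bitpos;
  tree offset;
  enum machine_mode mode;
  int unsignedp, volatilep = 0;

  /* After the call, BITPOS/OFFSET locate the field within the
     returned containing object, and MODE/BITSIZE describe it.  */
  return get_inner_reference (exp, &bitsize, &bitpos, &offset,
			      &mode, &unsignedp, &volatilep);
}
#endif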
/* Return 1 if T is an expression that get_inner_reference handles.  */

int
handled_component_p (t)
     tree t;
{
  switch (TREE_CODE (t))
    {
    case BIT_FIELD_REF:
    case COMPONENT_REF:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case NON_LVALUE_EXPR:
    case VIEW_CONVERT_EXPR:
      return 1;

    case NOP_EXPR:
    case CONVERT_EXPR:
      return (TYPE_MODE (TREE_TYPE (t))
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (t, 0))));

    default:
      return 0;
    }
}
/* Given an rtx VALUE that may contain additions and multiplications, return
   an equivalent value that just refers to a register, memory, or constant.
   This is done by generating instructions to perform the arithmetic and
   returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (value, target)
     rtx value, target;
{
  rtx op1, op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  rtx subtarget = get_subtarget (target);
  enum rtx_code code = GET_CODE (value);

  /* Check for a PIC address load.  */
  if ((code == PLUS || code == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (code == ZERO_EXTEND || code == SIGN_EXTEND)
    {
      if (!target)
	target = gen_reg_rtx (GET_MODE (value));
      convert_move (target, force_operand (XEXP (value, 0), NULL),
		    code == ZERO_EXTEND);
      return target;
    }

  if (GET_RTX_CLASS (code) == '2' || GET_RTX_CLASS (code) == 'c')
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2) && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (code == MINUS && GET_CODE (op2) == CONST_INT)
	{
	  code = PLUS;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (code == PLUS && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_simple_binop (GET_MODE (value), code,
					  XEXP (XEXP (value, 0), 0), op2,
					  subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_simple_binop (GET_MODE (value), code, temp,
				      force_operand (XEXP (XEXP (value,
								 0), 1), 0),
				      target, 0, OPTAB_LIB_WIDEN);
	}

      op1 = force_operand (XEXP (value, 0), subtarget);
      op2 = force_operand (op2, NULL_RTX);
      switch (code)
	{
	case MULT:
	  return expand_mult (GET_MODE (value), op1, op2, target, 1);
	case DIV:
	  if (!INTEGRAL_MODE_P (GET_MODE (value)))
	    return expand_simple_binop (GET_MODE (value), code, op1, op2,
					target, 1, OPTAB_LIB_WIDEN);
	  else
	    return expand_divmod (0,
				  FLOAT_MODE_P (GET_MODE (value))
				  ? RDIV_EXPR : TRUNC_DIV_EXPR,
				  GET_MODE (value), op1, op2, target, 0);
	  break;
	case MOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 0);
	  break;
	case UDIV:
	  return expand_divmod (0, TRUNC_DIV_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	  break;
	case UMOD:
	  return expand_divmod (1, TRUNC_MOD_EXPR, GET_MODE (value), op1, op2,
				target, 1);
	  break;
	case ASHIFTRT:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 0, OPTAB_LIB_WIDEN);
	  break;
	default:
	  return expand_simple_binop (GET_MODE (value), code, op1, op2,
				      target, 1, OPTAB_LIB_WIDEN);
	}
    }
  if (GET_RTX_CLASS (code) == '1')
    {
      op1 = force_operand (XEXP (value, 0), NULL_RTX);
      return expand_simple_unop (GET_MODE (value), code, op1, target, 0);
    }

#ifdef INSN_SCHEDULING
  /* On machines that have insn scheduling, we want all memory references to be
     explicit, so we need to deal with such paradoxical SUBREGs.  */
  if (GET_CODE (value) == SUBREG && GET_CODE (SUBREG_REG (value)) == MEM
      && (GET_MODE_SIZE (GET_MODE (value))
	  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (value)))))
    value
      = simplify_gen_subreg (GET_MODE (value),
			     force_reg (GET_MODE (SUBREG_REG (value)),
					force_operand (SUBREG_REG (value),
						       NULL_RTX)),
			     GET_MODE (SUBREG_REG (value)),
			     SUBREG_BYTE (value));
#endif

  return value;
}
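/* Illustrative sketch, not part of the original source (kept under
   #if 0 so it cannot affect the build): a typical use of force_operand
   above.  The rtx built here is hypothetical; the point is that the
   returned value is always a register, memory, or constant that later
   passes can use directly as an operand.  */
#if 0
static rtx
sketch_force_sum (reg, offset)
     rtx reg;
     HOST_WIDE_INT offset;
{
  /* (plus (reg) (const_int OFFSET)) is not a valid operand everywhere;
     force_operand emits an add and hands back the result pseudo.  */
  rtx sum = gen_rtx_PLUS (Pmode, reg, GEN_INT (offset));
  return force_operand (sum, NULL_RTX);
}
#endif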
/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static tree save_expr_list;

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode)
      /* If X is in the outgoing argument area, it is always safe.  */
      || (GET_CODE (x) == MEM
	  && (XEXP (x, 0) == virtual_outgoing_args_rtx
	      || (GET_CODE (XEXP (x, 0)) == PLUS
		  && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
    return 1;

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* A SAVE_EXPR might appear many times in the expression passed to the
     top-level safe_from_p call, and if it has a complex subexpression,
     examining it multiple times could result in a combinatorial explosion.
     E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
     with optimization took about 28 minutes to compile -- even though it was
     only a few lines long.  So we mark each SAVE_EXPR we see with TREE_PRIVATE
     and turn that off when we are done.  We keep a list of the SAVE_EXPRs
     we have processed.  Note that the only test of top_p was above.  */

  if (top_p)
    {
      int rtn;
      tree t;

      save_expr_list = 0;

      rtn = safe_from_p (x, exp, 0);

      for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
	TREE_PRIVATE (TREE_PURPOSE (t)) = 0;

      return rtn;
    }

  /* Now look at our tree code and possibly recurse.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL_IF_SET (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	{
	  while (1)
	    {
	      if (TREE_VALUE (exp) && !safe_from_p (x, TREE_VALUE (exp), 0))
		return 0;
	      exp = TREE_CHAIN (exp);
	      if (!exp)
		return 1;
	      if (TREE_CODE (exp) != TREE_LIST)
		return safe_from_p (x, exp, 0);
	    }
	}
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR?  */
      else
	return 0;

    case '2':
    case '<':
      if (!safe_from_p (x, TREE_OPERAND (exp, 1), 0))
	return 0;
      /* FALLTHRU */

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  /* If the operand is static or we are static, we can't conflict.
	     Likewise if we don't conflict with the operand at all.  */
	  if (staticp (TREE_OPERAND (exp, 0))
	      || TREE_STATIC (exp)
	      || safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    return 1;

	  /* Otherwise, the only way this can conflict is if we are taking
	     the address of a DECL whose address is part of X, which is
	     very rare.  */
	  exp = TREE_OPERAND (exp, 0);
	  if (DECL_P (exp))
	    {
	      if (!DECL_RTL_SET_P (exp)
		  || GET_CODE (DECL_RTL (exp)) != MEM)
		return 0;
	      else
		exp_rtl = XEXP (DECL_RTL (exp), 0);
	    }
	  break;

	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM
	      && alias_sets_conflict_p (MEM_ALIAS_SET (x),
					get_alias_set (exp)))
	    return 0;
	  break;

	case CALL_EXPR:
	  /* Assume that the call will clobber all hard registers and
	     all of memory.  */
	  if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	      || GET_CODE (x) == MEM)
	    return 0;
	  break;

	case RTL_EXPR:
	  /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;

	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = WITH_CLEANUP_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  if (exp_rtl)
	    break;

	  /* If we've already scanned this, don't do it again.  Otherwise,
	     show we've scanned it and record for clearing the flag if we're
	     going on.  */
	  if (TREE_PRIVATE (exp))
	    return 1;

	  TREE_PRIVATE (exp) = 1;
	  if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
	    {
	      TREE_PRIVATE (exp) = 0;
	      return 0;
	    }

	  save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
	  return 1;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

	case METHOD_CALL_EXPR:
	  /* This takes an rtx argument, but shouldn't appear here.  */
	  abort ();

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = first_rtl_op (TREE_CODE (exp));
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;

      /* If this is a language-specific tree code, it may require
	 special handling.  */
      if ((unsigned int) TREE_CODE (exp)
	  >= (unsigned int) LAST_AND_UNUSED_TREE_CODE
	  && !(*lang_hooks.safe_from_p) (x, exp))
	return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and they conflict.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && true_dependence (exp_rtl, VOIDmode, x,
					rtx_addr_varies_p)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
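/* Illustrative sketch, not part of the original source (kept under
   #if 0 so it cannot affect the build): the memoization pattern used
   for SAVE_EXPRs above, reduced to its core.  A shared node is marked
   on first visit so an expression DAG is walked once per node instead
   of once per path, which is what caused the combinatorial blowup
   described above.  The struct and its fields are hypothetical.  */
#if 0
struct sketch_node { int visited; struct sketch_node *kid[2]; };

static int
sketch_walk_once (n)
     struct sketch_node *n;
{
  if (n == 0 || n->visited)
    return 1;		/* Already proven safe on another path.  */
  n->visited = 1;	/* Mark before recursing, like TREE_PRIVATE.  */
  return sketch_walk_once (n->kid[0]) && sketch_walk_once (n->kid[1]);
}
#endif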

/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}

#ifdef MAX_INTEGER_COMPUTATION_MODE

void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code;
  enum machine_mode mode;

  /* Strip any NOPs that don't change the mode.  */
  STRIP_NOPS (exp);
  code = TREE_CODE (exp);

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

  /* First check the type of the overall operation.   We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	internal_error ("unsupported wide integer operation");
    }
}
#endif
0d4903b8
RK
6461/* Return the highest power of two that EXP is known to be a multiple of.
6462 This is used in updating alignment of MEMs in array references. */
6463
9ceca302 6464static unsigned HOST_WIDE_INT
0d4903b8
RK
6465highest_pow2_factor (exp)
6466 tree exp;
6467{
9ceca302 6468 unsigned HOST_WIDE_INT c0, c1;
0d4903b8
RK
6469
6470 switch (TREE_CODE (exp))
6471 {
6472 case INTEGER_CST:
e0f1be5c
JJ
6473 /* We can find the lowest bit that's a one. If the low
6474 HOST_BITS_PER_WIDE_INT bits are zero, return BIGGEST_ALIGNMENT.
6475 We need to handle this case since we can find it in a COND_EXPR,
6476 a MIN_EXPR, or a MAX_EXPR. If the constant overlows, we have an
6477 erroneous program, so return BIGGEST_ALIGNMENT to avoid any
3a531a8b 6478 later ICE. */
e0f1be5c 6479 if (TREE_CONSTANT_OVERFLOW (exp))
1ed1b4fb 6480 return BIGGEST_ALIGNMENT;
e0f1be5c 6481 else
0d4903b8 6482 {
e0f1be5c
JJ
6483 /* Note: tree_low_cst is intentionally not used here,
6484 we don't care about the upper bits. */
6485 c0 = TREE_INT_CST_LOW (exp);
6486 c0 &= -c0;
6487 return c0 ? c0 : BIGGEST_ALIGNMENT;
0d4903b8
RK
6488 }
6489 break;
6490
65a07688 6491 case PLUS_EXPR: case MINUS_EXPR: case MIN_EXPR: case MAX_EXPR:
0d4903b8
RK
6492 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6493 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6494 return MIN (c0, c1);
6495
6496 case MULT_EXPR:
6497 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6498 c1 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6499 return c0 * c1;
6500
6501 case ROUND_DIV_EXPR: case TRUNC_DIV_EXPR: case FLOOR_DIV_EXPR:
6502 case CEIL_DIV_EXPR:
6503 if (integer_pow2p (TREE_OPERAND (exp, 1))
6504 && host_integerp (TREE_OPERAND (exp, 1), 1))
6505 {
6506 c0 = highest_pow2_factor (TREE_OPERAND (exp, 0));
6507 c1 = tree_low_cst (TREE_OPERAND (exp, 1), 1);
6508 return MAX (1, c0 / c1);
6509 }
6510 break;
6511
6512 case NON_LVALUE_EXPR: case NOP_EXPR: case CONVERT_EXPR:
65a07688 6513 case SAVE_EXPR: case WITH_RECORD_EXPR:
6514 return highest_pow2_factor (TREE_OPERAND (exp, 0));
6515
6516 case COMPOUND_EXPR:
6517 return highest_pow2_factor (TREE_OPERAND (exp, 1));
6518
6519 case COND_EXPR:
6520 c0 = highest_pow2_factor (TREE_OPERAND (exp, 1));
6521 c1 = highest_pow2_factor (TREE_OPERAND (exp, 2));
6522 return MIN (c0, c1);
6523
6524 default:
6525 break;
6526 }
6527
6528 return 1;
6529}
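/* Illustrative sketch, not used by the compiler: the INTEGER_CST case
   above relies on the two's-complement identity C & -C, which isolates
   the lowest set bit of C -- exactly the largest power of two dividing
   C.  A stand-alone version, with plain unsigned long standing in for
   unsigned HOST_WIDE_INT and a made-up name:  */

static unsigned long
lowest_set_bit_sketch (c)
     unsigned long c;
{
  /* E.g. c = 24 (binary 11000) gives c & -c = 8, the highest power of
     two dividing 24.  A zero input yields 0 here; the real code maps
     that case (and overflowed constants) to BIGGEST_ALIGNMENT.  */
  return c & -c;
}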
6530
6531/* Similar, except that it is known that the expression must be a multiple
6532 of the alignment of TYPE. */
6533
9ceca302 6534static unsigned HOST_WIDE_INT
6535highest_pow2_factor_for_type (type, exp)
6536 tree type;
6537 tree exp;
6538{
9ceca302 6539 unsigned HOST_WIDE_INT type_align, factor;
6540
6541 factor = highest_pow2_factor (exp);
6542 type_align = TYPE_ALIGN (type) / BITS_PER_UNIT;
6543 return MAX (factor, type_align);
6544}
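/* Worked example (illustrative): for an offset expression I * 4 within
   an object whose type has a TYPE_ALIGN of 128 bits, highest_pow2_factor
   proves a factor of 4 while type_align is 16 bytes, so the result is
   16 -- the known type alignment can strengthen what the factor
   analysis alone establishes.  */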
0d4903b8 6545\f
6546/* Return an object on the placeholder list that matches EXP, a
6547 PLACEHOLDER_EXPR. An object "matches" if it is of the type of the
738cc472 6548 PLACEHOLDER_EXPR or a pointer type to it. For further information, see
6549 tree.def. If no such object is found, return 0. If PLIST is nonzero, it
6550 is a location which initially points to a starting location in the
6551 placeholder list (zero means start of the list) and where a pointer into
6552 the placeholder list at which the object is found is placed. */
6553
6554tree
6555find_placeholder (exp, plist)
6556 tree exp;
6557 tree *plist;
6558{
6559 tree type = TREE_TYPE (exp);
6560 tree placeholder_expr;
6561
6562 for (placeholder_expr
6563 = plist && *plist ? TREE_CHAIN (*plist) : placeholder_list;
6564 placeholder_expr != 0;
6565 placeholder_expr = TREE_CHAIN (placeholder_expr))
6566 {
6567 tree need_type = TYPE_MAIN_VARIANT (type);
6568 tree elt;
6569
6570 /* Find the outermost reference that is of the type we want. If none,
6571 see if any object has a type that is a pointer to the type we
6572 want. */
6573 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6574 elt = ((TREE_CODE (elt) == COMPOUND_EXPR
6575 || TREE_CODE (elt) == COND_EXPR)
6576 ? TREE_OPERAND (elt, 1)
6577 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6578 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6579 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6580 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6581 ? TREE_OPERAND (elt, 0) : 0))
6582 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6583 {
6584 if (plist)
6585 *plist = placeholder_expr;
6586 return elt;
6587 }
6588
6589 for (elt = TREE_PURPOSE (placeholder_expr); elt != 0;
6590 elt
6591 = ((TREE_CODE (elt) == COMPOUND_EXPR
6592 || TREE_CODE (elt) == COND_EXPR)
6593 ? TREE_OPERAND (elt, 1)
6594 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6595 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6596 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6597 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6598 ? TREE_OPERAND (elt, 0) : 0))
6599 if (POINTER_TYPE_P (TREE_TYPE (elt))
6600 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
6601 == need_type))
6602 {
6603 if (plist)
6604 *plist = placeholder_expr;
6605 return build1 (INDIRECT_REF, need_type, elt);
6606 }
6607 }
6608
70072ed9 6609 return 0;
6610}
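/* PLACEHOLDER_EXPRs arise chiefly for self-referential (e.g. Ada
   variable-sized record) types, where a field's size or position
   refers to the record object itself.  A WITH_RECORD_EXPR (handled in
   expand_expr below) pushes a concrete object on placeholder_list, and
   find_placeholder recovers that object -- or an INDIRECT_REF of a
   pointer to it -- when the types match.  */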
6611\f
6612/* expand_expr: generate code for computing expression EXP.
6613 An rtx for the computed value is returned. The value is never null.
6614 In the case of a void EXP, const0_rtx is returned.
6615
6616 The value may be stored in TARGET if TARGET is nonzero.
6617 TARGET is just a suggestion; callers must assume that
6618 the rtx returned may not be the same as TARGET.
6619
6620 If TARGET is CONST0_RTX, it means that the value will be ignored.
6621
6622 If TMODE is not VOIDmode, it suggests generating the
6623 result in mode TMODE. But this is done only when convenient.
 6624	 Otherwise, TMODE is ignored and the value is generated in its natural mode.
6625 TMODE is just a suggestion; callers must assume that
6626 the rtx returned may not have mode TMODE.
6627
6628 Note that TARGET may have neither TMODE nor MODE. In that case, it
6629 probably will not be used.
6630
6631 If MODIFIER is EXPAND_SUM then when EXP is an addition
6632 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
6633 or a nest of (PLUS ...) and (MINUS ...) where the terms are
6634 products as above, or REG or MEM, or constant.
6635 Ordinarily in such cases we would output mul or add instructions
6636 and then return a pseudo reg containing the sum.
6637
6638 EXPAND_INITIALIZER is much like EXPAND_SUM except that
6639 it also marks a label as absolutely required (it can't be dead).
26fcb35a 6640 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
6641 This is used for outputting expressions used in initializers.
6642
6643 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
6644 with a constant address even if that address is not normally legitimate.
6645 EXPAND_INITIALIZER and EXPAND_SUM also have this effect.
6646
6647 EXPAND_STACK_PARM is used when expanding to a TARGET on the stack for
6648 a call parameter. Such targets require special care as we haven't yet
6649 marked TARGET so that it's safe from being trashed by libcalls. We
6650 don't want to use TARGET for anything but the final result;
6651 Intermediate values must go elsewhere. Additionally, calls to
6652 emit_block_move will be flagged with BLOCK_OP_CALL_PARM. */
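/* For reference, a typical call from elsewhere in the compiler is
   simply

	expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   letting the expression pick both its own mode and its own target
   (EXPAND_NORMAL being the ordinary, zero modifier).  */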
6653
6654rtx
6655expand_expr (exp, target, tmode, modifier)
b3694847 6656 tree exp;
6657 rtx target;
6658 enum machine_mode tmode;
6659 enum expand_modifier modifier;
6660{
b3694847 6661 rtx op0, op1, temp;
6662 tree type = TREE_TYPE (exp);
6663 int unsignedp = TREE_UNSIGNED (type);
6664 enum machine_mode mode;
6665 enum tree_code code = TREE_CODE (exp);
bbf6f052 6666 optab this_optab;
6667 rtx subtarget, original_target;
6668 int ignore;
6669 tree context;
6670
3a94c984 6671 /* Handle ERROR_MARK before anybody tries to access its type. */
85f3d674 6672 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
6673 {
6674 op0 = CONST0_RTX (tmode);
6675 if (op0 != 0)
6676 return op0;
6677 return const0_rtx;
6678 }
6679
6680 mode = TYPE_MODE (type);
6681 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 6682 subtarget = get_subtarget (target);
6683 original_target = target;
6684 ignore = (target == const0_rtx
6685 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
6686 || code == CONVERT_EXPR || code == REFERENCE_EXPR
ac79cd5a 6687 || code == COND_EXPR || code == VIEW_CONVERT_EXPR)
68557e14
ML
6688 && TREE_CODE (type) == VOID_TYPE));
6689
6690 /* If we are going to ignore this result, we need only do something
6691 if there is a side-effect somewhere in the expression. If there
6692 is, short-circuit the most common cases here. Note that we must
6693 not call expand_expr with anything but const0_rtx in case this
6694 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 6695
6696 if (ignore)
6697 {
6698 if (! TREE_SIDE_EFFECTS (exp))
6699 return const0_rtx;
6700
6701 /* Ensure we reference a volatile object even if value is ignored, but
6702 don't do this if all we are doing is taking its address. */
6703 if (TREE_THIS_VOLATILE (exp)
6704 && TREE_CODE (exp) != FUNCTION_DECL
6705 && mode != VOIDmode && mode != BLKmode
6706 && modifier != EXPAND_CONST_ADDRESS)
dd27116b 6707 {
37a08a29 6708 temp = expand_expr (exp, NULL_RTX, VOIDmode, modifier);
6709 if (GET_CODE (temp) == MEM)
6710 temp = copy_to_reg (temp);
6711 return const0_rtx;
6712 }
6713
6714 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
6715 || code == INDIRECT_REF || code == BUFFER_REF)
6716 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6717 modifier);
6718
14a774a9 6719 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
b4e3fabb 6720 || code == ARRAY_REF || code == ARRAY_RANGE_REF)
dd27116b 6721 {
6722 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6723 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6724 return const0_rtx;
6725 }
6726 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6727 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6728 /* If the second operand has no side effects, just evaluate
0f41302f 6729 the first. */
6730 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6731 modifier);
6732 else if (code == BIT_FIELD_REF)
6733 {
6734 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, modifier);
6735 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, modifier);
6736 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, modifier);
6737 return const0_rtx;
6738 }
37a08a29 6739
90764a87 6740 target = 0;
dd27116b 6741 }
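  /* For instance, an expression statement such as "(void) (x++ + y);"
     whose value is unused takes the branch above: the operands are
     expanded only for their side effects, and const0_rtx stands in
     for the discarded result.  */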
bbf6f052 6742
dbecbbe4 6743#ifdef MAX_INTEGER_COMPUTATION_MODE
5f652c07 6744 /* Only check stuff here if the mode we want is different from the mode
fbe5a4a6 6745 of the expression; if it's the same, check_max_integer_computation_mode
6746 will handle it. Do we really need to check this stuff at all? */
6747
ce3c0b53 6748 if (target
5f652c07 6749 && GET_MODE (target) != mode
6750 && TREE_CODE (exp) != INTEGER_CST
6751 && TREE_CODE (exp) != PARM_DECL
ee06cc21 6752 && TREE_CODE (exp) != ARRAY_REF
b4e3fabb 6753 && TREE_CODE (exp) != ARRAY_RANGE_REF
6754 && TREE_CODE (exp) != COMPONENT_REF
6755 && TREE_CODE (exp) != BIT_FIELD_REF
6756 && TREE_CODE (exp) != INDIRECT_REF
6bcd94ae 6757 && TREE_CODE (exp) != CALL_EXPR
6758 && TREE_CODE (exp) != VAR_DECL
6759 && TREE_CODE (exp) != RTL_EXPR)
6760 {
6761 enum machine_mode mode = GET_MODE (target);
6762
6763 if (GET_MODE_CLASS (mode) == MODE_INT
6764 && mode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6765 internal_error ("unsupported wide integer operation");
6766 }
6767
6768 if (tmode != mode
6769 && TREE_CODE (exp) != INTEGER_CST
ce3c0b53 6770 && TREE_CODE (exp) != PARM_DECL
ee06cc21 6771 && TREE_CODE (exp) != ARRAY_REF
b4e3fabb 6772 && TREE_CODE (exp) != ARRAY_RANGE_REF
6773 && TREE_CODE (exp) != COMPONENT_REF
6774 && TREE_CODE (exp) != BIT_FIELD_REF
6775 && TREE_CODE (exp) != INDIRECT_REF
ce3c0b53 6776 && TREE_CODE (exp) != VAR_DECL
6bcd94ae 6777 && TREE_CODE (exp) != CALL_EXPR
6ab46dff 6778 && TREE_CODE (exp) != RTL_EXPR
71bca506 6779 && GET_MODE_CLASS (tmode) == MODE_INT
dbecbbe4 6780 && tmode > MAX_INTEGER_COMPUTATION_MODE)
400500c4 6781 internal_error ("unsupported wide integer operation");
6782
6783 check_max_integer_computation_mode (exp);
6784#endif
6785
6786 /* If will do cse, generate all results into pseudo registers
6787 since 1) that allows cse to find more things
6788 and 2) otherwise cse could produce an insn the machine
6789 cannot support. An exception is a CONSTRUCTOR into a multi-word
6790 MEM: that's much more likely to be most efficient into the MEM.
6791 Another is a CALL_EXPR which must return in memory. */
e44842fe 6792
bbf6f052 6793 if (! cse_not_expected && mode != BLKmode && target
c24ae149 6794 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER)
6795 && ! (code == CONSTRUCTOR && GET_MODE_SIZE (mode) > UNITS_PER_WORD)
6796 && ! (code == CALL_EXPR && aggregate_value_p (exp)))
8403445a 6797 target = 0;
bbf6f052 6798
6799 switch (code)
6800 {
6801 case LABEL_DECL:
6802 {
6803 tree function = decl_function_context (exp);
6804 /* Labels in containing functions, or labels used from initializers,
6805 must be forced. */
6806 if (modifier == EXPAND_INITIALIZER
6807 || (function != current_function_decl
6808 && function != inline_function_decl
6809 && function != 0))
6810 temp = force_label_rtx (exp);
ab87f8c8 6811 else
046e4e36 6812 temp = label_rtx (exp);
c5c76735 6813
046e4e36 6814 temp = gen_rtx_MEM (FUNCTION_MODE, gen_rtx_LABEL_REF (Pmode, temp));
6815 if (function != current_function_decl
6816 && function != inline_function_decl && function != 0)
6817 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6818 return temp;
b552441b 6819 }
6820
6821 case PARM_DECL:
1877be45 6822 if (!DECL_RTL_SET_P (exp))
6823 {
6824 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 6825 return CONST0_RTX (mode);
6826 }
6827
0f41302f 6828 /* ... fall through ... */
d6a5ac33 6829
bbf6f052 6830 case VAR_DECL:
6831 /* If a static var's type was incomplete when the decl was written,
6832 but the type is complete now, lay out the decl now. */
6833 if (DECL_SIZE (exp) == 0
6834 && COMPLETE_OR_UNBOUND_ARRAY_TYPE_P (TREE_TYPE (exp))
2dca20cd 6835 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
a46666a9 6836 layout_decl (exp, 0);
921b3427 6837
0f41302f 6838 /* ... fall through ... */
d6a5ac33 6839
2dca20cd 6840 case FUNCTION_DECL:
6841 case RESULT_DECL:
6842 if (DECL_RTL (exp) == 0)
6843 abort ();
d6a5ac33 6844
 6845	 /* Ensure the variable is marked as used even if it doesn't go through
 6846	    a parser.  If it hasn't been used yet, write out an external
6847 definition. */
6848 if (! TREE_USED (exp))
6849 {
6850 assemble_external (exp);
6851 TREE_USED (exp) = 1;
6852 }
6853
6854 /* Show we haven't gotten RTL for this yet. */
6855 temp = 0;
6856
6857 /* Handle variables inherited from containing functions. */
6858 context = decl_function_context (exp);
6859
6860 /* We treat inline_function_decl as an alias for the current function
6861 because that is the inline function whose vars, types, etc.
6862 are being merged into the current function.
6863 See expand_inline_function. */
d6a5ac33 6864
6865 if (context != 0 && context != current_function_decl
6866 && context != inline_function_decl
6867 /* If var is static, we don't need a static chain to access it. */
6868 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6869 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6870 {
6871 rtx addr;
6872
6873 /* Mark as non-local and addressable. */
81feeecb 6874 DECL_NONLOCAL (exp) = 1;
6875 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6876 abort ();
dffd7eb6 6877 (*lang_hooks.mark_addressable) (exp);
6878 if (GET_CODE (DECL_RTL (exp)) != MEM)
6879 abort ();
6880 addr = XEXP (DECL_RTL (exp), 0);
6881 if (GET_CODE (addr) == MEM)
6882 addr
6883 = replace_equiv_address (addr,
6884 fix_lexical_addr (XEXP (addr, 0), exp));
6885 else
6886 addr = fix_lexical_addr (addr, exp);
3bdf5ad1 6887
792760b9 6888 temp = replace_equiv_address (DECL_RTL (exp), addr);
bbf6f052 6889 }
4af3895e 6890
6891 /* This is the case of an array whose size is to be determined
6892 from its initializer, while the initializer is still being parsed.
6893 See expand_decl. */
d6a5ac33 6894
dc6d66b3
RK
6895 else if (GET_CODE (DECL_RTL (exp)) == MEM
6896 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
792760b9 6897 temp = validize_mem (DECL_RTL (exp));
6898
6899 /* If DECL_RTL is memory, we are in the normal case and either
6900 the address is not valid or it is not a register and -fforce-addr
6901 is specified, get the address into a register. */
6902
6903 else if (GET_CODE (DECL_RTL (exp)) == MEM
6904 && modifier != EXPAND_CONST_ADDRESS
6905 && modifier != EXPAND_SUM
6906 && modifier != EXPAND_INITIALIZER
6907 && (! memory_address_p (DECL_MODE (exp),
6908 XEXP (DECL_RTL (exp), 0))
6909 || (flag_force_addr
6910 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6911 temp = replace_equiv_address (DECL_RTL (exp),
6912 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 6913
dc6d66b3 6914 /* If we got something, return it. But first, set the alignment
04956a1a 6915 if the address is a register. */
6916 if (temp != 0)
6917 {
6918 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
bdb429a5 6919 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
6920
6921 return temp;
6922 }
6923
6924 /* If the mode of DECL_RTL does not match that of the decl, it
6925 must be a promoted value. We return a SUBREG of the wanted mode,
6926 but mark it so that we know that it was already extended. */
6927
6928 if (GET_CODE (DECL_RTL (exp)) == REG
7254c5fa 6929 && GET_MODE (DECL_RTL (exp)) != DECL_MODE (exp))
1499e0a8 6930 {
6931 /* Get the signedness used for this variable. Ensure we get the
6932 same mode we got when the variable was declared. */
78911e8b 6933 if (GET_MODE (DECL_RTL (exp))
0fb7aeda 6934 != promote_mode (type, DECL_MODE (exp), &unsignedp,
e8dcd824 6935 (TREE_CODE (exp) == RESULT_DECL ? 1 : 0)))
6936 abort ();
6937
ddef6bc7 6938 temp = gen_lowpart_SUBREG (mode, DECL_RTL (exp));
1499e0a8 6939 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 6940 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
6941 return temp;
6942 }
6943
6944 return DECL_RTL (exp);
6945
6946 case INTEGER_CST:
d8a50944 6947 temp = immed_double_const (TREE_INT_CST_LOW (exp),
05bccae2 6948 TREE_INT_CST_HIGH (exp), mode);
bbf6f052 6949
6950 /* ??? If overflow is set, fold will have done an incomplete job,
6951 which can result in (plus xx (const_int 0)), which can get
6952 simplified by validate_replace_rtx during virtual register
6953 instantiation, which can result in unrecognizable insns.
6954 Avoid this by forcing all overflows into registers. */
6955 if (TREE_CONSTANT_OVERFLOW (exp)
6956 && modifier != EXPAND_INITIALIZER)
6957 temp = force_reg (mode, temp);
6958
6959 return temp;
6960
6961 case VECTOR_CST:
6962 return const_vector_from_tree (exp);
6963
bbf6f052 6964 case CONST_DECL:
8403445a 6965 return expand_expr (DECL_INITIAL (exp), target, VOIDmode, modifier);
6966
6967 case REAL_CST:
6968 /* If optimized, generate immediate CONST_DOUBLE
6969 which will be turned into memory by reload if necessary.
6970
6971 We used to force a register so that loop.c could see it. But
6972 this does not allow gen_* patterns to perform optimizations with
6973 the constants. It also produces two insns in cases like "x = 1.0;".
6974 On most machines, floating-point constants are not permitted in
6975 many insns, so we'd end up copying it to a register in any case.
6976
6977 Now, we do the copying in expand_binop, if appropriate. */
6978 return CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (exp),
6979 TYPE_MODE (TREE_TYPE (exp)));
6980
6981 case COMPLEX_CST:
6982 case STRING_CST:
afc6aaab 6983 temp = output_constant_def (exp, 1);
bbf6f052 6984
afc6aaab 6985 /* temp contains a constant address.
6986 On RISC machines where a constant address isn't valid,
6987 make some insns to get that address into a register. */
afc6aaab 6988 if (modifier != EXPAND_CONST_ADDRESS
6989 && modifier != EXPAND_INITIALIZER
6990 && modifier != EXPAND_SUM
6991 && (! memory_address_p (mode, XEXP (temp, 0))
6992 || flag_force_addr))
6993 return replace_equiv_address (temp,
6994 copy_rtx (XEXP (temp, 0)));
6995 return temp;
bbf6f052 6996
bf1e5319 6997 case EXPR_WITH_FILE_LOCATION:
6998 {
6999 rtx to_return;
070588f0 7000 location_t saved_loc = input_location;
b24f65cd 7001 input_filename = EXPR_WFL_FILENAME (exp);
d479d37f 7002 input_line = EXPR_WFL_LINENO (exp);
b24f65cd 7003 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
d479d37f 7004 emit_line_note (input_filename, input_line);
6ad7895a 7005 /* Possibly avoid switching back and forth here. */
b0ca54af 7006 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
070588f0 7007 input_location = saved_loc;
7008 return to_return;
7009 }
bf1e5319 7010
7011 case SAVE_EXPR:
7012 context = decl_function_context (exp);
d6a5ac33 7013
7014 /* If this SAVE_EXPR was at global context, assume we are an
7015 initialization function and move it into our context. */
7016 if (context == 0)
7017 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
7018
7019 /* We treat inline_function_decl as an alias for the current function
7020 because that is the inline function whose vars, types, etc.
7021 are being merged into the current function.
7022 See expand_inline_function. */
7023 if (context == current_function_decl || context == inline_function_decl)
7024 context = 0;
7025
7026 /* If this is non-local, handle it. */
7027 if (context)
7028 {
d0977240
RK
7029 /* The following call just exists to abort if the context is
7030 not of a containing function. */
7031 find_function_data (context);
7032
7033 temp = SAVE_EXPR_RTL (exp);
7034 if (temp && GET_CODE (temp) == REG)
7035 {
f29a2bd1 7036 put_var_into_stack (exp, /*rescan=*/true);
7037 temp = SAVE_EXPR_RTL (exp);
7038 }
7039 if (temp == 0 || GET_CODE (temp) != MEM)
7040 abort ();
7041 return
7042 replace_equiv_address (temp,
7043 fix_lexical_addr (XEXP (temp, 0), exp));
7044 }
7045 if (SAVE_EXPR_RTL (exp) == 0)
7046 {
7047 if (mode == VOIDmode)
7048 temp = const0_rtx;
7049 else
7050 temp = assign_temp (build_qualified_type (type,
7051 (TYPE_QUALS (type)
7052 | TYPE_QUAL_CONST)),
7053 3, 0, 0);
1499e0a8 7054
bbf6f052 7055 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 7056 if (!optimize && GET_CODE (temp) == REG)
7057 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
7058 save_expr_regs);
7059
7060 /* If the mode of TEMP does not match that of the expression, it
7061 must be a promoted value. We pass store_expr a SUBREG of the
7062 wanted mode but mark it so that we know that it was already
3ac1a319 7063 extended. */
7064
7065 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
7066 {
ddef6bc7 7067 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
3ac1a319 7068 promote_mode (type, mode, &unsignedp, 0);
ff78f773 7069 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 7070 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7071 }
7072
4c7a0be9 7073 if (temp == const0_rtx)
37a08a29 7074 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
4c7a0be9 7075 else
7076 store_expr (TREE_OPERAND (exp, 0), temp,
7077 modifier == EXPAND_STACK_PARM ? 2 : 0);
7078
7079 TREE_USED (exp) = 1;
bbf6f052 7080 }
7081
7082 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
7083 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 7084 but mark it so that we know that it was already extended. */
7085
7086 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
7087 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
7088 {
7089 /* Compute the signedness and make the proper SUBREG. */
7090 promote_mode (type, mode, &unsignedp, 0);
ddef6bc7 7091 temp = gen_lowpart_SUBREG (mode, SAVE_EXPR_RTL (exp));
1499e0a8 7092 SUBREG_PROMOTED_VAR_P (temp) = 1;
7879b81e 7093 SUBREG_PROMOTED_UNSIGNED_SET (temp, unsignedp);
7094 return temp;
7095 }
7096
7097 return SAVE_EXPR_RTL (exp);
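      /* Informally, a SAVE_EXPR guarantees its operand is evaluated at
	 most once: the first expansion behaves like "tmp = <operand>"
	 and later ones like a plain read of tmp, with SAVE_EXPR_RTL
	 playing the role of tmp.  */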
7098
7099 case UNSAVE_EXPR:
7100 {
7101 rtx temp;
7102 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7103 TREE_OPERAND (exp, 0)
7104 = (*lang_hooks.unsave_expr_now) (TREE_OPERAND (exp, 0));
7105 return temp;
7106 }
7107
b50d17a1 7108 case PLACEHOLDER_EXPR:
e9a25f70 7109 {
f47e9b4e 7110 tree old_list = placeholder_list;
738cc472 7111 tree placeholder_expr = 0;
e9a25f70 7112
f47e9b4e 7113 exp = find_placeholder (exp, &placeholder_expr);
7114 if (exp == 0)
7115 abort ();
7116
f47e9b4e 7117 placeholder_list = TREE_CHAIN (placeholder_expr);
37a08a29 7118 temp = expand_expr (exp, original_target, tmode, modifier);
7119 placeholder_list = old_list;
7120 return temp;
e9a25f70 7121 }
b50d17a1 7122
7123 case WITH_RECORD_EXPR:
7124 /* Put the object on the placeholder list, expand our first operand,
7125 and pop the list. */
7126 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
7127 placeholder_list);
7128 target = expand_expr (TREE_OPERAND (exp, 0), original_target, tmode,
7129 modifier);
7130 placeholder_list = TREE_CHAIN (placeholder_list);
7131 return target;
7132
7133 case GOTO_EXPR:
7134 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
7135 expand_goto (TREE_OPERAND (exp, 0));
7136 else
7137 expand_computed_goto (TREE_OPERAND (exp, 0));
7138 return const0_rtx;
7139
bbf6f052 7140 case EXIT_EXPR:
df4ae160 7141 expand_exit_loop_if_false (NULL,
e44842fe 7142 invert_truthvalue (TREE_OPERAND (exp, 0)));
7143 return const0_rtx;
7144
7145 case LABELED_BLOCK_EXPR:
7146 if (LABELED_BLOCK_BODY (exp))
b0832fe1 7147 expand_expr_stmt_value (LABELED_BLOCK_BODY (exp), 0, 1);
30f7a378 7148 /* Should perhaps use expand_label, but this is simpler and safer. */
0a5fee32 7149 do_pending_stack_adjust ();
7150 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
7151 return const0_rtx;
7152
7153 case EXIT_BLOCK_EXPR:
7154 if (EXIT_BLOCK_RETURN (exp))
ab87f8c8 7155 sorry ("returned value in block_exit_expr");
7156 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
7157 return const0_rtx;
7158
bbf6f052 7159 case LOOP_EXPR:
0088fcb1 7160 push_temp_slots ();
bbf6f052 7161 expand_start_loop (1);
b0832fe1 7162 expand_expr_stmt_value (TREE_OPERAND (exp, 0), 0, 1);
bbf6f052 7163 expand_end_loop ();
0088fcb1 7164 pop_temp_slots ();
7165
7166 return const0_rtx;
7167
7168 case BIND_EXPR:
7169 {
7170 tree vars = TREE_OPERAND (exp, 0);
7171
7172 /* Need to open a binding contour here because
e976b8b2 7173 if there are any cleanups they must be contained here. */
8e91754e 7174 expand_start_bindings (2);
bbf6f052 7175
7176 /* Mark the corresponding BLOCK for output in its proper place. */
7177 if (TREE_OPERAND (exp, 2) != 0
7178 && ! TREE_USED (TREE_OPERAND (exp, 2)))
43577e6b 7179 (*lang_hooks.decls.insert_block) (TREE_OPERAND (exp, 2));
7180
7181 /* If VARS have not yet been expanded, expand them now. */
7182 while (vars)
7183 {
19e7881c 7184 if (!DECL_RTL_SET_P (vars))
4977bab6 7185 expand_decl (vars);
7186 expand_decl_init (vars);
7187 vars = TREE_CHAIN (vars);
7188 }
7189
37a08a29 7190 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, modifier);
7191
7192 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
7193
7194 return temp;
7195 }
7196
7197 case RTL_EXPR:
7198 if (RTL_EXPR_SEQUENCE (exp))
7199 {
7200 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
7201 abort ();
2f937369 7202 emit_insn (RTL_EXPR_SEQUENCE (exp));
7203 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
7204 }
7205 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
7206 free_temps_for_rtl_expr (exp);
7207 return RTL_EXPR_RTL (exp);
7208
7209 case CONSTRUCTOR:
7210 /* If we don't need the result, just ensure we evaluate any
7211 subexpressions. */
7212 if (ignore)
7213 {
7214 tree elt;
37a08a29 7215
dd27116b 7216 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
7217 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode, 0);
7218
7219 return const0_rtx;
7220 }
3207b172 7221
7222 /* All elts simple constants => refer to a constant in memory. But
7223 if this is a non-BLKmode mode, let it store a field at a time
7224 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 7225 fold. Likewise, if we have a target we can use, it is best to
7226 store directly into the target unless the type is large enough
7227 that memcpy will be used. If we are making an initializer and
7228 all operands are constant, put it in memory as well.
7229
 7230	 FIXME: Avoid trying to fill vector constructors piecemeal.
7231 Output them with output_constant_def below unless we're sure
7232 they're zeros. This should go away when vector initializers
7233 are treated like VECTOR_CST instead of arrays.
7234 */
dd27116b 7235 else if ((TREE_STATIC (exp)
3207b172 7236 && ((mode == BLKmode
e5e809f4 7237 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 7238 || TREE_ADDRESSABLE (exp)
19caa751 7239 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
3a94c984 7240 && (! MOVE_BY_PIECES_P
7241 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
7242 TYPE_ALIGN (type)))
7243 && ((TREE_CODE (type) == VECTOR_TYPE
7244 && !is_zeros_p (exp))
7245 || ! mostly_zeros_p (exp)))))
7246 || ((modifier == EXPAND_INITIALIZER
7247 || modifier == EXPAND_CONST_ADDRESS)
7248 && TREE_CONSTANT (exp)))
bbf6f052 7249 {
bd7cf17e 7250 rtx constructor = output_constant_def (exp, 1);
19caa751 7251
7252 if (modifier != EXPAND_CONST_ADDRESS
7253 && modifier != EXPAND_INITIALIZER
7254 && modifier != EXPAND_SUM)
7255 constructor = validize_mem (constructor);
7256
7257 return constructor;
7258 }
7259 else
7260 {
7261 /* Handle calls that pass values in multiple non-contiguous
7262 locations. The Irix 6 ABI has examples of this. */
e5e809f4 7263 if (target == 0 || ! safe_from_p (target, exp, 1)
8403445a
AM
7264 || GET_CODE (target) == PARALLEL
7265 || modifier == EXPAND_STACK_PARM)
7266 target
7267 = assign_temp (build_qualified_type (type,
7268 (TYPE_QUALS (type)
7269 | (TREE_READONLY (exp)
7270 * TYPE_QUAL_CONST))),
c24ae149 7271 0, TREE_ADDRESSABLE (exp), 1);
07604beb 7272
de8920be 7273 store_constructor (exp, target, 0, int_expr_size (exp));
7274 return target;
7275 }
7276
7277 case INDIRECT_REF:
7278 {
7279 tree exp1 = TREE_OPERAND (exp, 0);
7581a30f 7280 tree index;
7281 tree string = string_constant (exp1, &index);
7282
06eaa86f 7283 /* Try to optimize reads from const strings. */
7284 if (string
7285 && TREE_CODE (string) == STRING_CST
7286 && TREE_CODE (index) == INTEGER_CST
05bccae2 7287 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7288 && GET_MODE_CLASS (mode) == MODE_INT
7289 && GET_MODE_SIZE (mode) == 1
37a08a29 7290 && modifier != EXPAND_WRITE)
0fb7aeda 7291 return gen_int_mode (TREE_STRING_POINTER (string)
21ef78aa 7292 [TREE_INT_CST_LOW (index)], mode);
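	/* E.g. a read such as "abc"[1] is folded here directly to the
	   character constant 'b' in the byte-wide integer mode, instead
	   of materializing the string in memory and loading from it.  */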
bbf6f052 7293
7294 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
7295 op0 = memory_address (mode, op0);
38a448ca 7296 temp = gen_rtx_MEM (mode, op0);
3bdf5ad1 7297 set_mem_attributes (temp, exp, 0);
1125706f 7298
7299 /* If we are writing to this object and its type is a record with
7300 readonly fields, we must mark it as readonly so it will
7301 conflict with readonly references to those fields. */
37a08a29 7302 if (modifier == EXPAND_WRITE && readonly_fields_p (type))
7303 RTX_UNCHANGING_P (temp) = 1;
7304
7305 return temp;
7306 }
7307
7308 case ARRAY_REF:
7309 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
7310 abort ();
bbf6f052 7311
bbf6f052 7312 {
7313 tree array = TREE_OPERAND (exp, 0);
7314 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
7315 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
fed3cef0 7316 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
08293add 7317 HOST_WIDE_INT i;
b50d17a1 7318
7319 /* Optimize the special-case of a zero lower bound.
7320
7321 We convert the low_bound to sizetype to avoid some problems
7322 with constant folding. (E.g. suppose the lower bound is 1,
7323 and its mode is QI. Without the conversion, (ARRAY
7324 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fed3cef0 7325 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
d4c89139 7326
742920c7 7327 if (! integer_zerop (low_bound))
fed3cef0 7328 index = size_diffop (index, convert (sizetype, low_bound));
742920c7 7329
742920c7 7330 /* Fold an expression like: "foo"[2].
7331 This is not done in fold so it won't happen inside &.
7332 Don't fold if this is for wide characters since it's too
7333 difficult to do correctly and this is a very rare case. */
742920c7 7334
7335 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7336 && TREE_CODE (array) == STRING_CST
742920c7 7337 && TREE_CODE (index) == INTEGER_CST
05bccae2 7338 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
7339 && GET_MODE_CLASS (mode) == MODE_INT
7340 && GET_MODE_SIZE (mode) == 1)
7341 return gen_int_mode (TREE_STRING_POINTER (array)
7342 [TREE_INT_CST_LOW (index)], mode);
bbf6f052 7343
7344 /* If this is a constant index into a constant array,
7345 just get the value from the array. Handle both the cases when
7346 we have an explicit constructor and when our operand is a variable
7347 that was declared const. */
4af3895e 7348
7349 if (modifier != EXPAND_CONST_ADDRESS && modifier != EXPAND_INITIALIZER
7350 && TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
05bccae2 7351 && TREE_CODE (index) == INTEGER_CST
3a94c984 7352 && 0 > compare_tree_int (index,
7353 list_length (CONSTRUCTOR_ELTS
7354 (TREE_OPERAND (exp, 0)))))
742920c7 7355 {
7356 tree elem;
7357
7358 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
7359 i = TREE_INT_CST_LOW (index);
7360 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
7361 ;
7362
7363 if (elem)
7364 return expand_expr (fold (TREE_VALUE (elem)), target, tmode,
7365 modifier);
742920c7 7366 }
3a94c984 7367
742920c7 7368 else if (optimize >= 1
7369 && modifier != EXPAND_CONST_ADDRESS
7370 && modifier != EXPAND_INITIALIZER
7371 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
7372 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
7373 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
7374 {
08293add 7375 if (TREE_CODE (index) == INTEGER_CST)
7376 {
7377 tree init = DECL_INITIAL (array);
7378
7379 if (TREE_CODE (init) == CONSTRUCTOR)
7380 {
665f2503 7381 tree elem;
742920c7 7382
05bccae2 7383 for (elem = CONSTRUCTOR_ELTS (init);
7384 (elem
7385 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
7386 elem = TREE_CHAIN (elem))
7387 ;
7388
c54b0a5e 7389 if (elem && !TREE_SIDE_EFFECTS (TREE_VALUE (elem)))
742920c7 7390 return expand_expr (fold (TREE_VALUE (elem)), target,
37a08a29 7391 tmode, modifier);
7392 }
7393 else if (TREE_CODE (init) == STRING_CST
7394 && 0 > compare_tree_int (index,
7395 TREE_STRING_LENGTH (init)))
7396 {
7397 tree type = TREE_TYPE (TREE_TYPE (init));
7398 enum machine_mode mode = TYPE_MODE (type);
7399
7400 if (GET_MODE_CLASS (mode) == MODE_INT
7401 && GET_MODE_SIZE (mode) == 1)
7402 return gen_int_mode (TREE_STRING_POINTER (init)
7403 [TREE_INT_CST_LOW (index)], mode);
5c80f6e6 7404 }
7405 }
7406 }
7407 }
afc6aaab 7408 goto normal_inner_ref;
7409
7410 case COMPONENT_REF:
4af3895e 7411 /* If the operand is a CONSTRUCTOR, we can just extract the
7412 appropriate field if it is present. */
7413 if (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR)
7414 {
7415 tree elt;
7416
7417 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
7418 elt = TREE_CHAIN (elt))
7419 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
7420 /* We can normally use the value of the field in the
7421 CONSTRUCTOR. However, if this is a bitfield in
7422 an integral mode that we can fit in a HOST_WIDE_INT,
7423 we must mask only the number of bits in the bitfield,
7424 since this is done implicitly by the constructor. If
7425 the bitfield does not meet either of those conditions,
7426 we can't do this optimization. */
7427 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
7428 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
7429 == MODE_INT)
7430 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
7431 <= HOST_BITS_PER_WIDE_INT))))
7432 {
7433 if (DECL_BIT_FIELD (TREE_PURPOSE (elt))
7434 && modifier == EXPAND_STACK_PARM)
7435 target = 0;
3a94c984 7436 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
7437 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
7438 {
7439 HOST_WIDE_INT bitsize
7440 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
7441 enum machine_mode imode
7442 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
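		  /* Worked example: for a 3-bit field, the unsigned
		     branch below masks with (1 << 3) - 1 == 7, while
		     the signed branch shifts left and then
		     arithmetically right by width - 3 bits, so the
		     stored pattern 101 reads back as -3.  */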
7443
7444 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
7445 {
7446 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
22273300 7447 op0 = expand_and (imode, op0, op1, target);
7448 }
7449 else
7450 {
7451 tree count
7452 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
7453 0);
7454
7455 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
7456 target, 0);
7457 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
7458 target, 0);
7459 }
7460 }
7461
7462 return op0;
7463 }
4af3895e 7464 }
afc6aaab 7465 goto normal_inner_ref;
4af3895e 7466
7467 case BIT_FIELD_REF:
7468 case ARRAY_RANGE_REF:
7469 normal_inner_ref:
7470 {
7471 enum machine_mode mode1;
770ae6cc 7472 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 7473 tree offset;
bbf6f052 7474 int volatilep = 0;
839c4796 7475 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
a06ef755 7476 &mode1, &unsignedp, &volatilep);
f47e9b4e 7477 rtx orig_op0;
bbf6f052 7478
7479 /* If we got back the original object, something is wrong. Perhaps
7480 we are evaluating an expression too early. In any event, don't
7481 infinitely recurse. */
7482 if (tem == exp)
7483 abort ();
7484
3d27140a 7485 /* If TEM's type is a union of variable size, pass TARGET to the inner
 7486	   computation, since it will need a temporary and TARGET is known
 7487	   to suffice.  This occurs in unchecked conversion in Ada.  */
3a94c984 7488
7489 orig_op0 = op0
7490 = expand_expr (tem,
7491 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
7492 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
7493 != INTEGER_CST)
8403445a 7494 && modifier != EXPAND_STACK_PARM
7495 ? target : NULL_RTX),
7496 VOIDmode,
7497 (modifier == EXPAND_INITIALIZER
7498 || modifier == EXPAND_CONST_ADDRESS
7499 || modifier == EXPAND_STACK_PARM)
f47e9b4e 7500 ? modifier : EXPAND_NORMAL);
bbf6f052 7501
8c8a8e34 7502 /* If this is a constant, put it into a register if it is a
14a774a9 7503 legitimate constant and OFFSET is 0 and memory if it isn't. */
7504 if (CONSTANT_P (op0))
7505 {
7506 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
7507 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
7508 && offset == 0)
7509 op0 = force_reg (mode, op0);
7510 else
7511 op0 = validize_mem (force_const_mem (mode, op0));
7512 }
7513
7514 if (offset != 0)
7515 {
7516 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode,
7517 EXPAND_SUM);
7bb0943f 7518
a2725049 7519 /* If this object is in a register, put it into memory.
7520 This case can't occur in C, but can in Ada if we have
7521 unchecked conversion of an expression from a scalar type to
7522 an array or record type. */
7523 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7524 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7525 {
7526 /* If the operand is a SAVE_EXPR, we can deal with this by
7527 forcing the SAVE_EXPR into memory. */
7528 if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
eeb35b45 7529 {
7530 put_var_into_stack (TREE_OPERAND (exp, 0),
7531 /*rescan=*/true);
7532 op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
7533 }
7534 else
7535 {
7536 tree nt
7537 = build_qualified_type (TREE_TYPE (tem),
7538 (TYPE_QUALS (TREE_TYPE (tem))
7539 | TYPE_QUAL_CONST));
7540 rtx memloc = assign_temp (nt, 1, 1, 1);
7541
7542 emit_move_insn (memloc, op0);
7543 op0 = memloc;
7544 }
7545 }
7546
7547 if (GET_CODE (op0) != MEM)
7548 abort ();
2d48c13d 7549
2d48c13d 7550#ifdef POINTERS_EXTEND_UNSIGNED
4b6c1672 7551 if (GET_MODE (offset_rtx) != Pmode)
267b28bd 7552 offset_rtx = convert_to_mode (Pmode, offset_rtx, 0);
7553#else
7554 if (GET_MODE (offset_rtx) != ptr_mode)
7555 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
7556#endif
7557
14a774a9 7558 /* A constant address in OP0 can have VOIDmode, we must not try
efd07ca7 7559 to call force_reg for that case. Avoid that case. */
7560 if (GET_CODE (op0) == MEM
7561 && GET_MODE (op0) == BLKmode
efd07ca7 7562 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 7563 && bitsize != 0
3a94c984 7564 && (bitpos % bitsize) == 0
89752202 7565 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
a06ef755 7566 && MEM_ALIGN (op0) == GET_MODE_ALIGNMENT (mode1))
89752202 7567 {
e3c8ea67 7568 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
7569 bitpos = 0;
7570 }
7571
7572 op0 = offset_address (op0, offset_rtx,
7573 highest_pow2_factor (offset));
7574 }
7575
7576 /* If OFFSET is making OP0 more aligned than BIGGEST_ALIGNMENT,
7577 record its alignment as BIGGEST_ALIGNMENT. */
7578 if (GET_CODE (op0) == MEM && bitpos == 0 && offset != 0
7579 && is_aligning_offset (offset, tem))
7580 set_mem_align (op0, BIGGEST_ALIGNMENT);
7581
7582 /* Don't forget about volatility even if this is a bitfield. */
7583 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
7584 {
7585 if (op0 == orig_op0)
7586 op0 = copy_rtx (op0);
7587
7588 MEM_VOLATILE_P (op0) = 1;
7589 }
7590
7591 /* The following code doesn't handle CONCAT.
7592 Assume only bitpos == 0 can be used for CONCAT, due to
7593 one element arrays having the same mode as its element. */
7594 if (GET_CODE (op0) == CONCAT)
7595 {
7596 if (bitpos != 0 || bitsize != GET_MODE_BITSIZE (GET_MODE (op0)))
7597 abort ();
7598 return op0;
7599 }
7600
7601 /* In cases where an aligned union has an unaligned object
7602 as a field, we might be extracting a BLKmode value from
7603 an integer-mode (e.g., SImode) object. Handle this case
7604 by doing the extract into an object as wide as the field
7605 (which we know to be the width of a basic mode), then
cb5fa0f8 7606 storing into memory, and changing the mode to BLKmode. */
bbf6f052 7607 if (mode1 == VOIDmode
ccc98036 7608 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
7609 || (mode1 != BLKmode && ! direct_load[(int) mode1]
7610 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
7611 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT
7612 && modifier != EXPAND_CONST_ADDRESS
7613 && modifier != EXPAND_INITIALIZER)
7614 /* If the field isn't aligned enough to fetch as a memref,
7615 fetch it as a bit field. */
7616 || (mode1 != BLKmode
7617 && (((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode)
7618 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))
15b19a7d 7619 && SLOW_UNALIGNED_ACCESS (mode1, MEM_ALIGN (op0)))
9e5f281f 7620 || (bitpos % BITS_PER_UNIT != 0)))
7621 /* If the type and the field are a constant size and the
7622 size of the type isn't the same size as the bitfield,
7623 we must use bitfield operations. */
7624 || (bitsize >= 0
7625 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7626 == INTEGER_CST)
7627 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
a06ef755 7628 bitsize)))
bbf6f052 7629 {
7630 enum machine_mode ext_mode = mode;
7631
7632 if (ext_mode == BLKmode
7633 && ! (target != 0 && GET_CODE (op0) == MEM
7634 && GET_CODE (target) == MEM
7635 && bitpos % BITS_PER_UNIT == 0))
7636 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7637
7638 if (ext_mode == BLKmode)
7639 {
7640 /* In this case, BITPOS must start at a byte boundary and
7641 TARGET, if specified, must be a MEM. */
7642 if (GET_CODE (op0) != MEM
7643 || (target != 0 && GET_CODE (target) != MEM)
7644 || bitpos % BITS_PER_UNIT != 0)
7645 abort ();
7646
f4ef873c 7647 op0 = adjust_address (op0, VOIDmode, bitpos / BITS_PER_UNIT);
7648 if (target == 0)
7649 target = assign_temp (type, 0, 1, 1);
7650
7651 emit_block_move (target, op0,
a06ef755 7652 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
44bb111a 7653 / BITS_PER_UNIT),
7654 (modifier == EXPAND_STACK_PARM
7655 ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
3a94c984 7656
7657 return target;
7658 }
bbf6f052 7659
7660 op0 = validize_mem (op0);
7661
7662 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
04050c69 7663 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7664
7665 op0 = extract_bit_field (op0, bitsize, bitpos, unsignedp,
7666 (modifier == EXPAND_STACK_PARM
7667 ? NULL_RTX : target),
7668 ext_mode, ext_mode,
bbf6f052 7669 int_size_in_bytes (TREE_TYPE (tem)));
7670
7671 /* If the result is a record type and BITSIZE is narrower than
7672 the mode of OP0, an integral mode, and this is a big endian
7673 machine, we must put the field into the high-order bits. */
7674 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7675 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
65a07688 7676 && bitsize < (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (op0)))
7677 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7678 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7679 - bitsize),
7680 op0, 1);
7681
7682 if (mode == BLKmode)
7683 {
c3d32120 7684 rtx new = assign_temp (build_qualified_type
7685 ((*lang_hooks.types.type_for_mode)
7686 (ext_mode, 0),
c3d32120 7687 TYPE_QUAL_CONST), 0, 1, 1);
7688
7689 emit_move_insn (new, op0);
7690 op0 = copy_rtx (new);
7691 PUT_MODE (op0, BLKmode);
c3d32120 7692 set_mem_attributes (op0, exp, 1);
7693 }
7694
7695 return op0;
7696 }
7697
7698 /* If the result is BLKmode, use that to access the object
7699 now as well. */
7700 if (mode == BLKmode)
7701 mode1 = BLKmode;
7702
7703 /* Get a reference to just this component. */
7704 if (modifier == EXPAND_CONST_ADDRESS
7705 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
f1ec5147 7706 op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
bbf6f052 7707 else
f4ef873c 7708 op0 = adjust_address (op0, mode1, bitpos / BITS_PER_UNIT);
41472af8 7709
7710 if (op0 == orig_op0)
7711 op0 = copy_rtx (op0);
7712
3bdf5ad1 7713 set_mem_attributes (op0, exp, 0);
dc6d66b3 7714 if (GET_CODE (XEXP (op0, 0)) == REG)
a06ef755 7715 mark_reg_pointer (XEXP (op0, 0), MEM_ALIGN (op0));
dc6d66b3 7716
bbf6f052 7717 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 7718 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 7719 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 7720 || modifier == EXPAND_INITIALIZER)
bbf6f052 7721 return op0;
0d15e60c 7722 else if (target == 0)
bbf6f052 7723 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 7724
7725 convert_move (target, op0, unsignedp);
7726 return target;
7727 }
7728
7729 case VTABLE_REF:
7730 {
7731 rtx insn, before = get_last_insn (), vtbl_ref;
7732
7733 /* Evaluate the interior expression. */
7734 subtarget = expand_expr (TREE_OPERAND (exp, 0), target,
7735 tmode, modifier);
7736
7737 /* Get or create an instruction off which to hang a note. */
7738 if (REG_P (subtarget))
7739 {
7740 target = subtarget;
7741 insn = get_last_insn ();
7742 if (insn == before)
7743 abort ();
7744 if (! INSN_P (insn))
7745 insn = prev_nonnote_insn (insn);
7746 }
7747 else
7748 {
7749 target = gen_reg_rtx (GET_MODE (subtarget));
7750 insn = emit_move_insn (target, subtarget);
7751 }
7752
7753 /* Collect the data for the note. */
7754 vtbl_ref = XEXP (DECL_RTL (TREE_OPERAND (exp, 1)), 0);
7755 vtbl_ref = plus_constant (vtbl_ref,
7756 tree_low_cst (TREE_OPERAND (exp, 2), 0));
7757 /* Discard the initial CONST that was added. */
7758 vtbl_ref = XEXP (vtbl_ref, 0);
7759
7760 REG_NOTES (insn)
7761 = gen_rtx_EXPR_LIST (REG_VTABLE_REF, vtbl_ref, REG_NOTES (insn));
7762
7763 return target;
7764 }
7765
7766 /* Intended for a reference to a buffer of a file-object in Pascal.
7767 But it's not certain that a special tree code will really be
7768 necessary for these. INDIRECT_REF might work for them. */
7769 case BUFFER_REF:
7770 abort ();
7771
7308a047 7772 case IN_EXPR:
7308a047 7773 {
7774 /* Pascal set IN expression.
7775
7776 Algorithm:
7777 rlo = set_low - (set_low%bits_per_word);
7778 the_word = set [ (index - rlo)/bits_per_word ];
7779 bit_index = index % bits_per_word;
7780 bitmask = 1 << bit_index;
7781 return !!(the_word & bitmask); */
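	   /* Worked example of the algorithm above, taking bits_per_word
	      to be 8 (the code below uses BITS_PER_UNIT): with
	      set_low = 16 and index = 21, rlo = 16,
	      the_word = set[(21 - 16) / 8] = set[0],
	      bit_index = 21 % 8 = 5, and bitmask = 1 << 5 = 0x20.  */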
7782
7783 tree set = TREE_OPERAND (exp, 0);
7784 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 7785 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 7786 tree set_type = TREE_TYPE (set);
7787 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7788 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
7789 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7790 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7791 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7792 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7793 rtx setaddr = XEXP (setval, 0);
7794 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7795 rtx rlow;
7796 rtx diff, quo, rem, addr, bit, result;
7308a047 7797
7798 /* If domain is empty, answer is no. Likewise if index is constant
7799 and out of bounds. */
51723711 7800 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 7801 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 7802 && tree_int_cst_lt (set_high_bound, set_low_bound))
7803 || (TREE_CODE (index) == INTEGER_CST
7804 && TREE_CODE (set_low_bound) == INTEGER_CST
7805 && tree_int_cst_lt (index, set_low_bound))
7806 || (TREE_CODE (set_high_bound) == INTEGER_CST
7807 && TREE_CODE (index) == INTEGER_CST
7808 && tree_int_cst_lt (set_high_bound, index))))
7809 return const0_rtx;
7810
7811 if (target == 0)
7812 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7813
7814 /* If we get here, we have to generate the code for both cases
7815 (in range and out of range). */
7816
7817 op0 = gen_label_rtx ();
7818 op1 = gen_label_rtx ();
7819
7820 if (! (GET_CODE (index_val) == CONST_INT
7821 && GET_CODE (lo_r) == CONST_INT))
7822 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7823 GET_MODE (index_val), iunsignedp, op1);
7824
7825 if (! (GET_CODE (index_val) == CONST_INT
7826 && GET_CODE (hi_r) == CONST_INT))
7827 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7828 GET_MODE (index_val), iunsignedp, op1);
7829
7830 /* Calculate the element number of bit zero in the first word
7831 of the set. */
7832 if (GET_CODE (lo_r) == CONST_INT)
17938e57 7833 rlow = GEN_INT (INTVAL (lo_r)
3a94c984 7834 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 7835 else
7836 rlow = expand_binop (index_mode, and_optab, lo_r,
7837 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 7838 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 7839
7840 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7841 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7842
7843 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 7844 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 7845 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
7846 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7847
7308a047 7848 addr = memory_address (byte_mode,
7849 expand_binop (index_mode, add_optab, diff,
7850 setaddr, NULL_RTX, iunsignedp,
17938e57 7851 OPTAB_LIB_WIDEN));
d6a5ac33 7852
3a94c984 7853 /* Extract the bit we want to examine. */
7308a047 7854 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 7855 gen_rtx_MEM (byte_mode, addr),
7856 make_tree (TREE_TYPE (index), rem),
7857 NULL_RTX, 1);
7858 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7859 GET_MODE (target) == byte_mode ? target : 0,
7308a047 7860 1, OPTAB_LIB_WIDEN);
7861
7862 if (result != target)
7863 convert_move (target, result, 1);
7864
7865 /* Output the code to handle the out-of-range case. */
7866 emit_jump (op0);
7867 emit_label (op1);
7868 emit_move_insn (target, const0_rtx);
7869 emit_label (op0);
7870 return target;
7871 }
7872
bbf6f052 7873 case WITH_CLEANUP_EXPR:
6ad7895a 7874 if (WITH_CLEANUP_EXPR_RTL (exp) == 0)
bbf6f052 7875 {
6ad7895a 7876 WITH_CLEANUP_EXPR_RTL (exp)
37a08a29 7877 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7878 expand_decl_cleanup_eh (NULL_TREE, TREE_OPERAND (exp, 1),
7879 CLEANUP_EH_ONLY (exp));
e976b8b2 7880
bbf6f052 7881 /* That's it for this cleanup. */
6ad7895a 7882 TREE_OPERAND (exp, 1) = 0;
bbf6f052 7883 }
6ad7895a 7884 return WITH_CLEANUP_EXPR_RTL (exp);
bbf6f052 7885
7886 case CLEANUP_POINT_EXPR:
7887 {
7888 /* Start a new binding layer that will keep track of all cleanup
7889 actions to be performed. */
8e91754e 7890 expand_start_bindings (2);
e976b8b2 7891
d93d4205 7892 target_temp_slot_level = temp_slot_level;
e976b8b2 7893
37a08a29 7894 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
7895 /* If we're going to use this value, load it up now. */
7896 if (! ignore)
7897 op0 = force_not_mem (op0);
d93d4205 7898 preserve_temp_slots (op0);
e976b8b2 7899 expand_end_bindings (NULL_TREE, 0, 0);
7900 }
7901 return op0;
7902
7903 case CALL_EXPR:
7904 /* Check for a built-in function. */
7905 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
7906 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7907 == FUNCTION_DECL)
bbf6f052 7908 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
0fb7aeda 7909 {
7910 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7911 == BUILT_IN_FRONTEND)
7912 return (*lang_hooks.expand_expr) (exp, original_target,
7913 tmode, modifier);
7914 else
7915 return expand_builtin (exp, target, subtarget, tmode, ignore);
7916 }
d6a5ac33 7917
8129842c 7918 return expand_call (exp, target, ignore);
7919
7920 case NON_LVALUE_EXPR:
7921 case NOP_EXPR:
7922 case CONVERT_EXPR:
7923 case REFERENCE_EXPR:
4a53008b 7924 if (TREE_OPERAND (exp, 0) == error_mark_node)
a592f288 7925 return const0_rtx;
4a53008b 7926
7927 if (TREE_CODE (type) == UNION_TYPE)
7928 {
7929 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9 7930
7931 /* If both input and output are BLKmode, this conversion isn't doing
7932 anything except possibly changing memory attribute. */
7933 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode)
7934 {
7935 rtx result = expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7936 modifier);
7937
7938 result = copy_rtx (result);
7939 set_mem_attributes (result, exp, 0);
7940 return result;
7941 }
14a774a9 7942
bbf6f052 7943 if (target == 0)
1da68f56 7944 target = assign_temp (type, 0, 1, 1);
d6a5ac33 7945
7946 if (GET_CODE (target) == MEM)
7947 /* Store data into beginning of memory target. */
7948 store_expr (TREE_OPERAND (exp, 0),
8403445a
AM
7949 adjust_address (target, TYPE_MODE (valtype), 0),
7950 modifier == EXPAND_STACK_PARM ? 2 : 0);
1499e0a8 7951
7952 else if (GET_CODE (target) == REG)
7953 /* Store this field into a union of the proper type. */
7954 store_field (target,
7955 MIN ((int_size_in_bytes (TREE_TYPE
7956 (TREE_OPERAND (exp, 0)))
7957 * BITS_PER_UNIT),
8752c357 7958 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
14a774a9 7959 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
a06ef755 7960 VOIDmode, 0, type, 0);
bbf6f052
RK
7961 else
7962 abort ();
7963
7964 /* Return the entire union. */
7965 return target;
7966 }
d6a5ac33 7967
7f62854a
RK
7968 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7969 {
7970 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
37a08a29 7971 modifier);
7f62854a
RK
7972
7973 /* If the signedness of the conversion differs and OP0 is
7974 a promoted SUBREG, clear that indication since we now
7975 have to do the proper extension. */
7976 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7977 && GET_CODE (op0) == SUBREG)
7978 SUBREG_PROMOTED_VAR_P (op0) = 0;
7979
7980 return op0;
7981 }
7982
fdf473ae 7983 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);
12342f90
RS
7984 if (GET_MODE (op0) == mode)
7985 return op0;
12342f90 7986
d6a5ac33
RK
7987 /* If OP0 is a constant, just convert it into the proper mode. */
7988 if (CONSTANT_P (op0))
fdf473ae
RH
7989 {
7990 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
7991 enum machine_mode inner_mode = TYPE_MODE (inner_type);
7992
0fb7aeda 7993 if (modifier == EXPAND_INITIALIZER)
fdf473ae
RH
7994 return simplify_gen_subreg (mode, op0, inner_mode,
7995 subreg_lowpart_offset (mode,
7996 inner_mode));
7997 else
7998 return convert_modes (mode, inner_mode, op0,
7999 TREE_UNSIGNED (inner_type));
8000 }
12342f90 8001
26fcb35a 8002 if (modifier == EXPAND_INITIALIZER)
38a448ca 8003 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 8004
bbf6f052 8005 if (target == 0)
d6a5ac33
RK
8006 return
8007 convert_to_mode (mode, op0,
8008 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 8009 else
d6a5ac33
RK
8010 convert_move (target, op0,
8011 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
8012 return target;
8013

    case VIEW_CONVERT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, modifier);

      /* If the input and output modes are both the same, we are done.
	 Otherwise, if neither mode is BLKmode and both are integral and within
	 a word, we can use gen_lowpart.  If neither is true, make sure the
	 operand is in memory and convert the MEM to the new mode.  */
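      /* Illustrative note (editor's sketch, not from the original sources):
	 type-punning an `int' through VIEW_CONVERT_EXPR to `float' on a
	 typical 32-bit target takes the memory path below, since SImode
	 and SFmode are different mode classes; the value is spilled to a
	 stack temporary and the MEM is re-read in the new mode via
	 adjust_address.  Two integral views of the same bits that share
	 a mode fall through the first, no-op arm instead.  */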
      if (TYPE_MODE (type) == GET_MODE (op0))
	;
      else if (TYPE_MODE (type) != BLKmode && GET_MODE (op0) != BLKmode
	       && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
	       && GET_MODE_CLASS (TYPE_MODE (type)) == MODE_INT
	       && GET_MODE_SIZE (TYPE_MODE (type)) <= UNITS_PER_WORD
	       && GET_MODE_SIZE (GET_MODE (op0)) <= UNITS_PER_WORD)
	op0 = gen_lowpart (TYPE_MODE (type), op0);
      else if (GET_CODE (op0) != MEM)
	{
	  /* If the operand is not a MEM, force it into memory.  Since we
	     are going to be changing the mode of the MEM, don't call
	     force_const_mem for constants because we don't allow pool
	     constants to change mode.  */
	  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	  if (TREE_ADDRESSABLE (exp))
	    abort ();

	  if (target == 0 || GET_MODE (target) != TYPE_MODE (inner_type))
	    target
	      = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 GET_MODE_SIZE (TYPE_MODE (inner_type)), 0, inner_type);

	  emit_move_insn (target, op0);
	  op0 = target;
	}

      /* At this point, OP0 is in the correct mode.  If the output type is such
	 that the operand is known to be aligned, indicate that it is.
	 Otherwise, we need only be concerned about alignment for non-BLKmode
	 results.  */
      if (GET_CODE (op0) == MEM)
	{
	  op0 = copy_rtx (op0);

	  if (TYPE_ALIGN_OK (type))
	    set_mem_align (op0, MAX (MEM_ALIGN (op0), TYPE_ALIGN (type)));
	  else if (TYPE_MODE (type) != BLKmode && STRICT_ALIGNMENT
		   && MEM_ALIGN (op0) < GET_MODE_ALIGNMENT (TYPE_MODE (type)))
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      HOST_WIDE_INT temp_size
		= MAX (int_size_in_bytes (inner_type),
		       (HOST_WIDE_INT) GET_MODE_SIZE (TYPE_MODE (type)));
	      rtx new = assign_stack_temp_for_type (TYPE_MODE (type),
						    temp_size, 0, type);
	      rtx new_with_op0_mode = adjust_address (new, GET_MODE (op0), 0);

	      if (TREE_ADDRESSABLE (exp))
		abort ();

	      if (GET_MODE (op0) == BLKmode)
		emit_block_move (new_with_op0_mode, op0,
				 GEN_INT (GET_MODE_SIZE (TYPE_MODE (type))),
				 (modifier == EXPAND_STACK_PARM
				  ? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));
	      else
		emit_move_insn (new_with_op0_mode, op0);

	      op0 = new;
	    }

	  op0 = adjust_address (op0, TYPE_MODE (type), 0);
	}

      return op0;

    case PLUS_EXPR:
      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? addv_optab : add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
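      /* For example (illustrative, assuming a 32-bit target where `arr'
	 is a global array of 4-byte ints): P = &arr[5] reaches here as
	 (symbol_ref "arr") plus (const_int 20), and plus_constant folds
	 it to the single rtx (const (plus (symbol_ref "arr")
	 (const_int 20))).  If that is not a valid address on the target,
	 force_operand below loads it into a register instead.  */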
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
	{
	  if (modifier == EXPAND_STACK_PARM)
	    target = 0;
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 (modifier == EXPAND_INITIALIZER
				  ? EXPAND_INITIALIZER : EXPAND_SUM));
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Don't go to both_summands if modifier
		     says it's not right to return a PLUS.  */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		    goto binop2;
		  goto both_summands;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP0, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
	  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
	  if (op0 == const0_rtx)
	    return op1;
	  if (op1 == const0_rtx)
	    return op0;
	  goto binop2;
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */
    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}
      /* If adding to a sum including a constant,
	 associate it to put the constant outside.  */
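      /* Sketch of the reassociation below (illustrative): given
	 OP0 = (reg X) and OP1 = (plus (reg Y) (const_int 12)), we form
	 (plus (plus X Y) (const_int 12)), so the constant migrates to
	 the outermost position where plus_constant and address folding
	 can see it.  */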
      if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
	  rtx constant_term = const0_rtx;

	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
	  if (temp != 0)
	    op0 = temp;
	  /* Ensure that MULT comes first if there is one.  */
	  else if (GET_CODE (op0) == MULT)
	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

	  /* Let's also eliminate constants from op0 if possible.  */
	  op0 = eliminate_constant_term (op0, &constant_term);

	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
	     their sum should be a constant.  Form it into OP1, since the
	     result we want will then be OP0 + OP1.  */

	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  if (temp != 0)
	    op1 = temp;
	  else
	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
	}

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
	temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);

    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode,
				 modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
				 modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}

      this_optab = ! unsignedp && flag_trapv
		   && (GET_MODE_CLASS (mode) == MODE_INT)
		   ? subv_optab : sub_optab;

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, modifier);

      /* Convert A - const to A + (-const).  */
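      /* E.g. (illustrative): A - 3 becomes A + (-3); the negated
	 CONST_INT then flows through the PLUS_EXPR reassociation at
	 both_summands above, so subtraction of a constant needs no
	 code of its own.  */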
      if (GET_CODE (op1) == CONST_INT)
	{
	  op1 = negate_rtx (mode, op1);
	  goto both_summands;
	}

      goto binop2;

    case MULT_EXPR:
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && host_integerp (TREE_OPERAND (exp, 1), 0))
	{
	  tree exp1 = TREE_OPERAND (exp, 1);

	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  /* If we knew for certain that this is arithmetic for an array
	     reference, and we knew the bounds of the array, then we could
	     apply the distributive law across (PLUS X C) for constant C.
	     Without such knowledge, we risk overflowing the computation
	     when both X and C are large, but X+C isn't.  */
	  /* ??? Could perhaps special-case EXP being unsigned and C being
	     positive.  In that case we are certain that X+C is no smaller
	     than X and so the transformed expression will overflow iff the
	     original would have.  */

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return gen_rtx_MULT (mode, op0,
			       gen_int_mode (tree_low_cst (exp1, 0),
					     TYPE_MODE (TREE_TYPE (exp1))));
	}

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
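      /* For example (illustrative, assuming a target that provides a
	 mulhisi-style widening-multiply pattern): with `short a, b;',
	 the source `(int) a * (int) b' reaches here as a multiplication
	 of two NOP_EXPR-extended HImode values.  Rather than extending
	 both operands to SImode and doing a full SImode multiply, the
	 code below multiplies in HImode and produces the SImode result
	 directly via smul_widen_optab/umul_widen_optab.  */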
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			       ? smul_widen_optab : umul_widen_optab);
	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			? umul_widen_optab : smul_widen_optab);
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  goto binop2;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem;
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = convert_modes (innermode, mode,
					 expand_expr (TREE_OPERAND (exp, 1),
						      NULL_RTX, VOIDmode, 0),
					 unsignedp);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  htem = expand_mult_highpart_adjust (innermode,
						      gen_highpart (innermode, temp),
						      op0, op1,
						      gen_highpart (innermode, temp),
						      unsignedp);
		  emit_move_insn (gen_highpart (innermode, temp), htem);
		  return temp;
		}
	    }
	}
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM
	 then if the divisor is constant we can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
8448
8449 case RDIV_EXPR:
b7e9703c
JH
8450 /* Emit a/b as a*(1/b). Later we may manage CSE the reciprocal saving
8451 expensive divide. If not, combine will rebuild the original
8452 computation. */
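      /* E.g. (illustrative): for `x[i] / s' inside a loop, this rewrite
	 yields `x[i] * (1.0 / s)'; if `s' is loop-invariant the
	 reciprocal can be computed once and the per-iteration divide
	 becomes a multiply.  The transformation can change rounding,
	 which is why it is gated on flag_unsafe_math_optimizations.  */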
      if (flag_unsafe_math_optimizations && optimize && !optimize_size
	  && TREE_CODE (type) == REAL_TYPE
	  && !real_onep (TREE_OPERAND (exp, 0)))
	return expand_expr (build (MULT_EXPR, type, TREE_OPERAND (exp, 0),
				   build (RDIV_EXPR, type,
					  build_real (type, dconst1),
					  TREE_OPERAND (exp, 1))),
			    target, tmode, modifier);
      this_optab = sdiv_optab;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0 || modifier == EXPAND_STACK_PARM)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode,
			  ! unsignedp && flag_trapv
			  && (GET_MODE_CLASS (mode) == MODE_INT)
			  ? negv_optab : neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target, unsignedp,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));

    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0
	  || modifier == EXPAND_STACK_PARM
	  || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (GET_CODE (target) == REG
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
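      /* Sketch of the fallback sequence for MAX (A, B) (illustrative):

	     target = A;
	     if (target >= B) goto lab;
	     target = B;
	   lab:

	 The branchy form below is used only when no min/max instruction
	 pattern succeeds via expand_binop.  */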
      this_optab = (TREE_UNSIGNED (type)
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (GET_CODE (target) == MEM)
	target = gen_reg_rtx (mode);

      if (target != op0)
	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  target, op1, NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  op1, target, NULL_RTX, op0);
	}
      else
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				   unsignedp, mode, NULL_RTX, NULL_RTX,
				   op0);
	}
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;

    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case CLZ_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, clz_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case CTZ_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ctz_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case POPCOUNT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, popcount_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case PARITY_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, parity_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worthwhile.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      temp = do_store_flag (exp,
			    modifier != EXPAND_STACK_PARM ? target : NULL_RTX,
			    tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
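      /* Sketch of the emitted sequence (illustrative), for
	 `r = (foo != 0)' with FOO already in the result register:

	     if (r == 0) goto lab;
	     r = 1;
	   lab:

	 i.e. zero stays zero and any nonzero value is replaced by 1.  */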
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  /* If temp is constant, we can just compute the result.  */
	  if (GET_CODE (temp) == CONST_INT)
	    {
	      if (INTVAL (temp) != 0)
		emit_move_insn (target, const1_rtx);
	      else
		emit_move_insn (target, const0_rtx);

	      return target;
	    }

	  if (temp != original_target)
	    {
	      enum machine_mode mode1 = GET_MODE (temp);
	      if (mode1 == VOIDmode)
		mode1 = tmode != VOIDmode ? tmode : mode;

	      temp = copy_to_mode_reg (mode1, temp);
	    }

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}

      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0
	      || modifier == EXPAND_STACK_PARM
	      || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;

    case TRUTH_NOT_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, modifier);

    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
	 conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	{
	  tree iftrue = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	  tree iffalse = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	  if ((TREE_CODE_CLASS (TREE_CODE (iftrue)) == '2'
	       && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '2'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iftrue)) == '1'
		  && operand_equal_p (iffalse, TREE_OPERAND (iftrue, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (iffalse)) == '1'
		  && operand_equal_p (iftrue, TREE_OPERAND (iffalse, 0), 0)))
	    return expand_expr (build1 (NOP_EXPR, type,
					build (COND_EXPR, TREE_TYPE (iftrue),
					       TREE_OPERAND (exp, 0),
					       iftrue, iffalse)),
				target, tmode, modifier);
	}

      {
	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	/* If an arm of the branch requires a cleanup,
	   only that cleanup is performed.  */

	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     modifier);
		return const0_rtx;
	      }

	    if (modifier == EXPAND_STACK_PARM)
	      target = 0;
	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, modifier);
	    if (GET_MODE (op0) == mode)
	      return op0;

	    if (target == 0)
	      target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	    return target;
	  }

	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */

	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (ignore)
	  temp = 0;
	else if (modifier == EXPAND_STACK_PARM)
	  temp = assign_temp (type, 0, 0, 1);
	else if (original_target
		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
		     || (singleton && GET_CODE (original_target) == REG
			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			 && original_target == var_rtx (singleton)))
		 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
		 && (! can_conditionally_move_p (mode)
		     || GET_CODE (original_target) == REG
		     || TREE_ADDRESSABLE (type))
#endif
		 && (GET_CODE (original_target) != MEM
		     || TREE_ADDRESSABLE (type)))
	  temp = original_target;
	else if (TREE_ADDRESSABLE (type))
	  abort ();
	else
	  temp = assign_temp (type, 0, 0, 1);

	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
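	/* For example (illustrative, on a target where BRANCH_COST >= 3):
	   `x ? a + 4 : a' is compiled branch-free as
	   `a + ((x != 0) << 2)', using do_store_flag for the comparison
	   and expand_shift for the power-of-2 scale.  */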
	if (temp && singleton && binary_op
	    && (TREE_CODE (binary_op) == PLUS_EXPR
		|| TREE_CODE (binary_op) == MINUS_EXPR
		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		: integer_onep (TREE_OPERAND (binary_op, 1)))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    rtx result;
	    tree cond;
	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
			       ? addv_optab : add_optab)
			    : TREE_CODE (binary_op) == MINUS_EXPR
			    ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
			       ? subv_optab : sub_optab)
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
			    : xor_optab);

	    /* If we had X ? A : A + 1, do this as A + (X == 0).  */
	    if (singleton == TREE_OPERAND (exp, 1))
	      cond = invert_truthvalue (TREE_OPERAND (exp, 0));
	    else
	      cond = TREE_OPERAND (exp, 0);

	    result = do_store_flag (cond, (safe_from_p (temp, singleton, 1)
					   ? temp : NULL_RTX),
				    mode, BRANCH_COST <= 1);

	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
	      result = expand_shift (LSHIFT_EXPR, mode, result,
				     build_int_2 (tree_log2
						  (TREE_OPERAND
						   (binary_op, 1)),
						  0),
				     (safe_from_p (temp, singleton, 1)
				      ? temp : NULL_RTX), 0);

	    if (result)
	      {
		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		return expand_binop (mode, boptab, op1, result, temp,
				     unsignedp, OPTAB_LIB_WIDEN);
	      }
	  }

	do_pending_stack_adjust ();
	NO_DEFER_POP;
	op0 = gen_label_rtx ();

	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	  {
	    if (temp != 0)
	      {
		/* If the target conflicts with the other operand of the
		   binary op, we can't use it.  Also, we can't use the target
		   if it is a hard register, because evaluating the condition
		   might clobber it.  */
		if ((binary_op
		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
		    || (GET_CODE (temp) == REG
			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
		  temp = gen_reg_rtx (mode);
		store_expr (singleton, temp,
			    modifier == EXPAND_STACK_PARM ? 2 : 0);
	      }
	    else
	      expand_expr (singleton,
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    if (singleton == TREE_OPERAND (exp, 1))
	      jumpif (TREE_OPERAND (exp, 0), op0);
	    else
	      jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (binary_op && temp == 0)
	      /* Just touch the other operand.  */
	      expand_expr (TREE_OPERAND (binary_op, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    else if (binary_op)
	      store_expr (build (TREE_CODE (binary_op), type,
				 make_tree (type, temp),
				 TREE_OPERAND (binary_op, 1)),
			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      store_expr (build1 (TREE_CODE (unary_op), type,
				  make_tree (type, temp)),
			  temp, modifier == EXPAND_STACK_PARM ? 2 : 0);
	    op1 = op0;
	  }
	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	   comparison operator.  If we have one of these cases, set the
	   output to A, branch on A (cse will merge these two references),
	   then set the output to FOO.  */
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
	  {
	    if (GET_CODE (temp) == REG
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 1), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    jumpif (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 2), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    op1 = op0;
	  }
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 2), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
	  {
	    if (GET_CODE (temp) == REG
		&& REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 2), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 1), temp,
			modifier == EXPAND_STACK_PARM ? 2 : 0);
	    op1 = op0;
	  }
	else
	  {
	    op1 = gen_label_rtx ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();

	    /* One branch of the cond can be void, if it never returns.
	       For example: A ? throw : E.  */
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 1), temp,
			  modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    end_cleanup_deferral ();
	    emit_queue ();
	    emit_jump_insn (gen_jump (op1));
	    emit_barrier ();
	    emit_label (op0);
	    start_cleanup_deferral ();
	    if (temp != 0
		&& TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
	      store_expr (TREE_OPERAND (exp, 2), temp,
			  modifier == EXPAND_STACK_PARM ? 2 : 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	  }

	end_cleanup_deferral ();

	emit_queue ();
	emit_label (op1);
	OK_DEFER_POP;

	return temp;
      }

    case TARGET_EXPR:
      {
	/* Something needs to be initialized, but we didn't know
	   where that thing was when building the tree.  For example,
	   it could be the return value of a function, or a parameter
	   to a function which is laid out on the stack, or a temporary
	   variable which must be passed by reference.

	   We guarantee that the expression will either be constructed
	   or copied into our original target.  */
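	/* Illustrative case (editor's sketch): in C++, `S s = f ();'
	   where `f' returns a class object can be represented as a
	   TARGET_EXPR whose slot is `s' and whose initializer is the
	   call; the callee then constructs its return value directly
	   in the slot instead of in a temporary that would have to be
	   copied.  */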

	tree slot = TREE_OPERAND (exp, 0);
	tree cleanups = NULL_TREE;
	tree exp1;

	if (TREE_CODE (slot) != VAR_DECL)
	  abort ();

	if (! ignore)
	  target = original_target;

	/* Set this here so that if we get a target that refers to a
	   register variable that's already been used, put_reg_into_stack
	   knows that it should fix up those uses.  */
	TREE_USED (slot) = 1;

	if (target == 0)
	  {
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		target = assign_temp (type, 2, 0, 1);
		/* All temp slots at this level must not conflict.  */
		preserve_temp_slots (target);
		SET_DECL_RTL (slot, target);
		if (TREE_ADDRESSABLE (slot))
		  put_var_into_stack (slot, /*rescan=*/false);

		/* Since SLOT is not known to the called function
		   to belong to its stack frame, we must build an explicit
		   cleanup.  This case occurs when we must build up a reference
		   to pass the reference as an argument.  In this case,
		   it is very likely that such a reference need not be
		   built here.  */

		if (TREE_OPERAND (exp, 2) == 0)
		  TREE_OPERAND (exp, 2)
		    = (*lang_hooks.maybe_build_cleanup) (slot);
		cleanups = TREE_OPERAND (exp, 2);
	      }
	  }
	else
	  {
	    /* This case does occur, when expanding a parameter which
	       needs to be constructed on the stack.  The target
	       is the actual stack address that we want to initialize.
	       The function we call will perform the cleanup in this case.  */

	    /* If we have already assigned it space, use that space,
	       not the target that we were passed, as our target
	       parameter is only a hint.  */
	    if (DECL_RTL_SET_P (slot))
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		SET_DECL_RTL (slot, target);
		/* If we must have an addressable slot, then make sure that
		   the RTL that we just stored in slot is OK.  */
		if (TREE_ADDRESSABLE (slot))
		  put_var_into_stack (slot, /*rescan=*/true);
	      }
	  }

	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	/* Mark it as expanded.  */
	TREE_OPERAND (exp, 1) = NULL_TREE;

	store_expr (exp1, target, modifier == EXPAND_STACK_PARM ? 2 : 0);

	expand_decl_cleanup_eh (NULL_TREE, cleanups, CLEANUP_EH_ONLY (exp));

	return target;
      }

    case INIT_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	return temp;
      }

    case MODIFY_EXPR:
      {
	/* If lhs is complex, expand calls in rhs before computing it.
	   That's so we don't compute a pointer and save it over a
	   call.  If lhs is simple, compute it first so we can give it
	   as a target if the rhs is just a call.  This avoids an
	   extra temp and copy, and prevents a partial subsumption
	   that makes bad code.  Actually we could treat
	   component_ref's of vars like vars.  */

	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);

	temp = 0;

	/* Check for |= or &= of a bitfield of size one into another bitfield
	   of size 1.  In this case, (unless we need the result of the
	   assignment) we can do this more efficiently with a
	   test followed by an assignment, if necessary.

	   ??? At this point, we can't get a BIT_FIELD_REF here.  But if
	   things change so we do, this code should be enhanced to
	   support it.  */
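	/* Sketch (illustrative): for one-bit bitfields, an ignored
	   `s.a |= s.b' becomes `if (s.b) s.a = 1;' and `s.a &= s.b'
	   becomes `if (! s.b) s.a = 0;', i.e. a conditional store of a
	   constant instead of a read-modify-write of the field.  */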
	if (ignore
	    && TREE_CODE (lhs) == COMPONENT_REF
	    && (TREE_CODE (rhs) == BIT_IOR_EXPR
		|| TREE_CODE (rhs) == BIT_AND_EXPR)
	    && TREE_OPERAND (rhs, 0) == lhs
	    && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
	    && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
	    && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
	  {
	    rtx label = gen_label_rtx ();

	    do_jump (TREE_OPERAND (rhs, 1),
		     TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
		     TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
	    expand_assignment (lhs, convert (TREE_TYPE (rhs),
					     (TREE_CODE (rhs) == BIT_IOR_EXPR
					      ? integer_one_node
					      : integer_zero_node)),
			       0, 0);
	    do_pending_stack_adjust ();
	    emit_label (label);
	    return const0_rtx;
	  }

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);

	return temp;
      }

    case RETURN_EXPR:
      if (!TREE_OPERAND (exp, 0))
	expand_null_return ();
      else
	expand_return (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case PREINCREMENT_EXPR:
    case PREDECREMENT_EXPR:
      return expand_increment (exp, 0, ignore);

    case POSTINCREMENT_EXPR:
    case POSTDECREMENT_EXPR:
      /* Faster to treat as pre-increment if result is not used.  */
      return expand_increment (exp, ! ignore, ignore);

    case ADDR_EXPR:
      if (modifier == EXPAND_STACK_PARM)
	target = 0;
      /* Are we taking the address of a nested function?  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
	  && decl_function_context (TREE_OPERAND (exp, 0)) != 0
	  && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
	  && ! TREE_STATIC (exp))
	{
	  op0 = trampoline_address (TREE_OPERAND (exp, 0));
	  op0 = force_operand (op0, target);
	}
      /* If we are taking the address of something erroneous, just
	 return a zero.  */
      else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
	return const0_rtx;
      /* If we are taking the address of a constant and are at the
	 top level, we have to use output_constant_def since we can't
	 call force_const_mem at top level.  */
      else if (cfun == 0
	       && (TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
		   || (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0)))
		       == 'c')))
	op0 = XEXP (output_constant_def (TREE_OPERAND (exp, 0), 0), 0);
      else
	{
	  /* We make sure to pass const0_rtx down if we came in with
	     ignore set, to avoid doing the cleanups twice for something.  */
	  op0 = expand_expr (TREE_OPERAND (exp, 0),
			     ignore ? const0_rtx : NULL_RTX, VOIDmode,
			     (modifier == EXPAND_INITIALIZER
			      ? modifier : EXPAND_CONST_ADDRESS));

	  /* If we are going to ignore the result, OP0 will have been set
	     to const0_rtx, so just return it.  Don't get confused and
	     think we are taking the address of the constant.  */
	  if (ignore)
	    return op0;

	  /* Pass 1 for MODIFY, so that protect_from_queue doesn't get
	     clever and returns a REG when given a MEM.  */
	  op0 = protect_from_queue (op0, 1);

	  /* We would like the object in memory.  If it is a constant, we can
	     have it be statically allocated into memory.  For a non-constant,
	     we need to allocate some memory and store the value into it.  */

	  if (CONSTANT_P (op0))
	    op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				   op0);
	  else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		   || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
		   || GET_CODE (op0) == PARALLEL || GET_CODE (op0) == LO_SUM)
	    {
	      /* If the operand is a SAVE_EXPR, we can deal with this by
		 forcing the SAVE_EXPR into memory.  */
	      if (TREE_CODE (TREE_OPERAND (exp, 0)) == SAVE_EXPR)
		{
		  put_var_into_stack (TREE_OPERAND (exp, 0),
				      /*rescan=*/true);
		  op0 = SAVE_EXPR_RTL (TREE_OPERAND (exp, 0));
		}
	      else
		{
		  /* If this object is in a register, it can't be BLKmode.  */
		  tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
		  rtx memloc = assign_temp (inner_type, 1, 1, 1);

		  if (GET_CODE (op0) == PARALLEL)
		    /* Handle calls that pass values in multiple
		       non-contiguous locations.  The Irix 6 ABI has examples
		       of this.  */
		    emit_group_store (memloc, op0,
				      int_size_in_bytes (inner_type));
		  else
		    emit_move_insn (memloc, op0);

		  op0 = memloc;
		}
	    }

	  if (GET_CODE (op0) != MEM)
	    abort ();

	  mark_temp_addr_taken (op0);
	  if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	    {
	      op0 = XEXP (op0, 0);
#ifdef POINTERS_EXTEND_UNSIGNED
	      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
		  && mode == ptr_mode)
		op0 = convert_memory_address (ptr_mode, op0);
#endif
	      return op0;
	    }

	  /* If OP0 is not aligned at least as much as the type requires, we
	     need to make a temporary, copy OP0 to it, and take the address of
	     the temporary.  We want to use the alignment of the type, not of
	     the operand.  Note that this is incorrect for FUNCTION_TYPE, but
	     the test for BLKmode means that can't happen.  The test for
	     BLKmode is because we never make mis-aligned MEMs with
	     non-BLKmode.

	     We don't need to do this at all if the machine doesn't have
	     strict alignment.  */
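	  /* E.g. (illustrative): taking the address of a BLKmode member
	     of a packed structure on a strict-alignment target lands
	     here; the member is block-copied into a suitably aligned
	     stack temporary and the temporary's address is returned,
	     unless the type requires a real (addressable) object, in
	     which case we error out below.  */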
	  if (STRICT_ALIGNMENT && GET_MODE (op0) == BLKmode
	      && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
		  > MEM_ALIGN (op0))
	      && MEM_ALIGN (op0) < BIGGEST_ALIGNMENT)
	    {
	      tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
	      rtx new;

	      if (TYPE_ALIGN_OK (inner_type))
		abort ();

	      if (TREE_ADDRESSABLE (inner_type))
		{
		  /* We can't make a bitwise copy of this object, so fail.  */
		  error ("cannot take the address of an unaligned member");
		  return const0_rtx;
		}

	      new = assign_stack_temp_for_type
		(TYPE_MODE (inner_type),
		 MEM_SIZE (op0) ? INTVAL (MEM_SIZE (op0))
		 : int_size_in_bytes (inner_type),
		 1, build_qualified_type (inner_type,
					  (TYPE_QUALS (inner_type)
					   | TYPE_QUAL_CONST)));

	      emit_block_move (new, op0, expr_size (TREE_OPERAND (exp, 0)),
			       (modifier == EXPAND_STACK_PARM
				? BLOCK_OP_CALL_PARM : BLOCK_OP_NORMAL));

	      op0 = new;
	    }

	  op0 = force_operand (XEXP (op0, 0), target);
	}

      if (flag_force_addr
	  && GET_CODE (op0) != REG
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM)
	op0 = force_reg (Pmode, op0);

      if (GET_CODE (op0) == REG
	  && ! REG_USERVAR_P (op0))
	mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
	  && mode == ptr_mode)
	op0 = convert_memory_address (ptr_mode, op0);
#endif

      return op0;
9412
9413 case ENTRY_VALUE_EXPR:
9414 abort ();
9415
7308a047
RS
9416 /* COMPLEX type for Extended Pascal & Fortran */
9417 case COMPLEX_EXPR:
9418 {
9419 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 9420 rtx insns;
7308a047
RS
9421
9422 /* Get the rtx code of the operands. */
9423 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9424 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
9425
9426 if (! target)
9427 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9428
6551fa4d 9429 start_sequence ();
7308a047
RS
9430
9431 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
9432 emit_move_insn (gen_realpart (mode, target), op0);
9433 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 9434
6551fa4d
JW
9435 insns = get_insns ();
9436 end_sequence ();
9437
7308a047 9438 /* Complex construction should appear as a single unit. */
6551fa4d
JW
9439 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
9440 each with a separate pseudo as destination.
9441 It's not correct for flow to treat them as a unit. */
6d6e61ce 9442 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
9443 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
9444 else
2f937369 9445 emit_insn (insns);
7308a047
RS
9446
9447 return target;
9448 }
9449
9450 case REALPART_EXPR:
2d7050fd
RS
9451 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9452 return gen_realpart (mode, op0);
3a94c984 9453
7308a047 9454 case IMAGPART_EXPR:
2d7050fd
RS
9455 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9456 return gen_imagpart (mode, op0);
7308a047
RS
9457
9458 case CONJ_EXPR:
9459 {
62acb978 9460 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 9461 rtx imag_t;
6551fa4d 9462 rtx insns;
3a94c984
KH
9463
9464 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7308a047
RS
9465
9466 if (! target)
d6a5ac33 9467 target = gen_reg_rtx (mode);
3a94c984 9468
6551fa4d 9469 start_sequence ();
7308a047
RS
9470
9471 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
9472 emit_move_insn (gen_realpart (partmode, target),
9473 gen_realpart (partmode, op0));
7308a047 9474
62acb978 9475 imag_t = gen_imagpart (partmode, target);
91ce572a 9476 temp = expand_unop (partmode,
0fb7aeda
KH
9477 ! unsignedp && flag_trapv
9478 && (GET_MODE_CLASS(partmode) == MODE_INT)
9479 ? negv_optab : neg_optab,
3a94c984 9480 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
9481 if (temp != imag_t)
9482 emit_move_insn (imag_t, temp);
9483
6551fa4d
JW
9484 insns = get_insns ();
9485 end_sequence ();
9486
3a94c984 9487 /* Conjugate should appear as a single unit
d6a5ac33 9488 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
9489 each with a separate pseudo as destination.
9490 It's not correct for flow to treat them as a unit. */
6d6e61ce 9491 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
9492 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
9493 else
2f937369 9494 emit_insn (insns);
7308a047
RS
9495
9496 return target;
9497 }
9498
e976b8b2
MS
9499 case TRY_CATCH_EXPR:
9500 {
9501 tree handler = TREE_OPERAND (exp, 1);
9502
9503 expand_eh_region_start ();
9504
9505 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
9506
52a11cbf 9507 expand_eh_region_end_cleanup (handler);
e976b8b2
MS
9508
9509 return op0;
9510 }
9511
b335b813
PB
9512 case TRY_FINALLY_EXPR:
9513 {
9514 tree try_block = TREE_OPERAND (exp, 0);
9515 tree finally_block = TREE_OPERAND (exp, 1);
b335b813 9516
8ad8135a 9517 if (!optimize || unsafe_for_reeval (finally_block) > 1)
8943a0b4
RH
9518 {
9519 /* In this case, wrapping FINALLY_BLOCK in an UNSAVE_EXPR
9520 is not sufficient, so we cannot expand the block twice.
9521 So we play games with GOTO_SUBROUTINE_EXPR to let us
9522 expand the thing only once. */
8ad8135a
RH
9523 /* When not optimizing, we go ahead with this form since
9524 (1) user breakpoints operate more predictably without
9525 code duplication, and
9526 (2) we're not running any of the global optimizers
9527 that would explode in time/space with the highly
9528 connected CFG created by the indirect branching. */
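	    /* Illustrative sketch of the shape this branch emits (an
	       interpretation of the code below, not original commentary):

			<try-block, with the GOTO_SUBROUTINE_EXPR
			 cleanup armed>
			goto done_label;
		finally_label:
			<finally-block>
			indirect jump through return_link
		done_label:

	       so FINALLY_BLOCK is expanded exactly once.  */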
8943a0b4
RH
9529
9530 rtx finally_label = gen_label_rtx ();
9531 rtx done_label = gen_label_rtx ();
9532 rtx return_link = gen_reg_rtx (Pmode);
9533 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
9534 (tree) finally_label, (tree) return_link);
9535 TREE_SIDE_EFFECTS (cleanup) = 1;
9536
9537 /* Start a new binding layer that will keep track of all cleanup
9538 actions to be performed. */
9539 expand_start_bindings (2);
9540 target_temp_slot_level = temp_slot_level;
9541
9542 expand_decl_cleanup (NULL_TREE, cleanup);
9543 op0 = expand_expr (try_block, target, tmode, modifier);
9544
9545 preserve_temp_slots (op0);
9546 expand_end_bindings (NULL_TREE, 0, 0);
9547 emit_jump (done_label);
9548 emit_label (finally_label);
9549 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
9550 emit_indirect_jump (return_link);
9551 emit_label (done_label);
9552 }
9553 else
9554 {
9555 expand_start_bindings (2);
9556 target_temp_slot_level = temp_slot_level;
b335b813 9557
8943a0b4
RH
9558 expand_decl_cleanup (NULL_TREE, finally_block);
9559 op0 = expand_expr (try_block, target, tmode, modifier);
b335b813 9560
8943a0b4
RH
9561 preserve_temp_slots (op0);
9562 expand_end_bindings (NULL_TREE, 0, 0);
9563 }
b335b813 9564
b335b813
PB
9565 return op0;
9566 }
9567
3a94c984 9568 case GOTO_SUBROUTINE_EXPR:
b335b813
PB
9569 {
9570 rtx subr = (rtx) TREE_OPERAND (exp, 0);
9571 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
9572 rtx return_address = gen_label_rtx ();
3a94c984
KH
9573 emit_move_insn (return_link,
9574 gen_rtx_LABEL_REF (Pmode, return_address));
b335b813
PB
9575 emit_jump (subr);
9576 emit_label (return_address);
9577 return const0_rtx;
9578 }
9579
d3707adb
RH
9580 case VA_ARG_EXPR:
9581 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
9582
52a11cbf 9583 case EXC_PTR_EXPR:
86c99549 9584 return get_exception_pointer (cfun);
52a11cbf 9585
67231816
RH
9586 case FDESC_EXPR:
9587 /* Function descriptors are not valid except for as
9588 initialization constants, and should not be expanded. */
9589 abort ();
9590
bbf6f052 9591 default:
c9d892a8 9592 return (*lang_hooks.expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
9593 }
9594
9595 /* Here to do an ordinary binary operator, generating an instruction
9596 from the optab already placed in `this_optab'. */
9597 binop:
e5e809f4 9598 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
9599 subtarget = 0;
9600 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 9601 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052 9602 binop2:
8403445a
AM
9603 if (modifier == EXPAND_STACK_PARM)
9604 target = 0;
bbf6f052
RK
9605 temp = expand_binop (mode, this_optab, op0, op1, target,
9606 unsignedp, OPTAB_LIB_WIDEN);
9607 if (temp == 0)
9608 abort ();
9609 return temp;
9610}
b93a436e 9611\f
1ce7f3c2
RK
9612/* Subroutine of above: returns 1 if OFFSET corresponds to an offset that
9613 when applied to the address of EXP produces an address known to be
9614 aligned more than BIGGEST_ALIGNMENT. */
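/* For illustration (inferred from the checks below, not original
   commentary): the recognized OFFSET has the shape

	(- (sizetype) &EXP) & (ALIGN - 1)

   where ALIGN is a power of 2 larger than BIGGEST_ALIGNMENT, so that
   adding OFFSET to the address of EXP rounds it up to a multiple of
   ALIGN.  */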
9615
9616static int
9617is_aligning_offset (offset, exp)
9618 tree offset;
9619 tree exp;
9620{
9621 /* Strip off any conversions and WITH_RECORD_EXPR nodes. */
9622 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9623 || TREE_CODE (offset) == NOP_EXPR
9624 || TREE_CODE (offset) == CONVERT_EXPR
9625 || TREE_CODE (offset) == WITH_RECORD_EXPR)
9626 offset = TREE_OPERAND (offset, 0);
9627
 9628 /* We must now have a BIT_AND_EXPR with a constant that is one less than
 9629 a power of 2 and which is larger than BIGGEST_ALIGNMENT. */
9630 if (TREE_CODE (offset) != BIT_AND_EXPR
9631 || !host_integerp (TREE_OPERAND (offset, 1), 1)
9632 || compare_tree_int (TREE_OPERAND (offset, 1), BIGGEST_ALIGNMENT) <= 0
 9633 || exact_log2 (tree_low_cst (TREE_OPERAND (offset, 1), 1) + 1) < 0
9634 return 0;
9635
9636 /* Look at the first operand of BIT_AND_EXPR and strip any conversion.
9637 It must be NEGATE_EXPR. Then strip any more conversions. */
9638 offset = TREE_OPERAND (offset, 0);
9639 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9640 || TREE_CODE (offset) == NOP_EXPR
9641 || TREE_CODE (offset) == CONVERT_EXPR)
9642 offset = TREE_OPERAND (offset, 0);
9643
9644 if (TREE_CODE (offset) != NEGATE_EXPR)
9645 return 0;
9646
9647 offset = TREE_OPERAND (offset, 0);
9648 while (TREE_CODE (offset) == NON_LVALUE_EXPR
9649 || TREE_CODE (offset) == NOP_EXPR
9650 || TREE_CODE (offset) == CONVERT_EXPR)
9651 offset = TREE_OPERAND (offset, 0);
9652
9653 /* This must now be the address either of EXP or of a PLACEHOLDER_EXPR
9654 whose type is the same as EXP. */
9655 return (TREE_CODE (offset) == ADDR_EXPR
9656 && (TREE_OPERAND (offset, 0) == exp
9657 || (TREE_CODE (TREE_OPERAND (offset, 0)) == PLACEHOLDER_EXPR
9658 && (TREE_TYPE (TREE_OPERAND (offset, 0))
9659 == TREE_TYPE (exp)))));
9660}
9661\f
e0a2f705 9662/* Return the tree node if ARG corresponds to a string constant, or zero
cc2902df 9663 if it doesn't. If we return nonzero, set *PTR_OFFSET to the offset
fed3cef0
RK
9664 in bytes within the string that ARG is accessing. The type of the
9665 offset will be `sizetype'. */
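/* Illustrative example (assumed input, not original commentary): for
   ARG built from the C expression "hello" + 2, this returns the
   STRING_CST for "hello" and sets *PTR_OFFSET to (sizetype) 2.  */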
b93a436e 9666
28f4ec01 9667tree
b93a436e
JL
9668string_constant (arg, ptr_offset)
9669 tree arg;
9670 tree *ptr_offset;
9671{
9672 STRIP_NOPS (arg);
9673
9674 if (TREE_CODE (arg) == ADDR_EXPR
9675 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9676 {
fed3cef0 9677 *ptr_offset = size_zero_node;
b93a436e
JL
9678 return TREE_OPERAND (arg, 0);
9679 }
9680 else if (TREE_CODE (arg) == PLUS_EXPR)
9681 {
9682 tree arg0 = TREE_OPERAND (arg, 0);
9683 tree arg1 = TREE_OPERAND (arg, 1);
9684
9685 STRIP_NOPS (arg0);
9686 STRIP_NOPS (arg1);
9687
9688 if (TREE_CODE (arg0) == ADDR_EXPR
9689 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 9690 {
fed3cef0 9691 *ptr_offset = convert (sizetype, arg1);
b93a436e 9692 return TREE_OPERAND (arg0, 0);
bbf6f052 9693 }
b93a436e
JL
9694 else if (TREE_CODE (arg1) == ADDR_EXPR
9695 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 9696 {
fed3cef0 9697 *ptr_offset = convert (sizetype, arg0);
b93a436e 9698 return TREE_OPERAND (arg1, 0);
bbf6f052 9699 }
b93a436e 9700 }
ca695ac9 9701
b93a436e
JL
9702 return 0;
9703}
ca695ac9 9704\f
b93a436e
JL
9705/* Expand code for a post- or pre- increment or decrement
9706 and return the RTX for the result.
9707 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
1499e0a8 9708
b93a436e
JL
9709static rtx
9710expand_increment (exp, post, ignore)
b3694847 9711 tree exp;
b93a436e 9712 int post, ignore;
ca695ac9 9713{
b3694847
SS
9714 rtx op0, op1;
9715 rtx temp, value;
9716 tree incremented = TREE_OPERAND (exp, 0);
b93a436e
JL
9717 optab this_optab = add_optab;
9718 int icode;
9719 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9720 int op0_is_copy = 0;
9721 int single_insn = 0;
9722 /* 1 means we can't store into OP0 directly,
9723 because it is a subreg narrower than a word,
9724 and we don't dare clobber the rest of the word. */
9725 int bad_subreg = 0;
1499e0a8 9726
b93a436e
JL
9727 /* Stabilize any component ref that might need to be
9728 evaluated more than once below. */
9729 if (!post
9730 || TREE_CODE (incremented) == BIT_FIELD_REF
9731 || (TREE_CODE (incremented) == COMPONENT_REF
9732 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9733 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9734 incremented = stabilize_reference (incremented);
9735 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9736 ones into save exprs so that they don't accidentally get evaluated
9737 more than once by the code below. */
9738 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9739 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9740 incremented = save_expr (incremented);
e9a25f70 9741
b93a436e
JL
9742 /* Compute the operands as RTX.
9743 Note whether OP0 is the actual lvalue or a copy of it:
9744 I believe it is a copy iff it is a register or subreg
6d2f8887 9745 and insns were generated in computing it. */
e9a25f70 9746
b93a436e 9747 temp = get_last_insn ();
37a08a29 9748 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, 0);
e9a25f70 9749
b93a436e
JL
9750 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9751 in place but instead must do sign- or zero-extension during assignment,
9752 so we copy it into a new register and let the code below use it as
9753 a copy.
e9a25f70 9754
b93a436e
JL
 9755 Note that we can safely modify this SUBREG since it is known not to be
9756 shared (it was made by the expand_expr call above). */
9757
9758 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9759 {
9760 if (post)
9761 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9762 else
9763 bad_subreg = 1;
9764 }
9765 else if (GET_CODE (op0) == SUBREG
9766 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9767 {
9768 /* We cannot increment this SUBREG in place. If we are
9769 post-incrementing, get a copy of the old value. Otherwise,
9770 just mark that we cannot increment in place. */
9771 if (post)
9772 op0 = copy_to_reg (op0);
9773 else
9774 bad_subreg = 1;
e9a25f70
JL
9775 }
9776
b93a436e
JL
9777 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9778 && temp != get_last_insn ());
37a08a29 9779 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
1499e0a8 9780
b93a436e
JL
9781 /* Decide whether incrementing or decrementing. */
9782 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9783 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9784 this_optab = sub_optab;
9785
9786 /* Convert decrement by a constant into a negative increment. */
9787 if (this_optab == sub_optab
9788 && GET_CODE (op1) == CONST_INT)
ca695ac9 9789 {
3a94c984 9790 op1 = GEN_INT (-INTVAL (op1));
b93a436e 9791 this_optab = add_optab;
ca695ac9 9792 }
1499e0a8 9793
91ce572a 9794 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
505ddab6 9795 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
91ce572a 9796
b93a436e
JL
9797 /* For a preincrement, see if we can do this with a single instruction. */
9798 if (!post)
9799 {
9800 icode = (int) this_optab->handlers[(int) mode].insn_code;
9801 if (icode != (int) CODE_FOR_nothing
9802 /* Make sure that OP0 is valid for operands 0 and 1
9803 of the insn we want to queue. */
a995e389
RH
9804 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9805 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9806 && (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e
JL
9807 single_insn = 1;
9808 }
bbf6f052 9809
b93a436e
JL
9810 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9811 then we cannot just increment OP0. We must therefore contrive to
9812 increment the original value. Then, for postincrement, we can return
9813 OP0 since it is a copy of the old value. For preincrement, expand here
9814 unless we can do it with a single insn.
bbf6f052 9815
b93a436e
JL
9816 Likewise if storing directly into OP0 would clobber high bits
9817 we need to preserve (bad_subreg). */
9818 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 9819 {
b93a436e
JL
9820 /* This is the easiest way to increment the value wherever it is.
9821 Problems with multiple evaluation of INCREMENTED are prevented
9822 because either (1) it is a component_ref or preincrement,
9823 in which case it was stabilized above, or (2) it is an array_ref
9824 with constant index in an array in a register, which is
9825 safe to reevaluate. */
9826 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9827 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9828 ? MINUS_EXPR : PLUS_EXPR),
9829 TREE_TYPE (exp),
9830 incremented,
9831 TREE_OPERAND (exp, 1));
a358cee0 9832
b93a436e
JL
9833 while (TREE_CODE (incremented) == NOP_EXPR
9834 || TREE_CODE (incremented) == CONVERT_EXPR)
9835 {
9836 newexp = convert (TREE_TYPE (incremented), newexp);
9837 incremented = TREE_OPERAND (incremented, 0);
9838 }
bbf6f052 9839
b93a436e
JL
 9840 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
9841 return post ? op0 : temp;
9842 }
bbf6f052 9843
b93a436e
JL
9844 if (post)
9845 {
9846 /* We have a true reference to the value in OP0.
9847 If there is an insn to add or subtract in this mode, queue it.
9848 Queueing the increment insn avoids the register shuffling
9849 that often results if we must increment now and first save
9850 the old value for subsequent use. */
bbf6f052 9851
b93a436e
JL
9852#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9853 op0 = stabilize (op0);
9854#endif
41dfd40c 9855
b93a436e
JL
9856 icode = (int) this_optab->handlers[(int) mode].insn_code;
9857 if (icode != (int) CODE_FOR_nothing
9858 /* Make sure that OP0 is valid for operands 0 and 1
9859 of the insn we want to queue. */
a995e389
RH
9860 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9861 && (*insn_data[icode].operand[1].predicate) (op0, mode))
b93a436e 9862 {
a995e389 9863 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9864 op1 = force_reg (mode, op1);
bbf6f052 9865
b93a436e
JL
9866 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9867 }
9868 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9869 {
9870 rtx addr = (general_operand (XEXP (op0, 0), mode)
9871 ? force_reg (Pmode, XEXP (op0, 0))
9872 : copy_to_reg (XEXP (op0, 0)));
9873 rtx temp, result;
ca695ac9 9874
792760b9 9875 op0 = replace_equiv_address (op0, addr);
b93a436e 9876 temp = force_reg (GET_MODE (op0), op0);
a995e389 9877 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9878 op1 = force_reg (mode, op1);
ca695ac9 9879
b93a436e
JL
9880 /* The increment queue is LIFO, thus we have to `queue'
9881 the instructions in reverse order. */
9882 enqueue_insn (op0, gen_move_insn (op0, temp));
9883 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9884 return result;
bbf6f052
RK
9885 }
9886 }
ca695ac9 9887
b93a436e
JL
9888 /* Preincrement, or we can't increment with one simple insn. */
9889 if (post)
9890 /* Save a copy of the value before inc or dec, to return it later. */
9891 temp = value = copy_to_reg (op0);
9892 else
9893 /* Arrange to return the incremented value. */
9894 /* Copy the rtx because expand_binop will protect from the queue,
9895 and the results of that would be invalid for us to return
9896 if our caller does emit_queue before using our result. */
9897 temp = copy_rtx (value = op0);
bbf6f052 9898
b93a436e 9899 /* Increment however we can. */
37a08a29 9900 op1 = expand_binop (mode, this_optab, value, op1, op0,
b93a436e 9901 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
37a08a29 9902
b93a436e
JL
9903 /* Make sure the value is stored into OP0. */
9904 if (op1 != op0)
9905 emit_move_insn (op0, op1);
5718612f 9906
b93a436e
JL
9907 return temp;
9908}
9909\f
b93a436e
JL
9910/* Generate code to calculate EXP using a store-flag instruction
9911 and return an rtx for the result. EXP is either a comparison
9912 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 9913
b93a436e 9914 If TARGET is nonzero, store the result there if convenient.
ca695ac9 9915
cc2902df 9916 If ONLY_CHEAP is nonzero, only do this if it is likely to be very
b93a436e 9917 cheap.
ca695ac9 9918
b93a436e
JL
9919 Return zero if there is no suitable set-flag instruction
9920 available on this machine.
ca695ac9 9921
b93a436e
JL
9922 Once expand_expr has been called on the arguments of the comparison,
9923 we are committed to doing the store flag, since it is not safe to
9924 re-evaluate the expression. We emit the store-flag insn by calling
9925 emit_store_flag, but only expand the arguments if we have a reason
9926 to believe that emit_store_flag will be successful. If we think that
9927 it will, but it isn't, we have to simulate the store-flag with a
9928 set/jump/set sequence. */
ca695ac9 9929
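/* Hedged sketch of that set/jump/set fallback, in C-like pseudo code
   (the non-inverted case; compare the end of this function):

	target = 1;
	if (op0 <cond> op1) goto label;
	target = 0;
     label:;

   Illustrative only.  */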
b93a436e
JL
9930static rtx
9931do_store_flag (exp, target, mode, only_cheap)
9932 tree exp;
9933 rtx target;
9934 enum machine_mode mode;
9935 int only_cheap;
9936{
9937 enum rtx_code code;
9938 tree arg0, arg1, type;
9939 tree tem;
9940 enum machine_mode operand_mode;
9941 int invert = 0;
9942 int unsignedp;
9943 rtx op0, op1;
9944 enum insn_code icode;
9945 rtx subtarget = target;
381127e8 9946 rtx result, label;
ca695ac9 9947
b93a436e
JL
9948 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9949 result at the end. We can't simply invert the test since it would
9950 have already been inverted if it were valid. This case occurs for
9951 some floating-point comparisons. */
ca695ac9 9952
b93a436e
JL
9953 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9954 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 9955
b93a436e
JL
9956 arg0 = TREE_OPERAND (exp, 0);
9957 arg1 = TREE_OPERAND (exp, 1);
5129d2ce
AH
9958
9959 /* Don't crash if the comparison was erroneous. */
9960 if (arg0 == error_mark_node || arg1 == error_mark_node)
9961 return const0_rtx;
9962
b93a436e
JL
9963 type = TREE_TYPE (arg0);
9964 operand_mode = TYPE_MODE (type);
9965 unsignedp = TREE_UNSIGNED (type);
ca695ac9 9966
b93a436e
JL
9967 /* We won't bother with BLKmode store-flag operations because it would mean
9968 passing a lot of information to emit_store_flag. */
9969 if (operand_mode == BLKmode)
9970 return 0;
ca695ac9 9971
b93a436e
JL
9972 /* We won't bother with store-flag operations involving function pointers
9973 when function pointers must be canonicalized before comparisons. */
9974#ifdef HAVE_canonicalize_funcptr_for_compare
9975 if (HAVE_canonicalize_funcptr_for_compare
9976 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9977 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9978 == FUNCTION_TYPE))
9979 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9980 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9981 == FUNCTION_TYPE))))
9982 return 0;
ca695ac9
JB
9983#endif
9984
b93a436e
JL
9985 STRIP_NOPS (arg0);
9986 STRIP_NOPS (arg1);
ca695ac9 9987
b93a436e
JL
9988 /* Get the rtx comparison code to use. We know that EXP is a comparison
9989 operation of some type. Some comparisons against 1 and -1 can be
9990 converted to comparisons with zero. Do so here so that the tests
9991 below will be aware that we have a comparison with zero. These
9992 tests will not catch constants in the first operand, but constants
9993 are rarely passed as the first operand. */
ca695ac9 9994
b93a436e
JL
9995 switch (TREE_CODE (exp))
9996 {
9997 case EQ_EXPR:
9998 code = EQ;
bbf6f052 9999 break;
b93a436e
JL
10000 case NE_EXPR:
10001 code = NE;
bbf6f052 10002 break;
b93a436e
JL
10003 case LT_EXPR:
10004 if (integer_onep (arg1))
10005 arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
10006 else
10007 code = unsignedp ? LTU : LT;
ca695ac9 10008 break;
b93a436e
JL
10009 case LE_EXPR:
10010 if (! unsignedp && integer_all_onesp (arg1))
10011 arg1 = integer_zero_node, code = LT;
10012 else
10013 code = unsignedp ? LEU : LE;
ca695ac9 10014 break;
b93a436e
JL
10015 case GT_EXPR:
10016 if (! unsignedp && integer_all_onesp (arg1))
10017 arg1 = integer_zero_node, code = GE;
10018 else
10019 code = unsignedp ? GTU : GT;
10020 break;
10021 case GE_EXPR:
10022 if (integer_onep (arg1))
10023 arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
10024 else
10025 code = unsignedp ? GEU : GE;
ca695ac9 10026 break;
1eb8759b
RH
10027
10028 case UNORDERED_EXPR:
10029 code = UNORDERED;
10030 break;
10031 case ORDERED_EXPR:
10032 code = ORDERED;
10033 break;
10034 case UNLT_EXPR:
10035 code = UNLT;
10036 break;
10037 case UNLE_EXPR:
10038 code = UNLE;
10039 break;
10040 case UNGT_EXPR:
10041 code = UNGT;
10042 break;
10043 case UNGE_EXPR:
10044 code = UNGE;
10045 break;
10046 case UNEQ_EXPR:
10047 code = UNEQ;
10048 break;
1eb8759b 10049
ca695ac9 10050 default:
b93a436e 10051 abort ();
bbf6f052 10052 }
bbf6f052 10053
b93a436e
JL
10054 /* Put a constant second. */
10055 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
10056 {
10057 tem = arg0; arg0 = arg1; arg1 = tem;
10058 code = swap_condition (code);
ca695ac9 10059 }
bbf6f052 10060
b93a436e
JL
10061 /* If this is an equality or inequality test of a single bit, we can
10062 do this by shifting the bit being tested to the low-order bit and
10063 masking the result with the constant 1. If the condition was EQ,
10064 we xor it with 1. This does not require an scc insn and is faster
10065 than an scc insn even if we have it. */
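  /* Worked example (illustrative, not original commentary): for
     "(x & 8) != 0" this produces the equivalent of

	(x >> 3) & 1

     and for the EQ form "(x & 8) == 0" the result is additionally
     xored with 1, giving ((x >> 3) & 1) ^ 1.  */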
d39985fa 10066
b93a436e
JL
10067 if ((code == NE || code == EQ)
10068 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10069 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10070 {
10071 tree inner = TREE_OPERAND (arg0, 0);
10072 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10073 int ops_unsignedp;
bbf6f052 10074
b93a436e
JL
10075 /* If INNER is a right shift of a constant and it plus BITNUM does
10076 not overflow, adjust BITNUM and INNER. */
ca695ac9 10077
b93a436e
JL
10078 if (TREE_CODE (inner) == RSHIFT_EXPR
10079 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10080 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
05bccae2
RK
10081 && bitnum < TYPE_PRECISION (type)
10082 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10083 bitnum - TYPE_PRECISION (type)))
ca695ac9 10084 {
b93a436e
JL
10085 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10086 inner = TREE_OPERAND (inner, 0);
ca695ac9 10087 }
ca695ac9 10088
b93a436e
JL
10089 /* If we are going to be able to omit the AND below, we must do our
10090 operations as unsigned. If we must use the AND, we have a choice.
10091 Normally unsigned is faster, but for some machines signed is. */
10092 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10093#ifdef LOAD_EXTEND_OP
10094 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10095#else
10096 : 1
10097#endif
10098 );
bbf6f052 10099
296b4ed9 10100 if (! get_subtarget (subtarget)
a47fed55 10101 || GET_MODE (subtarget) != operand_mode
e5e809f4 10102 || ! safe_from_p (subtarget, inner, 1))
b93a436e 10103 subtarget = 0;
bbf6f052 10104
b93a436e 10105 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
bbf6f052 10106
b93a436e 10107 if (bitnum != 0)
681cb233 10108 op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
b93a436e 10109 size_int (bitnum), subtarget, ops_unsignedp);
bbf6f052 10110
b93a436e
JL
10111 if (GET_MODE (op0) != mode)
10112 op0 = convert_to_mode (mode, op0, ops_unsignedp);
bbf6f052 10113
b93a436e
JL
10114 if ((code == EQ && ! invert) || (code == NE && invert))
10115 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10116 ops_unsignedp, OPTAB_LIB_WIDEN);
bbf6f052 10117
b93a436e
JL
10118 /* Put the AND last so it can combine with more things. */
10119 if (bitnum != TYPE_PRECISION (type) - 1)
22273300 10120 op0 = expand_and (mode, op0, const1_rtx, subtarget);
bbf6f052 10121
b93a436e
JL
10122 return op0;
10123 }
bbf6f052 10124
b93a436e 10125 /* Now see if we are likely to be able to do this. Return if not. */
1eb8759b 10126 if (! can_compare_p (code, operand_mode, ccp_store_flag))
b93a436e 10127 return 0;
1eb8759b 10128
b93a436e
JL
10129 icode = setcc_gen_code[(int) code];
10130 if (icode == CODE_FOR_nothing
a995e389 10131 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
ca695ac9 10132 {
b93a436e
JL
10133 /* We can only do this if it is one of the special cases that
10134 can be handled without an scc insn. */
10135 if ((code == LT && integer_zerop (arg1))
10136 || (! only_cheap && code == GE && integer_zerop (arg1)))
10137 ;
10138 else if (BRANCH_COST >= 0
10139 && ! only_cheap && (code == NE || code == EQ)
10140 && TREE_CODE (type) != REAL_TYPE
10141 && ((abs_optab->handlers[(int) operand_mode].insn_code
10142 != CODE_FOR_nothing)
10143 || (ffs_optab->handlers[(int) operand_mode].insn_code
10144 != CODE_FOR_nothing)))
10145 ;
10146 else
10147 return 0;
ca695ac9 10148 }
3a94c984 10149
296b4ed9 10150 if (! get_subtarget (target)
a47fed55 10151 || GET_MODE (subtarget) != operand_mode
e5e809f4 10152 || ! safe_from_p (subtarget, arg1, 1))
b93a436e
JL
10153 subtarget = 0;
10154
10155 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10156 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10157
10158 if (target == 0)
10159 target = gen_reg_rtx (mode);
10160
10161 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
 10162 because, if emit_store_flag does anything, it will succeed and
10163 OP0 and OP1 will not be used subsequently. */
ca695ac9 10164
b93a436e
JL
10165 result = emit_store_flag (target, code,
10166 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10167 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10168 operand_mode, unsignedp, 1);
ca695ac9 10169
b93a436e
JL
10170 if (result)
10171 {
10172 if (invert)
10173 result = expand_binop (mode, xor_optab, result, const1_rtx,
10174 result, 0, OPTAB_LIB_WIDEN);
10175 return result;
ca695ac9 10176 }
bbf6f052 10177
b93a436e
JL
10178 /* If this failed, we have to do this with set/compare/jump/set code. */
10179 if (GET_CODE (target) != REG
10180 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10181 target = gen_reg_rtx (GET_MODE (target));
10182
10183 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10184 result = compare_from_rtx (op0, op1, code, unsignedp,
a06ef755 10185 operand_mode, NULL_RTX);
b93a436e
JL
10186 if (GET_CODE (result) == CONST_INT)
10187 return (((result == const0_rtx && ! invert)
10188 || (result != const0_rtx && invert))
10189 ? const0_rtx : const1_rtx);
ca695ac9 10190
8f08e8c0
JL
10191 /* The code of RESULT may not match CODE if compare_from_rtx
10192 decided to swap its operands and reverse the original code.
10193
10194 We know that compare_from_rtx returns either a CONST_INT or
10195 a new comparison code, so it is safe to just extract the
10196 code from RESULT. */
10197 code = GET_CODE (result);
10198
b93a436e
JL
10199 label = gen_label_rtx ();
10200 if (bcc_gen_fctn[(int) code] == 0)
10201 abort ();
0f41302f 10202
b93a436e
JL
10203 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10204 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10205 emit_label (label);
bbf6f052 10206
b93a436e 10207 return target;
ca695ac9 10208}
b93a436e 10209\f
b93a436e 10210
ad82abb8
ZW
10211/* Stubs in case we haven't got a casesi insn. */
10212#ifndef HAVE_casesi
10213# define HAVE_casesi 0
10214# define gen_casesi(a, b, c, d, e) (0)
10215# define CODE_FOR_casesi CODE_FOR_nothing
10216#endif
10217
10218/* If the machine does not have a case insn that compares the bounds,
10219 this means extra overhead for dispatch tables, which raises the
10220 threshold for using them. */
10221#ifndef CASE_VALUES_THRESHOLD
10222#define CASE_VALUES_THRESHOLD (HAVE_casesi ? 4 : 5)
10223#endif /* CASE_VALUES_THRESHOLD */
10224
10225unsigned int
10226case_values_threshold ()
10227{
10228 return CASE_VALUES_THRESHOLD;
10229}
10230
10231/* Attempt to generate a casesi instruction. Returns 1 if successful,
10232 0 otherwise (i.e. if there is no casesi instruction). */
10233int
10234try_casesi (index_type, index_expr, minval, range,
10235 table_label, default_label)
10236 tree index_type, index_expr, minval, range;
10237 rtx table_label ATTRIBUTE_UNUSED;
10238 rtx default_label;
10239{
10240 enum machine_mode index_mode = SImode;
10241 int index_bits = GET_MODE_BITSIZE (index_mode);
10242 rtx op1, op2, index;
10243 enum machine_mode op_mode;
10244
10245 if (! HAVE_casesi)
10246 return 0;
10247
10248 /* Convert the index to SImode. */
10249 if (GET_MODE_BITSIZE (TYPE_MODE (index_type)) > GET_MODE_BITSIZE (index_mode))
10250 {
10251 enum machine_mode omode = TYPE_MODE (index_type);
10252 rtx rangertx = expand_expr (range, NULL_RTX, VOIDmode, 0);
10253
10254 /* We must handle the endpoints in the original mode. */
10255 index_expr = build (MINUS_EXPR, index_type,
10256 index_expr, minval);
10257 minval = integer_zero_node;
10258 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10259 emit_cmp_and_jump_insns (rangertx, index, LTU, NULL_RTX,
a06ef755 10260 omode, 1, default_label);
ad82abb8
ZW
10261 /* Now we can safely truncate. */
10262 index = convert_to_mode (index_mode, index, 0);
10263 }
10264 else
10265 {
10266 if (TYPE_MODE (index_type) != index_mode)
10267 {
b0c48229
NB
10268 index_expr = convert ((*lang_hooks.types.type_for_size)
10269 (index_bits, 0), index_expr);
ad82abb8
ZW
10270 index_type = TREE_TYPE (index_expr);
10271 }
10272
10273 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10274 }
10275 emit_queue ();
10276 index = protect_from_queue (index, 0);
10277 do_pending_stack_adjust ();
10278
10279 op_mode = insn_data[(int) CODE_FOR_casesi].operand[0].mode;
10280 if (! (*insn_data[(int) CODE_FOR_casesi].operand[0].predicate)
10281 (index, op_mode))
10282 index = copy_to_mode_reg (op_mode, index);
e87b4f3f 10283
ad82abb8
ZW
10284 op1 = expand_expr (minval, NULL_RTX, VOIDmode, 0);
10285
10286 op_mode = insn_data[(int) CODE_FOR_casesi].operand[1].mode;
10287 op1 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (minval)),
10288 op1, TREE_UNSIGNED (TREE_TYPE (minval)));
10289 if (! (*insn_data[(int) CODE_FOR_casesi].operand[1].predicate)
10290 (op1, op_mode))
10291 op1 = copy_to_mode_reg (op_mode, op1);
10292
10293 op2 = expand_expr (range, NULL_RTX, VOIDmode, 0);
10294
10295 op_mode = insn_data[(int) CODE_FOR_casesi].operand[2].mode;
10296 op2 = convert_modes (op_mode, TYPE_MODE (TREE_TYPE (range)),
10297 op2, TREE_UNSIGNED (TREE_TYPE (range)));
10298 if (! (*insn_data[(int) CODE_FOR_casesi].operand[2].predicate)
10299 (op2, op_mode))
10300 op2 = copy_to_mode_reg (op_mode, op2);
10301
10302 emit_jump_insn (gen_casesi (index, op1, op2,
10303 table_label, default_label));
10304 return 1;
10305}
10306
10307/* Attempt to generate a tablejump instruction; same concept. */
10308#ifndef HAVE_tablejump
10309#define HAVE_tablejump 0
10310#define gen_tablejump(x, y) (0)
10311#endif
10312
10313/* Subroutine of the next function.
10314
10315 INDEX is the value being switched on, with the lowest value
b93a436e
JL
10316 in the table already subtracted.
10317 MODE is its expected mode (needed if INDEX is constant).
10318 RANGE is the length of the jump table.
10319 TABLE_LABEL is a CODE_LABEL rtx for the table itself.
88d3b7f0 10320
b93a436e
JL
10321 DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
10322 index value is out of range. */
0f41302f 10323
ad82abb8 10324static void
b93a436e
JL
10325do_tablejump (index, mode, range, table_label, default_label)
10326 rtx index, range, table_label, default_label;
10327 enum machine_mode mode;
ca695ac9 10328{
b3694847 10329 rtx temp, vector;
88d3b7f0 10330
74f6d071
JH
10331 if (INTVAL (range) > cfun->max_jumptable_ents)
10332 cfun->max_jumptable_ents = INTVAL (range);
1877be45 10333
b93a436e
JL
10334 /* Do an unsigned comparison (in the proper mode) between the index
10335 expression and the value which represents the length of the range.
10336 Since we just finished subtracting the lower bound of the range
10337 from the index expression, this comparison allows us to simultaneously
10338 check that the original index expression value is both greater than
10339 or equal to the minimum value of the range and less than or equal to
10340 the maximum value of the range. */
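  /* Concrete illustration (assumed values, not original commentary):
     for a case range [3, 10], RANGE is 7 and INDEX already holds
     i - 3, so the single unsigned test

	if ((unsigned) (i - 3) > 7) goto default_label;

     rejects both i < 3 (the subtraction wraps to a huge unsigned
     value) and i > 10.  */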
709f5be1 10341
c5d5d461 10342 emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
a06ef755 10343 default_label);
bbf6f052 10344
b93a436e
JL
10345 /* If index is in range, it must fit in Pmode.
10346 Convert to Pmode so we can index with it. */
10347 if (mode != Pmode)
10348 index = convert_to_mode (Pmode, index, 1);
bbf6f052 10349
b93a436e
JL
10350 /* Don't let a MEM slip thru, because then INDEX that comes
10351 out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
10352 and break_out_memory_refs will go to work on it and mess it up. */
10353#ifdef PIC_CASE_VECTOR_ADDRESS
10354 if (flag_pic && GET_CODE (index) != REG)
10355 index = copy_to_mode_reg (Pmode, index);
10356#endif
ca695ac9 10357
b93a436e
JL
10358 /* If flag_force_addr were to affect this address
10359 it could interfere with the tricky assumptions made
10360 about addresses that contain label-refs,
10361 which may be valid only very near the tablejump itself. */
10362 /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
10363 GET_MODE_SIZE, because this indicates how large insns are. The other
10364 uses should all be Pmode, because they are addresses. This code
10365 could fail if addresses and insns are not the same size. */
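  /* In effect (illustrative): the address formed below is
     table_label + index * GET_MODE_SIZE (CASE_VECTOR_MODE).  */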
10366 index = gen_rtx_PLUS (Pmode,
10367 gen_rtx_MULT (Pmode, index,
10368 GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
10369 gen_rtx_LABEL_REF (Pmode, table_label));
10370#ifdef PIC_CASE_VECTOR_ADDRESS
10371 if (flag_pic)
10372 index = PIC_CASE_VECTOR_ADDRESS (index);
10373 else
bbf6f052 10374#endif
b93a436e
JL
10375 index = memory_address_noforce (CASE_VECTOR_MODE, index);
10376 temp = gen_reg_rtx (CASE_VECTOR_MODE);
10377 vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
10378 RTX_UNCHANGING_P (vector) = 1;
4da2eb6b 10379 MEM_NOTRAP_P (vector) = 1;
b93a436e
JL
10380 convert_move (temp, vector, 0);
10381
10382 emit_jump_insn (gen_tablejump (temp, table_label));
10383
10384 /* If we are generating PIC code or if the table is PC-relative, the
10385 table and JUMP_INSN must be adjacent, so don't output a BARRIER. */
10386 if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
10387 emit_barrier ();
bbf6f052 10388}
b93a436e 10389
ad82abb8
ZW
10390int
10391try_tablejump (index_type, index_expr, minval, range,
10392 table_label, default_label)
10393 tree index_type, index_expr, minval, range;
10394 rtx table_label, default_label;
10395{
10396 rtx index;
10397
10398 if (! HAVE_tablejump)
10399 return 0;
10400
10401 index_expr = fold (build (MINUS_EXPR, index_type,
10402 convert (index_type, index_expr),
10403 convert (index_type, minval)));
10404 index = expand_expr (index_expr, NULL_RTX, VOIDmode, 0);
10405 emit_queue ();
10406 index = protect_from_queue (index, 0);
10407 do_pending_stack_adjust ();
10408
10409 do_tablejump (index, TYPE_MODE (index_type),
10410 convert_modes (TYPE_MODE (index_type),
10411 TYPE_MODE (TREE_TYPE (range)),
10412 expand_expr (range, NULL_RTX,
10413 VOIDmode, 0),
10414 TREE_UNSIGNED (TREE_TYPE (range))),
10415 table_label, default_label);
10416 return 1;
10417}
e2500fed 10418
cb2a532e
AH
10419/* Nonzero if the mode is a valid vector mode for this architecture.
10420 This returns nonzero even if there is no hardware support for the
10421 vector mode, but we can emulate with narrower modes. */
10422
10423int
10424vector_mode_valid_p (mode)
10425 enum machine_mode mode;
10426{
10427 enum mode_class class = GET_MODE_CLASS (mode);
10428 enum machine_mode innermode;
10429
10430 /* Doh! What's going on? */
10431 if (class != MODE_VECTOR_INT
10432 && class != MODE_VECTOR_FLOAT)
10433 return 0;
10434
10435 /* Hardware support. Woo hoo! */
10436 if (VECTOR_MODE_SUPPORTED_P (mode))
10437 return 1;
10438
10439 innermode = GET_MODE_INNER (mode);
10440
 10441 /* We should probably return 1 if requesting V4DI when we have no DI
 10442 but do have V2DI; that case, however, is probably very unlikely. */
10443
10444 /* If we have support for the inner mode, we can safely emulate it.
 10445 We may not have V2DI, but we can emulate with a pair of DIs. */
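  /* Illustrative assumption: a V2DI move on a target without V2DI
     support can be emitted as two DImode moves, one per 64-bit half,
     which is why support for the inner mode suffices here.  */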
10446 return mov_optab->handlers[innermode].insn_code != CODE_FOR_nothing;
10447}
10448
d744e06e
AH
10449/* Return a CONST_VECTOR rtx for a VECTOR_CST tree. */
10450static rtx
10451const_vector_from_tree (exp)
10452 tree exp;
10453{
10454 rtvec v;
10455 int units, i;
10456 tree link, elt;
10457 enum machine_mode inner, mode;
10458
10459 mode = TYPE_MODE (TREE_TYPE (exp));
10460
10461 if (is_zeros_p (exp))
10462 return CONST0_RTX (mode);
10463
10464 units = GET_MODE_NUNITS (mode);
10465 inner = GET_MODE_INNER (mode);
10466
10467 v = rtvec_alloc (units);
10468
10469 link = TREE_VECTOR_CST_ELTS (exp);
10470 for (i = 0; link; link = TREE_CHAIN (link), ++i)
10471 {
10472 elt = TREE_VALUE (link);
10473
10474 if (TREE_CODE (elt) == REAL_CST)
10475 RTVEC_ELT (v, i) = CONST_DOUBLE_FROM_REAL_VALUE (TREE_REAL_CST (elt),
10476 inner);
10477 else
10478 RTVEC_ELT (v, i) = immed_double_const (TREE_INT_CST_LOW (elt),
10479 TREE_INT_CST_HIGH (elt),
10480 inner);
10481 }
10482
10483 return gen_rtx_raw_CONST_VECTOR (mode, v);
10484}
10485
e2500fed 10486#include "gt-expr.h"