/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999,
   2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "recog.h"
#include "reload.h"
#include "output.h"
#include "typeclass.h"
#include "toplev.h"
#include "ggc.h"
#include "intl.h"
#include "tm_p.h"

#ifndef ACCUMULATE_OUTGOING_ARGS
#define ACCUMULATE_OUTGOING_ARGS 0
#endif

/* Supply a default definition for PUSH_ARGS.  */
#ifndef PUSH_ARGS
#ifdef PUSH_ROUNDING
#define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
#else
#define PUSH_ARGS 0
#endif
#endif

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

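/* Illustrative note (not part of the original source): on a typical
   target where the stack grows downward, the default above makes a push

     (set (mem (pre_dec (reg sp))) ...)

   i.e. STACK_PUSH_CODE resolves to PRE_DEC; and if arguments grow
   downward as well, the two `defined' tests above compare equal, so
   PUSH_ARGS_REVERSED stays undefined and arguments are processed from
   first to last.  */
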
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* Hook called by safe_from_p for language-specific tree codes.  It is
   up to the language front-end to install a hook if it has any such
   codes that safe_from_p needs to know about.  Since safe_from_p will
   recursively explore the TREE_OPERANDs of an expression, this hook
   should not reexamine those pieces.  This routine may recursively
   call safe_from_p; it should always pass `0' as the TOP_P
   parameter.  */
int (*lang_safe_from_p) PARAMS ((rtx, tree));

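/* Illustrative sketch (not part of the original source): a front end
   with tree codes of its own might install the hook as

     static int my_safe_from_p PARAMS ((rtx, tree));
     ...
     lang_safe_from_p = my_safe_from_p;

   MY_SAFE_FROM_P is hypothetical; whatever is installed must pass 0
   for TOP_P on any recursive call to safe_from_p.  */
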
/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  int reverse;
};

/* This structure is used by store_by_pieces to describe the store to
   be performed.  */

struct store_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  unsigned HOST_WIDE_INT len;
  HOST_WIDE_INT offset;
  /* Callback returning the rtx of the constant to store at a given
     offset, in a given mode, using CONSTFUNDATA as opaque context.  */
  rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
  PTR constfundata;
  int reverse;
};

extern struct obstack permanent_obstack;

static rtx get_push_address	PARAMS ((int));

static rtx enqueue_insn		PARAMS ((rtx, rtx));
static unsigned HOST_WIDE_INT move_by_pieces_ninsns
				PARAMS ((unsigned HOST_WIDE_INT,
					 unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static rtx clear_by_pieces_1	PARAMS ((PTR, HOST_WIDE_INT,
					 enum machine_mode));
static void clear_by_pieces	PARAMS ((rtx, unsigned HOST_WIDE_INT,
					 unsigned int));
static void store_by_pieces_1	PARAMS ((struct store_by_pieces *,
					 unsigned int));
static void store_by_pieces_2	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct store_by_pieces *));
static rtx get_subtarget	PARAMS ((rtx));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, unsigned int, int,
					     int));
static void store_constructor	PARAMS ((tree, rtx, unsigned int, int,
					 HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int,
					 unsigned int, HOST_WIDE_INT, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static tree save_noncopied_parts PARAMS ((tree, tree));
static tree init_noncopied_parts PARAMS ((tree, tree));
static int fixed_type_p		PARAMS ((tree));
static rtx var_rtx		PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment	PARAMS ((tree, int, int));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < (unsigned int) MOVE_RATIO)
#endif

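/* Worked example (illustrative, not part of the original source): on a
   32-bit target with MOVE_MAX == 4, a word-aligned 16-byte copy takes 4
   SImode moves, so MOVE_BY_PIECES_P (16, 32) is true under the default
   MOVE_RATIO of 15 and the copy is expanded inline; with -Os the ratio
   drops to 3 and the same copy falls through to movstr or a libcall.  */
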
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;

  start_sequence ();

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx_REG (mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  end_sequence ();
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  stack_pointer_delta = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Mark the rtx fields of the expr status P for garbage collection.  */

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

/* Free the expr status of function F.  */

void
free_expr_status (f)
     struct function *f;
{
  free (f->expr);
  f->expr = NULL;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

          MEM_COPY_ATTRIBUTES (new, x);

          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (new));
              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }
          return new;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
        {
          QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
          emit_insn (QUEUED_BODY (p));
        }
      else
        QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
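
/* Illustrative sketch (not part of the original source): a typical
   post-increment expansion uses all three queue primitives together.
   Hypothetical fragment:

     rtx q = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
     rtx old = protect_from_queue (q, 0);
     ...build insns that must see the pre-increment value using OLD...
     emit_queue ();

   The real callers are expand_increment and the expansion routines
   later in this file.  */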
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (VECTOR_MODE_P (to_mode) || VECTOR_MODE_P (from_mode))
    {
      if (GET_MODE_BITSIZE (from_mode) != GET_MODE_BITSIZE (to_mode))
        abort ();

      if (VECTOR_MODE_P (to_mode))
        from = gen_rtx_SUBREG (to_mode, from, 0);
      else
        to = gen_rtx_SUBREG (from_mode, to, 0);

      emit_move_insn (to, from);
      return;
    }

  if (to_real != from_real)
    abort ();

  if (to_real)
    {
      rtx value, insns;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
        {
          /* Try converting directly if the insn is supported.  */
          if ((code = can_extend_p (to_mode, from_mode, 0))
              != CODE_FOR_nothing)
            {
              emit_unop_insn (code, to, from, UNKNOWN);
              return;
            }
        }

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        default:
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      start_sequence ();
      value = emit_library_call_value (libcall, NULL_RTX, LCT_CONST, to_mode,
                                       1, from, from_mode);
      insns = get_insns ();
      end_sequence ();
      emit_libcall_block (insns, to, value, gen_rtx_FLOAT_TRUNCATE (to_mode,
                                                                    from));
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220.  */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
        {
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
        {
          from = convert_to_mode (QImode, from, unsignedp);
          from_mode = QImode;
        }
      else
        {
#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
            {
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpqiqi2 */
          abort ();
        }
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (! unsignedp && HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
#ifdef HAVE_zero_extendpsisi2
          if (unsignedp && HAVE_zero_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_zero_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_zero_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
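
/* Illustrative sketch (not part of the original source): to widen a
   QImode value SRC into a fresh SImode register with zero extension,
   a caller would write

     rtx dst = gen_reg_rtx (SImode);
     convert_move (dst, src, 1);

   SRC is hypothetical; convert_move then picks a zero_extendqisi2
   insn, an intermediate mode, or a shift pair, whichever the target
   supports.  */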

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return GEN_INT (val);
        }

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
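
/* Illustrative note (not part of the original source): constants can be
   converted without emitting any insn.  Assuming a 32-bit or wider
   HOST_WIDE_INT, something like

     convert_modes (HImode, SImode, GEN_INT (0x12345), 0)

   is expected to fold to GEN_INT (0x2345) through gen_lowpart rather
   than allocate a register.  */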
\f
/* This macro is used to determine the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     unsigned HOST_WIDE_INT len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse)
    data.offset = len;
  data.len = len;

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
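
/* Illustrative note (not part of the original source): when the
   destination address is PRE_DEC or POST_DEC, data.reverse is set above,
   data.offset starts at LEN, and move_by_pieces_1 walks the block from
   the end backward, decrementing the offset before each piece.  */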

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is maximum alignment we can assume.  */

static unsigned HOST_WIDE_INT
move_by_pieces_ninsns (l, align)
     unsigned HOST_WIDE_INT l;
     unsigned int align;
{
  unsigned HOST_WIDE_INT n_insns = 0;
  unsigned HOST_WIDE_INT max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  if (l)
    abort ();
  return n_insns;
}
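
/* Worked example (illustrative, not part of the original source): with
   MOVE_MAX == 4 and 32-bit alignment, move_by_pieces_ninsns (11, 32)
   counts 2 SImode moves (8 bytes), then 1 HImode move (2 bytes), then
   1 QImode move, and returns 4.  */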

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  unsigned int size = GET_MODE_SIZE (mode);
  rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      if (data->autinc_to)
        {
          to1 = gen_rtx_MEM (mode, data->to_addr);
          MEM_COPY_ATTRIBUTES (to1, data->to);
        }
      else
        to1 = change_address (data->to, mode,
                              plus_constant (data->to_addr, data->offset));

      if (data->autinc_from)
        {
          from1 = gen_rtx_MEM (mode, data->from_addr);
          MEM_COPY_ATTRIBUTES (from1, data->from);
        }
      else
        from1 = change_address (data->from, mode,
                                plus_constant (data->from_addr, data->offset));

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, from1));

      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
1602\f
1603/* Emit code to move a block Y to a block X.
1604 This may be done with string-move instructions,
1605 with multiple scalar move instructions, or with a library call.
1606
1607 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1608 with mode BLKmode.
1609 SIZE is an rtx that says how long they are.
19caa751 1610 ALIGN is the maximum alignment we can assume they have.
bbf6f052 1611
e9a25f70
JL
1612 Return the address of the new block, if memcpy is called and returns it,
1613 0 otherwise. */
1614
1615rtx
bbf6f052
RK
1616emit_block_move (x, y, size, align)
1617 rtx x, y;
1618 rtx size;
729a2125 1619 unsigned int align;
bbf6f052 1620{
e9a25f70 1621 rtx retval = 0;
52cf7115
JL
1622#ifdef TARGET_MEM_FUNCTIONS
1623 static tree fn;
1624 tree call_expr, arg_list;
1625#endif
e9a25f70 1626
bbf6f052
RK
1627 if (GET_MODE (x) != BLKmode)
1628 abort ();
1629
1630 if (GET_MODE (y) != BLKmode)
1631 abort ();
1632
1633 x = protect_from_queue (x, 1);
1634 y = protect_from_queue (y, 0);
5d901c31 1635 size = protect_from_queue (size, 0);
bbf6f052
RK
1636
1637 if (GET_CODE (x) != MEM)
1638 abort ();
1639 if (GET_CODE (y) != MEM)
1640 abort ();
1641 if (size == 0)
1642 abort ();
1643
fbe1758d 1644 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
bbf6f052
RK
1645 move_by_pieces (x, y, INTVAL (size), align);
1646 else
1647 {
1648 /* Try the most limited insn first, because there's no point
1649 including more than one in the machine description unless
1650 the more limited one has some advantage. */
266007a7 1651
19caa751 1652 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
266007a7
RK
1653 enum machine_mode mode;
1654
3ef1eef4
RK
1655 /* Since this is a move insn, we don't care about volatility. */
1656 volatile_ok = 1;
1657
266007a7
RK
1658 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1659 mode = GET_MODE_WIDER_MODE (mode))
bbf6f052 1660 {
266007a7 1661 enum insn_code code = movstr_optab[(int) mode];
a995e389 1662 insn_operand_predicate_fn pred;
266007a7
RK
1663
1664 if (code != CODE_FOR_nothing
803090c4
RK
1665 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1666 here because if SIZE is less than the mode mask, as it is
8008b228 1667 returned by the macro, it will definitely be less than the
803090c4 1668 actual mode mask. */
8ca00751
RK
1669 && ((GET_CODE (size) == CONST_INT
1670 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 1671 <= (GET_MODE_MASK (mode) >> 1)))
8ca00751 1672 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
a995e389
RH
1673 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1674 || (*pred) (x, BLKmode))
1675 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1676 || (*pred) (y, BLKmode))
1677 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1678 || (*pred) (opalign, VOIDmode)))
bbf6f052 1679 {
1ba1e2a8 1680 rtx op2;
266007a7
RK
1681 rtx last = get_last_insn ();
1682 rtx pat;
1683
1ba1e2a8 1684 op2 = convert_to_mode (mode, size, 1);
a995e389
RH
1685 pred = insn_data[(int) code].operand[2].predicate;
1686 if (pred != 0 && ! (*pred) (op2, mode))
266007a7
RK
1687 op2 = copy_to_mode_reg (mode, op2);
1688
1689 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1690 if (pat)
1691 {
1692 emit_insn (pat);
3ef1eef4 1693 volatile_ok = 0;
e9a25f70 1694 return 0;
266007a7
RK
1695 }
1696 else
1697 delete_insns_since (last);
bbf6f052
RK
1698 }
1699 }
bbf6f052 1700
3ef1eef4
RK
1701 volatile_ok = 0;
1702
4bc973ae
JL
1703 /* X, Y, or SIZE may have been passed through protect_from_queue.
1704
1705 It is unsafe to save the value generated by protect_from_queue
1706 and reuse it later. Consider what happens if emit_queue is
1707 called before the return value from protect_from_queue is used.
1708
1709 Expansion of the CALL_EXPR below will call emit_queue before
1710 we are finished emitting RTL for argument setup. So if we are
1711 not careful we could get the wrong value for an argument.
1712
1713 To avoid this problem we go ahead and emit code to copy X, Y &
1714 SIZE into new pseudos. We can then place those new pseudos
1715 into an RTL_EXPR and use them later, even after a call to
3a94c984 1716 emit_queue.
4bc973ae
JL
1717
1718 Note this is not strictly needed for library calls since they
1719 do not call emit_queue before loading their arguments. However,
1720 we may need to have library calls call emit_queue in the future
1721 since failing to do so could cause problems for targets which
1722 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1723 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1724 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1725
1726#ifdef TARGET_MEM_FUNCTIONS
1727 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1728#else
1729 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1730 TREE_UNSIGNED (integer_type_node));
f3dc586a 1731 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
4bc973ae
JL
1732#endif
1733
bbf6f052 1734#ifdef TARGET_MEM_FUNCTIONS
52cf7115
JL
1735 /* It is incorrect to use the libcall calling conventions to call
1736 memcpy in this context.
1737
1738 This could be a user call to memcpy and the user may wish to
1739 examine the return value from memcpy.
1740
1741 For targets where libcalls and normal calls have different conventions
3a94c984 1742 for returning pointers, we could end up generating incorrect code.
52cf7115
JL
1743
1744 So instead of using a libcall sequence we build up a suitable
1745 CALL_EXPR and expand the call in the normal fashion. */
1746 if (fn == NULL_TREE)
1747 {
1748 tree fntype;
1749
1750 /* This was copied from except.c, I don't know if all this is
1751 necessary in this context or not. */
1752 fn = get_identifier ("memcpy");
52cf7115
JL
1753 fntype = build_pointer_type (void_type_node);
1754 fntype = build_function_type (fntype, NULL_TREE);
1755 fn = build_decl (FUNCTION_DECL, fn, fntype);
3a94c984 1756 ggc_add_tree_root (&fn, 1);
52cf7115
JL
1757 DECL_EXTERNAL (fn) = 1;
1758 TREE_PUBLIC (fn) = 1;
1759 DECL_ARTIFICIAL (fn) = 1;
6c418184 1760 make_decl_rtl (fn, NULL_PTR);
52cf7115 1761 assemble_external (fn);
52cf7115
JL
1762 }
1763
3a94c984 1764 /* We need to make an argument list for the function call.
52cf7115
JL
1765
1766 memcpy has three arguments, the first two are void * addresses and
1767 the last is a size_t byte count for the copy. */
1768 arg_list
1769 = build_tree_list (NULL_TREE,
4bc973ae 1770 make_tree (build_pointer_type (void_type_node), x));
52cf7115
JL
1771 TREE_CHAIN (arg_list)
1772 = build_tree_list (NULL_TREE,
4bc973ae 1773 make_tree (build_pointer_type (void_type_node), y));
52cf7115
JL
1774 TREE_CHAIN (TREE_CHAIN (arg_list))
1775 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1776 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1777
1778 /* Now we have to build up the CALL_EXPR itself. */
1779 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1780 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1781 call_expr, arg_list, NULL_TREE);
1782 TREE_SIDE_EFFECTS (call_expr) = 1;
1783
1784 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
bbf6f052 1785#else
ebb1b59a 1786 emit_library_call (bcopy_libfunc, LCT_NORMAL,
fe7bbd2a 1787 VOIDmode, 3, y, Pmode, x, Pmode,
3b6f75e2
JW
1788 convert_to_mode (TYPE_MODE (integer_type_node), size,
1789 TREE_UNSIGNED (integer_type_node)),
1790 TYPE_MODE (integer_type_node));
bbf6f052
RK
1791#endif
1792 }
e9a25f70
JL
1793
1794 return retval;
bbf6f052
RK
1795}
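/* Illustrative note (not part of the original source): the code above gives
   emit_block_move a three-tier fallback, and a hypothetical caller sees only
   the single entry point:

     rtx size = GEN_INT (nbytes);   /* nbytes is an invented name.  */
     emit_block_move (dst_mem, src_mem, size, align);

   First a movstrM pattern is tried for each integer mode; failing that,
   TARGET_MEM_FUNCTIONS targets expand a real CALL_EXPR to memcpy (so a
   user-visible memcpy keeps its return-value convention), and all other
   targets fall back to a bcopy libcall.  */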
1796\f
1797/* Copy all or part of a value X into registers starting at REGNO.
1798 The number of registers to be filled is NREGS. */
1799
1800void
1801move_block_to_reg (regno, x, nregs, mode)
1802 int regno;
1803 rtx x;
1804 int nregs;
1805 enum machine_mode mode;
1806{
1807 int i;
381127e8 1808#ifdef HAVE_load_multiple
3a94c984 1809 rtx pat;
381127e8
RL
1810 rtx last;
1811#endif
bbf6f052 1812
72bb9717
RK
1813 if (nregs == 0)
1814 return;
1815
bbf6f052
RK
1816 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1817 x = validize_mem (force_const_mem (mode, x));
1818
1819 /* See if the machine can do this with a load multiple insn. */
1820#ifdef HAVE_load_multiple
c3a02afe 1821 if (HAVE_load_multiple)
bbf6f052 1822 {
c3a02afe 1823 last = get_last_insn ();
38a448ca 1824 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
c3a02afe
RK
1825 GEN_INT (nregs));
1826 if (pat)
1827 {
1828 emit_insn (pat);
1829 return;
1830 }
1831 else
1832 delete_insns_since (last);
bbf6f052 1833 }
bbf6f052
RK
1834#endif
1835
1836 for (i = 0; i < nregs; i++)
38a448ca 1837 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
bbf6f052
RK
1838 operand_subword_force (x, i, mode));
1839}
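/* Usage sketch (hypothetical, not from the source): copying a two-word
   value into consecutive hard registers starting at regno 4.

     move_block_to_reg (4, val_rtx, 2, DImode);

   val_rtx and the register number are invented for the example.  A
   load_multiple pattern is preferred when the target provides one;
   otherwise each word is moved via operand_subword_force.  */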
1840
1841/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
0040593d
JW
1842 The number of registers to be filled is NREGS. SIZE indicates the number
1843 of bytes in the object X. */
1844
bbf6f052 1845void
0040593d 1846move_block_from_reg (regno, x, nregs, size)
bbf6f052
RK
1847 int regno;
1848 rtx x;
1849 int nregs;
0040593d 1850 int size;
bbf6f052
RK
1851{
1852 int i;
381127e8 1853#ifdef HAVE_store_multiple
3a94c984 1854 rtx pat;
381127e8
RL
1855 rtx last;
1856#endif
58a32c5c 1857 enum machine_mode mode;
bbf6f052 1858
2954d7db
RK
1859 if (nregs == 0)
1860 return;
1861
58a32c5c
DE
1862 /* If SIZE is that of a mode no bigger than a word, just use that
1863 mode's store operation. */
1864 if (size <= UNITS_PER_WORD
1865 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1866 {
1867 emit_move_insn (change_address (x, mode, NULL),
38a448ca 1868 gen_rtx_REG (mode, regno));
58a32c5c
DE
1869 return;
1870 }
3a94c984 1871
0040593d 1872 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
58a32c5c
DE
1873 to the left before storing to memory. Note that the previous test
1874 doesn't handle all cases (e.g. SIZE == 3). */
0040593d
JW
1875 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1876 {
1877 rtx tem = operand_subword (x, 0, 1, BLKmode);
1878 rtx shift;
1879
1880 if (tem == 0)
1881 abort ();
1882
1883 shift = expand_shift (LSHIFT_EXPR, word_mode,
38a448ca 1884 gen_rtx_REG (word_mode, regno),
0040593d
JW
1885 build_int_2 ((UNITS_PER_WORD - size)
1886 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1887 emit_move_insn (tem, shift);
1888 return;
1889 }
1890
bbf6f052
RK
1891 /* See if the machine can do this with a store multiple insn. */
1892#ifdef HAVE_store_multiple
c3a02afe 1893 if (HAVE_store_multiple)
bbf6f052 1894 {
c3a02afe 1895 last = get_last_insn ();
38a448ca 1896 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
c3a02afe
RK
1897 GEN_INT (nregs));
1898 if (pat)
1899 {
1900 emit_insn (pat);
1901 return;
1902 }
1903 else
1904 delete_insns_since (last);
bbf6f052 1905 }
bbf6f052
RK
1906#endif
1907
1908 for (i = 0; i < nregs; i++)
1909 {
1910 rtx tem = operand_subword (x, i, 1, BLKmode);
1911
1912 if (tem == 0)
1913 abort ();
1914
38a448ca 1915 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
1916 }
1917}
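/* Worked example (illustrative): on a BYTES_BIG_ENDIAN target with
   UNITS_PER_WORD == 4, a SIZE == 3 block takes the shift path above:

     shift = (UNITS_PER_WORD - size) * BITS_PER_UNIT
           = (4 - 3) * 8 = 8,

   so the register is shifted left 8 bits and the three significant bytes
   land in the low-addressed bytes of the stored word.  */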
1918
aac5cc16
RH
1919/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1920 registers represented by a PARALLEL. SSIZE represents the total size of
1921 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1922 SRC in bits. */
 1923/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1924 the balance will be in what would be the low-order memory addresses, i.e.
1925 left justified for big endian, right justified for little endian. This
1926 happens to be true for the targets currently using this support. If this
1927 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1928 would be needed. */
fffa9c1d
JW
1929
1930void
aac5cc16
RH
1931emit_group_load (dst, orig_src, ssize, align)
1932 rtx dst, orig_src;
729a2125
RK
1933 unsigned int align;
1934 int ssize;
fffa9c1d 1935{
aac5cc16
RH
1936 rtx *tmps, src;
1937 int start, i;
fffa9c1d 1938
aac5cc16 1939 if (GET_CODE (dst) != PARALLEL)
fffa9c1d
JW
1940 abort ();
1941
1942 /* Check for a NULL entry, used to indicate that the parameter goes
1943 both on the stack and in registers. */
aac5cc16
RH
1944 if (XEXP (XVECEXP (dst, 0, 0), 0))
1945 start = 0;
fffa9c1d 1946 else
aac5cc16
RH
1947 start = 1;
1948
3a94c984 1949 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
aac5cc16
RH
1950
1951 /* If we won't be loading directly from memory, protect the real source
1952 from strange tricks we might play. */
1953 src = orig_src;
2ee5437b 1954 if (GET_CODE (src) != MEM && ! CONSTANT_P (src))
aac5cc16 1955 {
4636c0a2 1956 if (GET_MODE (src) == VOIDmode)
8b725198
JJ
1957 src = gen_reg_rtx (GET_MODE (dst));
1958 else
1959 src = gen_reg_rtx (GET_MODE (orig_src));
aac5cc16
RH
1960 emit_move_insn (src, orig_src);
1961 }
1962
1963 /* Process the pieces. */
1964 for (i = start; i < XVECLEN (dst, 0); i++)
1965 {
1966 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
770ae6cc
RK
1967 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1968 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
1969 int shift = 0;
1970
1971 /* Handle trailing fragments that run over the size of the struct. */
8752c357 1972 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
aac5cc16
RH
1973 {
1974 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1975 bytelen = ssize - bytepos;
1976 if (bytelen <= 0)
729a2125 1977 abort ();
aac5cc16
RH
1978 }
1979
1980 /* Optimize the access just a bit. */
1981 if (GET_CODE (src) == MEM
19caa751 1982 && align >= GET_MODE_ALIGNMENT (mode)
729a2125 1983 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16
RH
1984 && bytelen == GET_MODE_SIZE (mode))
1985 {
1986 tmps[i] = gen_reg_rtx (mode);
1987 emit_move_insn (tmps[i],
1988 change_address (src, mode,
1989 plus_constant (XEXP (src, 0),
1990 bytepos)));
fffa9c1d 1991 }
7c4a6db0
JW
1992 else if (GET_CODE (src) == CONCAT)
1993 {
1994 if (bytepos == 0
1995 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1996 tmps[i] = XEXP (src, 0);
8752c357 1997 else if (bytepos == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
7c4a6db0
JW
1998 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1999 tmps[i] = XEXP (src, 1);
2000 else
2001 abort ();
2002 }
2ee5437b
RH
2003 else if ((CONSTANT_P (src)
2004 && (GET_MODE (src) == VOIDmode || GET_MODE (src) == mode))
2005 || (GET_CODE (src) == REG && GET_MODE (src) == mode))
2006 tmps[i] = src;
fffa9c1d 2007 else
19caa751
RK
2008 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
2009 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
2010 mode, mode, align, ssize);
fffa9c1d 2011
aac5cc16 2012 if (BYTES_BIG_ENDIAN && shift)
19caa751
RK
2013 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
2014 tmps[i], 0, OPTAB_WIDEN);
fffa9c1d 2015 }
19caa751 2016
3a94c984 2017 emit_queue ();
aac5cc16
RH
2018
2019 /* Copy the extracted pieces into the proper (probable) hard regs. */
2020 for (i = start; i < XVECLEN (dst, 0); i++)
2021 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
fffa9c1d
JW
2022}
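/* Illustrative shape of the DST argument (assumed values, not from the
   source): a PARALLEL splitting a 16-byte value across two hypothetical
   8-byte hard registers 32 and 33.

     dst = gen_rtx_PARALLEL
             (VOIDmode,
              gen_rtvec (2,
                         gen_rtx_EXPR_LIST (VOIDmode,
                                            gen_rtx_REG (DImode, 32),
                                            const0_rtx),
                         gen_rtx_EXPR_LIST (VOIDmode,
                                            gen_rtx_REG (DImode, 33),
                                            GEN_INT (8))));

   Each EXPR_LIST pairs a register with the byte offset it covers; a null
   register in element 0 marks a parameter that also lives on the stack,
   which is why the loop may start at 1.  */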
2023
aac5cc16
RH
2024/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
2025 registers represented by a PARALLEL. SSIZE represents the total size of
2026 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
fffa9c1d
JW
2027
2028void
aac5cc16
RH
2029emit_group_store (orig_dst, src, ssize, align)
2030 rtx orig_dst, src;
729a2125
RK
2031 int ssize;
2032 unsigned int align;
fffa9c1d 2033{
aac5cc16
RH
2034 rtx *tmps, dst;
2035 int start, i;
fffa9c1d 2036
aac5cc16 2037 if (GET_CODE (src) != PARALLEL)
fffa9c1d
JW
2038 abort ();
2039
2040 /* Check for a NULL entry, used to indicate that the parameter goes
2041 both on the stack and in registers. */
aac5cc16
RH
2042 if (XEXP (XVECEXP (src, 0, 0), 0))
2043 start = 0;
fffa9c1d 2044 else
aac5cc16
RH
2045 start = 1;
2046
3a94c984 2047 tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
fffa9c1d 2048
aac5cc16
RH
2049 /* Copy the (probable) hard regs into pseudos. */
2050 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 2051 {
aac5cc16
RH
2052 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2053 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2054 emit_move_insn (tmps[i], reg);
2055 }
3a94c984 2056 emit_queue ();
fffa9c1d 2057
aac5cc16
RH
2058 /* If we won't be storing directly into memory, protect the real destination
2059 from strange tricks we might play. */
2060 dst = orig_dst;
10a9f2be
JW
2061 if (GET_CODE (dst) == PARALLEL)
2062 {
2063 rtx temp;
2064
2065 /* We can get a PARALLEL dst if there is a conditional expression in
2066 a return statement. In that case, the dst and src are the same,
2067 so no action is necessary. */
2068 if (rtx_equal_p (dst, src))
2069 return;
2070
2071 /* It is unclear if we can ever reach here, but we may as well handle
2072 it. Allocate a temporary, and split this into a store/load to/from
2073 the temporary. */
2074
2075 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2076 emit_group_store (temp, src, ssize, align);
2077 emit_group_load (dst, temp, ssize, align);
2078 return;
2079 }
2080 else if (GET_CODE (dst) != MEM)
aac5cc16
RH
2081 {
2082 dst = gen_reg_rtx (GET_MODE (orig_dst));
2083 /* Make life a bit easier for combine. */
2084 emit_move_insn (dst, const0_rtx);
2085 }
aac5cc16
RH
2086
2087 /* Process the pieces. */
2088 for (i = start; i < XVECLEN (src, 0); i++)
2089 {
770ae6cc 2090 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
aac5cc16 2091 enum machine_mode mode = GET_MODE (tmps[i]);
770ae6cc 2092 unsigned int bytelen = GET_MODE_SIZE (mode);
aac5cc16
RH
2093
2094 /* Handle trailing fragments that run over the size of the struct. */
8752c357 2095 if (ssize >= 0 && bytepos + (HOST_WIDE_INT) bytelen > ssize)
71bc0330 2096 {
aac5cc16
RH
2097 if (BYTES_BIG_ENDIAN)
2098 {
2099 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2100 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2101 tmps[i], 0, OPTAB_WIDEN);
2102 }
2103 bytelen = ssize - bytepos;
71bc0330 2104 }
fffa9c1d 2105
aac5cc16
RH
2106 /* Optimize the access just a bit. */
2107 if (GET_CODE (dst) == MEM
19caa751 2108 && align >= GET_MODE_ALIGNMENT (mode)
729a2125 2109 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16 2110 && bytelen == GET_MODE_SIZE (mode))
729a2125
RK
2111 emit_move_insn (change_address (dst, mode,
2112 plus_constant (XEXP (dst, 0),
2113 bytepos)),
2114 tmps[i]);
aac5cc16 2115 else
729a2125 2116 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
aac5cc16 2117 mode, tmps[i], align, ssize);
fffa9c1d 2118 }
729a2125 2119
3a94c984 2120 emit_queue ();
aac5cc16
RH
2121
2122 /* Copy from the pseudo into the (probable) hard reg. */
2123 if (GET_CODE (dst) == REG)
2124 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
2125}
2126
c36fce9a
GRK
2127/* Generate code to copy a BLKmode object of TYPE out of a
2128 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2129 is null, a stack temporary is created. TGTBLK is returned.
2130
2131 The primary purpose of this routine is to handle functions
2132 that return BLKmode structures in registers. Some machines
2133 (the PA for example) want to return all small structures
3a94c984 2134 in registers regardless of the structure's alignment. */
c36fce9a
GRK
2135
2136rtx
19caa751 2137copy_blkmode_from_reg (tgtblk, srcreg, type)
c36fce9a
GRK
2138 rtx tgtblk;
2139 rtx srcreg;
2140 tree type;
2141{
19caa751
RK
2142 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2143 rtx src = NULL, dst = NULL;
2144 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2145 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2146
2147 if (tgtblk == 0)
2148 {
1da68f56
RK
2149 tgtblk = assign_temp (build_qualified_type (type,
2150 (TYPE_QUALS (type)
2151 | TYPE_QUAL_CONST)),
2152 0, 1, 1);
19caa751
RK
2153 preserve_temp_slots (tgtblk);
2154 }
3a94c984 2155
19caa751
RK
2156 /* This code assumes srcreg is at least a full word. If it isn't,
2157 copy it into a new pseudo which is a full word. */
2158 if (GET_MODE (srcreg) != BLKmode
2159 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2160 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2161
2162 /* Structures whose size is not a multiple of a word are aligned
2163 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2164 machine, this means we must skip the empty high order bytes when
2165 calculating the bit offset. */
2166 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2167 big_endian_correction
2168 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
2169
 2170	  /* Copy the structure BITSIZE bits at a time.
3a94c984 2171
19caa751
RK
2172 We could probably emit more efficient code for machines which do not use
2173 strict alignment, but it doesn't seem worth the effort at the current
2174 time. */
2175 for (bitpos = 0, xbitpos = big_endian_correction;
2176 bitpos < bytes * BITS_PER_UNIT;
2177 bitpos += bitsize, xbitpos += bitsize)
2178 {
3a94c984 2179 /* We need a new source operand each time xbitpos is on a
19caa751
RK
2180 word boundary and when xbitpos == big_endian_correction
2181 (the first time through). */
2182 if (xbitpos % BITS_PER_WORD == 0
2183 || xbitpos == big_endian_correction)
2184 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2185
2186 /* We need a new destination operand each time bitpos is on
2187 a word boundary. */
2188 if (bitpos % BITS_PER_WORD == 0)
2189 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
3a94c984 2190
19caa751
RK
2191 /* Use xbitpos for the source extraction (right justified) and
 2192	 bitpos for the destination store (left justified).  */
2193 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2194 extract_bit_field (src, bitsize,
2195 xbitpos % BITS_PER_WORD, 1,
2196 NULL_RTX, word_mode, word_mode,
2197 bitsize, BITS_PER_WORD),
2198 bitsize, BITS_PER_WORD);
2199 }
2200
2201 return tgtblk;
c36fce9a
GRK
2202}
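/* Worked example (illustrative): a 6-byte structure on a BYTES_BIG_ENDIAN
   target with BITS_PER_WORD == 32 gives

     big_endian_correction = 32 - (6 % 4) * 8 = 16,

   so the first extraction begins 16 bits into the source word, skipping
   the unused high-order bytes.  */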
2203
94b25f81
RK
2204/* Add a USE expression for REG to the (possibly empty) list pointed
2205 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2206
2207void
b3f8cf4a
RK
2208use_reg (call_fusage, reg)
2209 rtx *call_fusage, reg;
2210{
0304dfbb
DE
2211 if (GET_CODE (reg) != REG
2212 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
3a94c984 2213 abort ();
b3f8cf4a
RK
2214
2215 *call_fusage
38a448ca
RH
2216 = gen_rtx_EXPR_LIST (VOIDmode,
2217 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2218}
2219
94b25f81
RK
2220/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2221 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2222
2223void
0304dfbb
DE
2224use_regs (call_fusage, regno, nregs)
2225 rtx *call_fusage;
bbf6f052
RK
2226 int regno;
2227 int nregs;
2228{
0304dfbb 2229 int i;
bbf6f052 2230
0304dfbb
DE
2231 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2232 abort ();
2233
2234 for (i = 0; i < nregs; i++)
38a448ca 2235 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
bbf6f052 2236}
fffa9c1d
JW
2237
2238/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2239 PARALLEL REGS. This is for calls that pass values in multiple
2240 non-contiguous locations. The Irix 6 ABI has examples of this. */
2241
2242void
2243use_group_regs (call_fusage, regs)
2244 rtx *call_fusage;
2245 rtx regs;
2246{
2247 int i;
2248
6bd35f86
DE
2249 for (i = 0; i < XVECLEN (regs, 0); i++)
2250 {
2251 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2252
6bd35f86
DE
2253 /* A NULL entry means the parameter goes both on the stack and in
2254 registers. This can also be a MEM for targets that pass values
2255 partially on the stack and partially in registers. */
e9a25f70 2256 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
2257 use_reg (call_fusage, reg);
2258 }
fffa9c1d 2259}
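/* Usage sketch (hypothetical): a call expander recording argument
   registers so flow analysis keeps them live until the call.

     rtx fusage = 0;
     use_reg (&fusage, gen_rtx_REG (SImode, 4));
     use_regs (&fusage, 5, 2);

   The accumulated list becomes the CALL_INSN_FUNCTION_USAGE of the emitted
   call insn; the register numbers here are invented.  */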
bbf6f052 2260\f
57814e5e
JJ
 2261/* Determine whether it is feasible to store LEN bytes generated by
     CONSTFUN with several move instructions (as store_by_pieces below will).
     CONSTFUNDATA is passed to each CONSTFUN call; ALIGN is the maximum
     alignment we can assume.  Return nonzero on success.  */
2262int
2263can_store_by_pieces (len, constfun, constfundata, align)
2264 unsigned HOST_WIDE_INT len;
2265 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2266 PTR constfundata;
2267 unsigned int align;
2268{
98166639 2269 unsigned HOST_WIDE_INT max_size, l;
57814e5e
JJ
2270 HOST_WIDE_INT offset = 0;
2271 enum machine_mode mode, tmode;
2272 enum insn_code icode;
2273 int reverse;
2274 rtx cst;
2275
2276 if (! MOVE_BY_PIECES_P (len, align))
2277 return 0;
2278
2279 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2280 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
2281 align = MOVE_MAX * BITS_PER_UNIT;
2282
2283 /* We would first store what we can in the largest integer mode, then go to
2284 successively smaller modes. */
2285
2286 for (reverse = 0;
2287 reverse <= (HAVE_PRE_DECREMENT || HAVE_POST_DECREMENT);
2288 reverse++)
2289 {
2290 l = len;
2291 mode = VOIDmode;
98166639 2292 max_size = MOVE_MAX_PIECES + 1;
57814e5e
JJ
2293 while (max_size > 1)
2294 {
2295 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2296 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2297 if (GET_MODE_SIZE (tmode) < max_size)
2298 mode = tmode;
2299
2300 if (mode == VOIDmode)
2301 break;
2302
2303 icode = mov_optab->handlers[(int) mode].insn_code;
2304 if (icode != CODE_FOR_nothing
2305 && align >= GET_MODE_ALIGNMENT (mode))
2306 {
2307 unsigned int size = GET_MODE_SIZE (mode);
2308
2309 while (l >= size)
2310 {
2311 if (reverse)
2312 offset -= size;
2313
2314 cst = (*constfun) (constfundata, offset, mode);
2315 if (!LEGITIMATE_CONSTANT_P (cst))
2316 return 0;
2317
2318 if (!reverse)
2319 offset += size;
2320
2321 l -= size;
2322 }
2323 }
2324
2325 max_size = GET_MODE_SIZE (mode);
2326 }
2327
2328 /* The code above should have handled everything. */
2329 if (l != 0)
2330 abort ();
2331 }
2332
2333 return 1;
2334}
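/* Illustrative CONSTFUN (invented name, not part of the source): a callback
   that replicates a single fill byte across a constant of mode MODE, roughly
   what a builtin memset expander could pass to can_store_by_pieces and
   store_by_pieces.

     static rtx
     example_fill_byte (data, offset, mode)
          PTR data;
          HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
          enum machine_mode mode;
     {
       unsigned HOST_WIDE_INT c = *(unsigned char *) data;
       unsigned HOST_WIDE_INT val = 0;
       unsigned int i;

       /* Repeat the byte in each of the GET_MODE_SIZE (mode) bytes.  */
       for (i = 0; i < GET_MODE_SIZE (mode); i++)
         val = (val << BITS_PER_UNIT) | c;
       return GEN_INT (trunc_int_for_mode (val, mode));
     }

   A caller would check can_store_by_pieces (len, example_fill_byte,
   (PTR) &fill, align) before committing to store_by_pieces with the same
   arguments plus TO.  */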
2335
2336/* Generate several move instructions to store LEN bytes generated by
2337 CONSTFUN to block TO. (A MEM rtx with BLKmode). CONSTFUNDATA is a
2338 pointer which will be passed as argument in every CONSTFUN call.
2339 ALIGN is maximum alignment we can assume. */
2340
2341void
2342store_by_pieces (to, len, constfun, constfundata, align)
2343 rtx to;
2344 unsigned HOST_WIDE_INT len;
2345 rtx (*constfun) PARAMS ((PTR, HOST_WIDE_INT, enum machine_mode));
2346 PTR constfundata;
2347 unsigned int align;
2348{
2349 struct store_by_pieces data;
2350
2351 if (! MOVE_BY_PIECES_P (len, align))
2352 abort ();
2353 to = protect_from_queue (to, 1);
2354 data.constfun = constfun;
2355 data.constfundata = constfundata;
2356 data.len = len;
2357 data.to = to;
2358 store_by_pieces_1 (&data, align);
2359}
2360
19caa751
RK
2361/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2362 rtx with BLKmode). The caller must pass TO through protect_from_queue
2363 before calling. ALIGN is maximum alignment we can assume. */
9de08200
RK
2364
2365static void
2366clear_by_pieces (to, len, align)
2367 rtx to;
3bdf5ad1 2368 unsigned HOST_WIDE_INT len;
729a2125 2369 unsigned int align;
9de08200 2370{
57814e5e
JJ
2371 struct store_by_pieces data;
2372
2373 data.constfun = clear_by_pieces_1;
2374 data.constfundata = NULL_PTR;
2375 data.len = len;
2376 data.to = to;
2377 store_by_pieces_1 (&data, align);
2378}
2379
2380/* Callback routine for clear_by_pieces.
2381 Return const0_rtx unconditionally. */
2382
2383static rtx
2384clear_by_pieces_1 (data, offset, mode)
2385 PTR data ATTRIBUTE_UNUSED;
2386 HOST_WIDE_INT offset ATTRIBUTE_UNUSED;
2387 enum machine_mode mode ATTRIBUTE_UNUSED;
2388{
2389 return const0_rtx;
2390}
2391
2392/* Subroutine of clear_by_pieces and store_by_pieces.
2393 Generate several move instructions to store LEN bytes of block TO. (A MEM
2394 rtx with BLKmode). The caller must pass TO through protect_from_queue
2395 before calling. ALIGN is maximum alignment we can assume. */
2396
2397static void
2398store_by_pieces_1 (data, align)
2399 struct store_by_pieces *data;
2400 unsigned int align;
2401{
2402 rtx to_addr = XEXP (data->to, 0);
3bdf5ad1 2403 unsigned HOST_WIDE_INT max_size = MOVE_MAX_PIECES + 1;
fbe1758d
AM
2404 enum machine_mode mode = VOIDmode, tmode;
2405 enum insn_code icode;
9de08200 2406
57814e5e
JJ
2407 data->offset = 0;
2408 data->to_addr = to_addr;
2409 data->autinc_to
9de08200
RK
2410 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2411 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2412
57814e5e
JJ
2413 data->explicit_inc_to = 0;
2414 data->reverse
9de08200 2415 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
57814e5e
JJ
2416 if (data->reverse)
2417 data->offset = data->len;
9de08200 2418
57814e5e 2419 /* If storing requires more than two move insns,
9de08200
RK
2420 copy addresses to registers (to make displacements shorter)
2421 and use post-increment if available. */
57814e5e
JJ
2422 if (!data->autinc_to
2423 && move_by_pieces_ninsns (data->len, align) > 2)
9de08200 2424 {
3a94c984 2425 /* Determine the main mode we'll be using. */
fbe1758d
AM
2426 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2427 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2428 if (GET_MODE_SIZE (tmode) < max_size)
2429 mode = tmode;
2430
57814e5e 2431 if (USE_STORE_PRE_DECREMENT (mode) && data->reverse && ! data->autinc_to)
9de08200 2432 {
57814e5e
JJ
2433 data->to_addr = copy_addr_to_reg (plus_constant (to_addr, data->len));
2434 data->autinc_to = 1;
2435 data->explicit_inc_to = -1;
9de08200 2436 }
3bdf5ad1 2437
57814e5e
JJ
2438 if (USE_STORE_POST_INCREMENT (mode) && ! data->reverse
2439 && ! data->autinc_to)
9de08200 2440 {
57814e5e
JJ
2441 data->to_addr = copy_addr_to_reg (to_addr);
2442 data->autinc_to = 1;
2443 data->explicit_inc_to = 1;
9de08200 2444 }
3bdf5ad1 2445
57814e5e
JJ
 2446	      if (! data->autinc_to && CONSTANT_P (to_addr))
2447 data->to_addr = copy_addr_to_reg (to_addr);
9de08200
RK
2448 }
2449
e1565e65 2450 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751 2451 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
bdb429a5 2452 align = MOVE_MAX * BITS_PER_UNIT;
9de08200 2453
57814e5e 2454 /* First store what we can in the largest integer mode, then go to
9de08200
RK
2455 successively smaller modes. */
2456
2457 while (max_size > 1)
2458 {
9de08200
RK
2459 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2460 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2461 if (GET_MODE_SIZE (tmode) < max_size)
2462 mode = tmode;
2463
2464 if (mode == VOIDmode)
2465 break;
2466
2467 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 2468 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
57814e5e 2469 store_by_pieces_2 (GEN_FCN (icode), mode, data);
9de08200
RK
2470
2471 max_size = GET_MODE_SIZE (mode);
2472 }
2473
2474 /* The code above should have handled everything. */
57814e5e 2475 if (data->len != 0)
9de08200
RK
2476 abort ();
2477}
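/* Worked example (illustrative, assuming MOVE_MAX_PIECES == 4): with
   data->len == 7, the mode ladder above emits one SImode store (4 bytes),
   then one HImode store (2 bytes), then one QImode store, leaving
   data->len == 0 as the final abort check requires.  */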
2478
57814e5e 2479/* Subroutine of store_by_pieces_1. Store as many bytes as appropriate
9de08200
RK
2480 with move instructions for mode MODE. GENFUN is the gen_... function
2481 to make a move insn for that mode. DATA has all the other info. */
2482
2483static void
57814e5e 2484store_by_pieces_2 (genfun, mode, data)
711d877c 2485 rtx (*genfun) PARAMS ((rtx, ...));
9de08200 2486 enum machine_mode mode;
57814e5e 2487 struct store_by_pieces *data;
9de08200 2488{
3bdf5ad1 2489 unsigned int size = GET_MODE_SIZE (mode);
57814e5e 2490 rtx to1, cst;
9de08200
RK
2491
2492 while (data->len >= size)
2493 {
3bdf5ad1
RK
2494 if (data->reverse)
2495 data->offset -= size;
9de08200 2496
3bdf5ad1
RK
2497 if (data->autinc_to)
2498 {
2499 to1 = gen_rtx_MEM (mode, data->to_addr);
2500 MEM_COPY_ATTRIBUTES (to1, data->to);
2501 }
3a94c984 2502 else
3bdf5ad1
RK
2503 to1 = change_address (data->to, mode,
2504 plus_constant (data->to_addr, data->offset));
9de08200 2505
940da324 2506 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
57814e5e
JJ
2507 emit_insn (gen_add2_insn (data->to_addr,
2508 GEN_INT (-(HOST_WIDE_INT) size)));
9de08200 2509
57814e5e
JJ
2510 cst = (*data->constfun) (data->constfundata, data->offset, mode);
2511 emit_insn ((*genfun) (to1, cst));
3bdf5ad1 2512
940da324 2513 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2514 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
9de08200 2515
3bdf5ad1
RK
2516 if (! data->reverse)
2517 data->offset += size;
9de08200
RK
2518
2519 data->len -= size;
2520 }
2521}
2522\f
19caa751
RK
2523/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
 2524   its length in bytes and ALIGN is the maximum alignment we can assume it has.
bbf6f052 2525
e9a25f70
JL
2526 If we call a function that returns the length of the block, return it. */
2527
2528rtx
9de08200 2529clear_storage (object, size, align)
bbf6f052 2530 rtx object;
4c08eef0 2531 rtx size;
729a2125 2532 unsigned int align;
bbf6f052 2533{
52cf7115
JL
2534#ifdef TARGET_MEM_FUNCTIONS
2535 static tree fn;
2536 tree call_expr, arg_list;
2537#endif
e9a25f70
JL
2538 rtx retval = 0;
2539
fcf1b822
RK
2540 /* If OBJECT is not BLKmode and SIZE is the same size as its mode,
2541 just move a zero. Otherwise, do this a piece at a time. */
2542 if (GET_MODE (object) != BLKmode
2543 && GET_CODE (size) == CONST_INT
8752c357 2544 && GET_MODE_SIZE (GET_MODE (object)) == (unsigned int) INTVAL (size))
fcf1b822
RK
2545 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2546 else
bbf6f052 2547 {
9de08200
RK
2548 object = protect_from_queue (object, 1);
2549 size = protect_from_queue (size, 0);
2550
2551 if (GET_CODE (size) == CONST_INT
fbe1758d 2552 && MOVE_BY_PIECES_P (INTVAL (size), align))
9de08200 2553 clear_by_pieces (object, INTVAL (size), align);
9de08200
RK
2554 else
2555 {
2556 /* Try the most limited insn first, because there's no point
2557 including more than one in the machine description unless
2558 the more limited one has some advantage. */
2559
19caa751 2560 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
9de08200
RK
2561 enum machine_mode mode;
2562
2563 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2564 mode = GET_MODE_WIDER_MODE (mode))
2565 {
2566 enum insn_code code = clrstr_optab[(int) mode];
a995e389 2567 insn_operand_predicate_fn pred;
9de08200
RK
2568
2569 if (code != CODE_FOR_nothing
2570 /* We don't need MODE to be narrower than
2571 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2572 the mode mask, as it is returned by the macro, it will
2573 definitely be less than the actual mode mask. */
2574 && ((GET_CODE (size) == CONST_INT
2575 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 2576 <= (GET_MODE_MASK (mode) >> 1)))
9de08200 2577 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
a995e389
RH
2578 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2579 || (*pred) (object, BLKmode))
2580 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2581 || (*pred) (opalign, VOIDmode)))
9de08200
RK
2582 {
2583 rtx op1;
2584 rtx last = get_last_insn ();
2585 rtx pat;
2586
2587 op1 = convert_to_mode (mode, size, 1);
a995e389
RH
2588 pred = insn_data[(int) code].operand[1].predicate;
2589 if (pred != 0 && ! (*pred) (op1, mode))
9de08200
RK
2590 op1 = copy_to_mode_reg (mode, op1);
2591
2592 pat = GEN_FCN ((int) code) (object, op1, opalign);
2593 if (pat)
2594 {
2595 emit_insn (pat);
e9a25f70 2596 return 0;
9de08200
RK
2597 }
2598 else
2599 delete_insns_since (last);
2600 }
2601 }
2602
4bc973ae 2603 /* OBJECT or SIZE may have been passed through protect_from_queue.
9de08200 2604
4bc973ae
JL
2605 It is unsafe to save the value generated by protect_from_queue
2606 and reuse it later. Consider what happens if emit_queue is
2607 called before the return value from protect_from_queue is used.
52cf7115 2608
4bc973ae
JL
2609 Expansion of the CALL_EXPR below will call emit_queue before
2610 we are finished emitting RTL for argument setup. So if we are
2611 not careful we could get the wrong value for an argument.
52cf7115 2612
4bc973ae
JL
2613 To avoid this problem we go ahead and emit code to copy OBJECT
2614 and SIZE into new pseudos. We can then place those new pseudos
2615 into an RTL_EXPR and use them later, even after a call to
2616 emit_queue.
52cf7115 2617
4bc973ae
JL
2618 Note this is not strictly needed for library calls since they
2619 do not call emit_queue before loading their arguments. However,
2620 we may need to have library calls call emit_queue in the future
2621 since failing to do so could cause problems for targets which
2622 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2623 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 2624
4bc973ae
JL
2625#ifdef TARGET_MEM_FUNCTIONS
2626 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2627#else
2628 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2629 TREE_UNSIGNED (integer_type_node));
f3dc586a 2630 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
4bc973ae 2631#endif
52cf7115 2632
4bc973ae
JL
2633#ifdef TARGET_MEM_FUNCTIONS
2634 /* It is incorrect to use the libcall calling conventions to call
2635 memset in this context.
52cf7115 2636
4bc973ae
JL
2637 This could be a user call to memset and the user may wish to
2638 examine the return value from memset.
52cf7115 2639
4bc973ae
JL
2640 For targets where libcalls and normal calls have different
2641 conventions for returning pointers, we could end up generating
3a94c984 2642 incorrect code.
4bc973ae
JL
2643
2644 So instead of using a libcall sequence we build up a suitable
2645 CALL_EXPR and expand the call in the normal fashion. */
2646 if (fn == NULL_TREE)
2647 {
2648 tree fntype;
2649
 2650	  /* This was copied from except.c; it is unclear whether all of
 2651	     it is necessary in this context. */
2652 fn = get_identifier ("memset");
4bc973ae
JL
2653 fntype = build_pointer_type (void_type_node);
2654 fntype = build_function_type (fntype, NULL_TREE);
2655 fn = build_decl (FUNCTION_DECL, fn, fntype);
d7db6646 2656 ggc_add_tree_root (&fn, 1);
4bc973ae
JL
2657 DECL_EXTERNAL (fn) = 1;
2658 TREE_PUBLIC (fn) = 1;
2659 DECL_ARTIFICIAL (fn) = 1;
6c418184 2660 make_decl_rtl (fn, NULL_PTR);
4bc973ae 2661 assemble_external (fn);
4bc973ae
JL
2662 }
2663
3a94c984 2664 /* We need to make an argument list for the function call.
4bc973ae
JL
2665
 2666	     memset has three arguments, the first is a void * address, the
 2667	     second an integer with the initialization value, the last a
2668 size_t byte count for the copy. */
2669 arg_list
2670 = build_tree_list (NULL_TREE,
2671 make_tree (build_pointer_type (void_type_node),
2672 object));
2673 TREE_CHAIN (arg_list)
2674 = build_tree_list (NULL_TREE,
3a94c984 2675 make_tree (integer_type_node, const0_rtx));
4bc973ae
JL
2676 TREE_CHAIN (TREE_CHAIN (arg_list))
2677 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2678 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2679
2680 /* Now we have to build up the CALL_EXPR itself. */
2681 call_expr = build1 (ADDR_EXPR,
2682 build_pointer_type (TREE_TYPE (fn)), fn);
2683 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2684 call_expr, arg_list, NULL_TREE);
2685 TREE_SIDE_EFFECTS (call_expr) = 1;
2686
2687 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
bbf6f052 2688#else
ebb1b59a 2689 emit_library_call (bzero_libfunc, LCT_NORMAL,
fe7bbd2a 2690 VOIDmode, 2, object, Pmode, size,
9de08200 2691 TYPE_MODE (integer_type_node));
bbf6f052 2692#endif
9de08200 2693 }
bbf6f052 2694 }
e9a25f70
JL
2695
2696 return retval;
bbf6f052
RK
2697}
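/* Usage sketch (hypothetical values): zeroing a 24-byte BLKmode temporary
   with word alignment, ALIGN being given in bits here.

     clear_storage (slot, GEN_INT (24), BITS_PER_WORD);

   slot stands for some BLKmode MEM such as one from assign_stack_temp;
   small constant sizes go through clear_by_pieces, larger ones through a
   clrstrM pattern or the memset/bzero path above.  */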
2698
2699/* Generate code to copy Y into X.
2700 Both Y and X must have the same mode, except that
2701 Y can be a constant with VOIDmode.
2702 This mode cannot be BLKmode; use emit_block_move for that.
2703
2704 Return the last instruction emitted. */
2705
2706rtx
2707emit_move_insn (x, y)
2708 rtx x, y;
2709{
2710 enum machine_mode mode = GET_MODE (x);
de1b33dd
AO
2711 rtx y_cst = NULL_RTX;
2712 rtx last_insn;
bbf6f052
RK
2713
2714 x = protect_from_queue (x, 1);
2715 y = protect_from_queue (y, 0);
2716
2717 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2718 abort ();
2719
ee5332b8
RH
2720 /* Never force constant_p_rtx to memory. */
2721 if (GET_CODE (y) == CONSTANT_P_RTX)
2722 ;
2723 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
de1b33dd
AO
2724 {
2725 y_cst = y;
2726 y = force_const_mem (mode, y);
2727 }
bbf6f052
RK
2728
2729 /* If X or Y are memory references, verify that their addresses are valid
2730 for the machine. */
2731 if (GET_CODE (x) == MEM
2732 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2733 && ! push_operand (x, GET_MODE (x)))
2734 || (flag_force_addr
2735 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2736 x = change_address (x, VOIDmode, XEXP (x, 0));
2737
2738 if (GET_CODE (y) == MEM
2739 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2740 || (flag_force_addr
2741 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2742 y = change_address (y, VOIDmode, XEXP (y, 0));
2743
2744 if (mode == BLKmode)
2745 abort ();
2746
de1b33dd
AO
2747 last_insn = emit_move_insn_1 (x, y);
2748
2749 if (y_cst && GET_CODE (x) == REG)
2750 REG_NOTES (last_insn)
2751 = gen_rtx_EXPR_LIST (REG_EQUAL, y_cst, REG_NOTES (last_insn));
2752
2753 return last_insn;
261c4230
RS
2754}
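/* Illustrative consequence (assumed constant, not from the source): moving
   a constant that fails LEGITIMATE_CONSTANT_P, e.g.

     emit_move_insn (reg, immed_double_const (lo, hi, DImode));

   spills the constant to the constant pool, loads it from memory, and
   attaches a REG_EQUAL note holding the original constant so later passes
   still know the register's value.  reg, lo and hi are invented names.  */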
2755
2756/* Low level part of emit_move_insn.
2757 Called just like emit_move_insn, but assumes X and Y
2758 are basically valid. */
2759
2760rtx
2761emit_move_insn_1 (x, y)
2762 rtx x, y;
2763{
2764 enum machine_mode mode = GET_MODE (x);
2765 enum machine_mode submode;
2766 enum mode_class class = GET_MODE_CLASS (mode);
770ae6cc 2767 unsigned int i;
261c4230 2768
76bbe028 2769 if (mode >= MAX_MACHINE_MODE)
3a94c984 2770 abort ();
76bbe028 2771
bbf6f052
RK
2772 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2773 return
2774 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2775
89742723 2776 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2777 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
d0c76654
RK
2778 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2779 * BITS_PER_UNIT),
2780 (class == MODE_COMPLEX_INT
2781 ? MODE_INT : MODE_FLOAT),
2782 0))
7308a047
RS
2783 && (mov_optab->handlers[(int) submode].insn_code
2784 != CODE_FOR_nothing))
2785 {
2786 /* Don't split destination if it is a stack push. */
2787 int stack = push_operand (x, GET_MODE (x));
7308a047 2788
7308a047
RS
2789 /* If this is a stack, push the highpart first, so it
2790 will be in the argument order.
2791
2792 In that case, change_address is used only to convert
2793 the mode, not to change the address. */
c937357e
RS
2794 if (stack)
2795 {
e33c0d66
RS
2796 /* Note that the real part always precedes the imag part in memory
2797 regardless of machine's endianness. */
c937357e
RS
2798#ifdef STACK_GROWS_DOWNWARD
2799 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 2800 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 2801 gen_imagpart (submode, y)));
c937357e 2802 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 2803 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 2804 gen_realpart (submode, y)));
c937357e
RS
2805#else
2806 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 2807 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 2808 gen_realpart (submode, y)));
c937357e 2809 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
3bdf5ad1 2810 (gen_rtx_MEM (submode, XEXP (x, 0)),
e33c0d66 2811 gen_imagpart (submode, y)));
c937357e
RS
2812#endif
2813 }
2814 else
2815 {
235ae7be
DM
2816 rtx realpart_x, realpart_y;
2817 rtx imagpart_x, imagpart_y;
2818
405f63da
MM
2819 /* If this is a complex value with each part being smaller than a
2820 word, the usual calling sequence will likely pack the pieces into
2821 a single register. Unfortunately, SUBREG of hard registers only
2822 deals in terms of words, so we have a problem converting input
2823 arguments to the CONCAT of two registers that is used elsewhere
2824 for complex values. If this is before reload, we can copy it into
2825 memory and reload. FIXME, we should see about using extract and
2826 insert on integer registers, but complex short and complex char
2827 variables should be rarely used. */
3a94c984 2828 if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
405f63da
MM
2829 && (reload_in_progress | reload_completed) == 0)
2830 {
2831 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2832 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2833
2834 if (packed_dest_p || packed_src_p)
2835 {
2836 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2837 ? MODE_FLOAT : MODE_INT);
2838
1da68f56
RK
2839 enum machine_mode reg_mode
2840 = mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
405f63da
MM
2841
2842 if (reg_mode != BLKmode)
2843 {
2844 rtx mem = assign_stack_temp (reg_mode,
2845 GET_MODE_SIZE (mode), 0);
405f63da
MM
2846 rtx cmem = change_address (mem, mode, NULL_RTX);
2847
1da68f56
RK
2848 cfun->cannot_inline
2849 = N_("function using short complex types cannot be inline");
405f63da
MM
2850
2851 if (packed_dest_p)
2852 {
2853 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2854 emit_move_insn_1 (cmem, y);
2855 return emit_move_insn_1 (sreg, mem);
2856 }
2857 else
2858 {
2859 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2860 emit_move_insn_1 (mem, sreg);
2861 return emit_move_insn_1 (x, cmem);
2862 }
2863 }
2864 }
2865 }
2866
235ae7be
DM
2867 realpart_x = gen_realpart (submode, x);
2868 realpart_y = gen_realpart (submode, y);
2869 imagpart_x = gen_imagpart (submode, x);
2870 imagpart_y = gen_imagpart (submode, y);
2871
2872 /* Show the output dies here. This is necessary for SUBREGs
2873 of pseudos since we cannot track their lifetimes correctly;
c14c6529
RH
2874 hard regs shouldn't appear here except as return values.
2875 We never want to emit such a clobber after reload. */
2876 if (x != y
235ae7be
DM
2877 && ! (reload_in_progress || reload_completed)
2878 && (GET_CODE (realpart_x) == SUBREG
2879 || GET_CODE (imagpart_x) == SUBREG))
b2e7e6fb 2880 {
c14c6529 2881 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
b2e7e6fb 2882 }
2638126a 2883
c937357e 2884 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
235ae7be 2885 (realpart_x, realpart_y));
c937357e 2886 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
235ae7be 2887 (imagpart_x, imagpart_y));
c937357e 2888 }
7308a047 2889
7a1ab50a 2890 return get_last_insn ();
7308a047
RS
2891 }
2892
bbf6f052
RK
2893 /* This will handle any multi-word mode that lacks a move_insn pattern.
2894 However, you will get better code if you define such patterns,
2895 even if they must turn into multiple assembler instructions. */
a4320483 2896 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
bbf6f052
RK
2897 {
2898 rtx last_insn = 0;
3ef1eef4 2899 rtx seq, inner;
235ae7be 2900 int need_clobber;
3a94c984 2901
a98c9f1a
RK
2902#ifdef PUSH_ROUNDING
2903
2904 /* If X is a push on the stack, do the push now and replace
2905 X with a reference to the stack pointer. */
2906 if (push_operand (x, GET_MODE (x)))
2907 {
2908 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2909 x = change_address (x, VOIDmode, stack_pointer_rtx);
2910 }
2911#endif
3a94c984 2912
3ef1eef4
RK
2913 /* If we are in reload, see if either operand is a MEM whose address
2914 is scheduled for replacement. */
2915 if (reload_in_progress && GET_CODE (x) == MEM
2916 && (inner = find_replacement (&XEXP (x, 0))) != XEXP (x, 0))
2917 {
2918 rtx new = gen_rtx_MEM (GET_MODE (x), inner);
2919
3ef1eef4 2920 MEM_COPY_ATTRIBUTES (new, x);
3ef1eef4
RK
2921 x = new;
2922 }
2923 if (reload_in_progress && GET_CODE (y) == MEM
2924 && (inner = find_replacement (&XEXP (y, 0))) != XEXP (y, 0))
2925 {
2926 rtx new = gen_rtx_MEM (GET_MODE (y), inner);
2927
3ef1eef4 2928 MEM_COPY_ATTRIBUTES (new, y);
3ef1eef4
RK
2929 y = new;
2930 }
2931
235ae7be 2932 start_sequence ();
15a7a8ec 2933
235ae7be 2934 need_clobber = 0;
bbf6f052 2935 for (i = 0;
3a94c984 2936 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
bbf6f052
RK
2937 i++)
2938 {
2939 rtx xpart = operand_subword (x, i, 1, mode);
2940 rtx ypart = operand_subword (y, i, 1, mode);
2941
2942 /* If we can't get a part of Y, put Y into memory if it is a
2943 constant. Otherwise, force it into a register. If we still
2944 can't get a part of Y, abort. */
2945 if (ypart == 0 && CONSTANT_P (y))
2946 {
2947 y = force_const_mem (mode, y);
2948 ypart = operand_subword (y, i, 1, mode);
2949 }
2950 else if (ypart == 0)
2951 ypart = operand_subword_force (y, i, mode);
2952
2953 if (xpart == 0 || ypart == 0)
2954 abort ();
2955
235ae7be
DM
2956 need_clobber |= (GET_CODE (xpart) == SUBREG);
2957
bbf6f052
RK
2958 last_insn = emit_move_insn (xpart, ypart);
2959 }
6551fa4d 2960
235ae7be
DM
2961 seq = gen_sequence ();
2962 end_sequence ();
2963
2964 /* Show the output dies here. This is necessary for SUBREGs
2965 of pseudos since we cannot track their lifetimes correctly;
2966 hard regs shouldn't appear here except as return values.
2967 We never want to emit such a clobber after reload. */
2968 if (x != y
2969 && ! (reload_in_progress || reload_completed)
2970 && need_clobber != 0)
2971 {
2972 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2973 }
2974
2975 emit_insn (seq);
2976
bbf6f052
RK
2977 return last_insn;
2978 }
2979 else
2980 abort ();
2981}
2982\f
2983/* Pushing data onto the stack. */
2984
2985/* Push a block of length SIZE (perhaps variable)
2986 and return an rtx to address the beginning of the block.
2987 Note that it is not possible for the value returned to be a QUEUED.
2988 The value may be virtual_outgoing_args_rtx.
2989
2990 EXTRA is the number of bytes of padding to push in addition to SIZE.
2991 BELOW nonzero means this padding comes at low addresses;
2992 otherwise, the padding comes at high addresses. */
2993
2994rtx
2995push_block (size, extra, below)
2996 rtx size;
2997 int extra, below;
2998{
2999 register rtx temp;
88f63c77
RK
3000
3001 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
3002 if (CONSTANT_P (size))
3003 anti_adjust_stack (plus_constant (size, extra));
3004 else if (GET_CODE (size) == REG && extra == 0)
3005 anti_adjust_stack (size);
3006 else
3007 {
ce48579b 3008 temp = copy_to_mode_reg (Pmode, size);
bbf6f052 3009 if (extra != 0)
906c4e36 3010 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
3011 temp, 0, OPTAB_LIB_WIDEN);
3012 anti_adjust_stack (temp);
3013 }
3014
f73ad30e
JH
3015#ifndef STACK_GROWS_DOWNWARD
3016#ifdef ARGS_GROW_DOWNWARD
3017 if (!ACCUMULATE_OUTGOING_ARGS)
bbf6f052 3018#else
f73ad30e
JH
3019 if (0)
3020#endif
3021#else
3022 if (1)
bbf6f052 3023#endif
f73ad30e 3024 {
f73ad30e
JH
3025 /* Return the lowest stack address when STACK or ARGS grow downward and
 3026	 we are not accumulating outgoing arguments (the c4x port uses such
3027 conventions). */
3028 temp = virtual_outgoing_args_rtx;
3029 if (extra != 0 && below)
3030 temp = plus_constant (temp, extra);
3031 }
3032 else
3033 {
3034 if (GET_CODE (size) == CONST_INT)
3035 temp = plus_constant (virtual_outgoing_args_rtx,
3a94c984 3036 -INTVAL (size) - (below ? 0 : extra));
f73ad30e
JH
3037 else if (extra != 0 && !below)
3038 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3bdf5ad1 3039 negate_rtx (Pmode, plus_constant (size, extra)));
f73ad30e
JH
3040 else
3041 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
3042 negate_rtx (Pmode, size));
3043 }
bbf6f052
RK
3044
3045 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
3046}
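/* Worked example (illustrative): when the else-branch above runs with
   SIZE == 16, EXTRA == 4 and BELOW == 0, the address returned is

     plus_constant (virtual_outgoing_args_rtx, -16 - 4),

   i.e. 20 bytes below the outgoing-args pointer, leaving the 4 padding
   bytes at the high-address end of the block.  */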
3047
87e38d84 3048rtx
bbf6f052
RK
3049gen_push_operand ()
3050{
38a448ca 3051 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
bbf6f052
RK
3052}
3053
921b3427
RK
 3054/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
3055 block of SIZE bytes. */
3056
3057static rtx
3058get_push_address (size)
3a94c984 3059 int size;
921b3427
RK
3060{
3061 register rtx temp;
3062
3063 if (STACK_PUSH_CODE == POST_DEC)
38a448ca 3064 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427 3065 else if (STACK_PUSH_CODE == POST_INC)
38a448ca 3066 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427
RK
3067 else
3068 temp = stack_pointer_rtx;
3069
c85f7c16 3070 return copy_to_reg (temp);
921b3427
RK
3071}
3072
bbf6f052
RK
3073/* Generate code to push X onto the stack, assuming it has mode MODE and
3074 type TYPE.
3075 MODE is redundant except when X is a CONST_INT (since they don't
3076 carry mode info).
3077 SIZE is an rtx for the size of data to be copied (in bytes),
3078 needed only if X is BLKmode.
3079
19caa751 3080 ALIGN is maximum alignment we can assume.
bbf6f052 3081
cd048831
RK
3082 If PARTIAL and REG are both nonzero, then copy that many of the first
3083 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
3084 The amount of space pushed is decreased by PARTIAL words,
3085 rounded *down* to a multiple of PARM_BOUNDARY.
3086 REG must be a hard register in this case.
cd048831
RK
 3087   If REG is zero but PARTIAL is not, take all other actions for an
3088 argument partially in registers, but do not actually load any
3089 registers.
bbf6f052
RK
3090
3091 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 3092 This is ignored if an argument block has already been allocated.
bbf6f052
RK
3093
3094 On a machine that lacks real push insns, ARGS_ADDR is the address of
3095 the bottom of the argument block for this call. We use indexing off there
3096 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
3097 argument block has not been preallocated.
3098
e5e809f4
JL
3099 ARGS_SO_FAR is the size of args previously pushed for this call.
3100
3101 REG_PARM_STACK_SPACE is nonzero if functions require stack space
3102 for arguments passed in registers. If nonzero, it will be the number
3103 of bytes required. */
bbf6f052
RK
3104
3105void
3106emit_push_insn (x, mode, type, size, align, partial, reg, extra,
4fc026cd
CM
3107 args_addr, args_so_far, reg_parm_stack_space,
3108 alignment_pad)
bbf6f052
RK
3109 register rtx x;
3110 enum machine_mode mode;
3111 tree type;
3112 rtx size;
729a2125 3113 unsigned int align;
bbf6f052
RK
3114 int partial;
3115 rtx reg;
3116 int extra;
3117 rtx args_addr;
3118 rtx args_so_far;
e5e809f4 3119 int reg_parm_stack_space;
4fc026cd 3120 rtx alignment_pad;
bbf6f052
RK
3121{
3122 rtx xinner;
3123 enum direction stack_direction
3124#ifdef STACK_GROWS_DOWNWARD
3125 = downward;
3126#else
3127 = upward;
3128#endif
3129
3130 /* Decide where to pad the argument: `downward' for below,
3131 `upward' for above, or `none' for don't pad it.
3132 Default is below for small data on big-endian machines; else above. */
3133 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
3134
3135 /* Invert direction if stack is post-update. */
3136 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
3137 if (where_pad != none)
3138 where_pad = (where_pad == downward ? upward : downward);
3139
3140 xinner = x = protect_from_queue (x, 0);
3141
3142 if (mode == BLKmode)
3143 {
3144 /* Copy a block into the stack, entirely or partially. */
3145
3146 register rtx temp;
3147 int used = partial * UNITS_PER_WORD;
3148 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
3149 int skip;
3a94c984 3150
bbf6f052
RK
3151 if (size == 0)
3152 abort ();
3153
3154 used -= offset;
3155
3156 /* USED is now the # of bytes we need not copy to the stack
3157 because registers will take care of them. */
3158
3159 if (partial != 0)
3160 xinner = change_address (xinner, BLKmode,
3161 plus_constant (XEXP (xinner, 0), used));
3162
3163 /* If the partial register-part of the arg counts in its stack size,
3164 skip the part of stack space corresponding to the registers.
3165 Otherwise, start copying to the beginning of the stack space,
3166 by setting SKIP to 0. */
e5e809f4 3167 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
3168
3169#ifdef PUSH_ROUNDING
3170 /* Do it with several push insns if that doesn't take lots of insns
3171 and if there is no difficulty with push insns that skip bytes
3172 on the stack for alignment purposes. */
3173 if (args_addr == 0
f73ad30e 3174 && PUSH_ARGS
bbf6f052
RK
3175 && GET_CODE (size) == CONST_INT
3176 && skip == 0
15914757 3177 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
3178 /* Here we avoid the case of a structure whose weak alignment
3179 forces many pushes of a small amount of data,
3180 and such small pushes do rounding that causes trouble. */
e1565e65 3181 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 3182 || align >= BIGGEST_ALIGNMENT
bbf6f052 3183 || PUSH_ROUNDING (align) == align)
bbf6f052
RK
3184 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
3185 {
3186 /* Push padding now if padding above and stack grows down,
3187 or if padding below and stack grows up.
3188 But if space already allocated, this has already been done. */
3189 if (extra && args_addr == 0
3190 && where_pad != none && where_pad != stack_direction)
906c4e36 3191 anti_adjust_stack (GEN_INT (extra));
bbf6f052 3192
1503a7ec 3193 stack_pointer_delta += INTVAL (size) - used;
38a448ca 3194 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
bbf6f052 3195 INTVAL (size) - used, align);
921b3427 3196
7d384cc0 3197 if (current_function_check_memory_usage && ! in_check_memory_usage)
921b3427
RK
3198 {
3199 rtx temp;
3a94c984 3200
956d6950 3201 in_check_memory_usage = 1;
3a94c984 3202 temp = get_push_address (INTVAL (size) - used);
c85f7c16 3203 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
ebb1b59a
BS
3204 emit_library_call (chkr_copy_bitmap_libfunc,
3205 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3206 Pmode, XEXP (xinner, 0), Pmode,
3a94c984 3207 GEN_INT (INTVAL (size) - used),
921b3427
RK
3208 TYPE_MODE (sizetype));
3209 else
ebb1b59a
BS
3210 emit_library_call (chkr_set_right_libfunc,
3211 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, temp,
3212 Pmode, GEN_INT (INTVAL (size) - used),
921b3427 3213 TYPE_MODE (sizetype),
956d6950
JL
3214 GEN_INT (MEMORY_USE_RW),
3215 TYPE_MODE (integer_type_node));
3216 in_check_memory_usage = 0;
921b3427 3217 }
bbf6f052
RK
3218 }
3219 else
3a94c984 3220#endif /* PUSH_ROUNDING */
bbf6f052 3221 {
7ab923cc
JJ
3222 rtx target;
3223
bbf6f052
RK
3224 /* Otherwise make space on the stack and copy the data
3225 to the address of that space. */
3226
3227 /* Deduct words put into registers from the size we must copy. */
3228 if (partial != 0)
3229 {
3230 if (GET_CODE (size) == CONST_INT)
906c4e36 3231 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
3232 else
3233 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
3234 GEN_INT (used), NULL_RTX, 0,
3235 OPTAB_LIB_WIDEN);
bbf6f052
RK
3236 }
3237
3238 /* Get the address of the stack space.
3239 In this case, we do not deal with EXTRA separately.
3240 A single stack adjust will do. */
3241 if (! args_addr)
3242 {
3243 temp = push_block (size, extra, where_pad == downward);
3244 extra = 0;
3245 }
3246 else if (GET_CODE (args_so_far) == CONST_INT)
3247 temp = memory_address (BLKmode,
3248 plus_constant (args_addr,
3249 skip + INTVAL (args_so_far)));
3250 else
3251 temp = memory_address (BLKmode,
38a448ca
RH
3252 plus_constant (gen_rtx_PLUS (Pmode,
3253 args_addr,
3254 args_so_far),
bbf6f052 3255 skip));
7d384cc0 3256 if (current_function_check_memory_usage && ! in_check_memory_usage)
921b3427 3257 {
956d6950 3258 in_check_memory_usage = 1;
921b3427 3259 target = copy_to_reg (temp);
c85f7c16 3260 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
ebb1b59a
BS
3261 emit_library_call (chkr_copy_bitmap_libfunc,
3262 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6a9c4aed
MK
3263 target, Pmode,
3264 XEXP (xinner, 0), Pmode,
921b3427
RK
3265 size, TYPE_MODE (sizetype));
3266 else
ebb1b59a
BS
3267 emit_library_call (chkr_set_right_libfunc,
3268 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6a9c4aed 3269 target, Pmode,
921b3427 3270 size, TYPE_MODE (sizetype),
956d6950
JL
3271 GEN_INT (MEMORY_USE_RW),
3272 TYPE_MODE (integer_type_node));
3273 in_check_memory_usage = 0;
921b3427 3274 }
bbf6f052 3275
3a94c984 3276 target = gen_rtx_MEM (BLKmode, temp);
7ab923cc 3277
3a94c984
KH
3278 if (type != 0)
3279 {
3280 set_mem_attributes (target, type, 1);
3281 /* Function incoming arguments may overlap with sibling call
3282 outgoing arguments and we cannot allow reordering of reads
3283 from function arguments with stores to outgoing arguments
3284 of sibling calls. */
3285 MEM_ALIAS_SET (target) = 0;
3286 }
7ab923cc 3287
bbf6f052
RK
3288 /* TEMP is the address of the block. Copy the data there. */
3289 if (GET_CODE (size) == CONST_INT
729a2125 3290 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
bbf6f052 3291 {
7ab923cc 3292 move_by_pieces (target, xinner, INTVAL (size), align);
bbf6f052
RK
3293 goto ret;
3294 }
e5e809f4 3295 else
bbf6f052 3296 {
19caa751 3297 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
e5e809f4 3298 enum machine_mode mode;
3bdf5ad1 3299
e5e809f4
JL
3300 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3301 mode != VOIDmode;
3302 mode = GET_MODE_WIDER_MODE (mode))
c841050e 3303 {
e5e809f4 3304 enum insn_code code = movstr_optab[(int) mode];
a995e389 3305 insn_operand_predicate_fn pred;
e5e809f4
JL
3306
3307 if (code != CODE_FOR_nothing
3308 && ((GET_CODE (size) == CONST_INT
3309 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3310 <= (GET_MODE_MASK (mode) >> 1)))
3311 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
a995e389
RH
3312 && (!(pred = insn_data[(int) code].operand[0].predicate)
3313 || ((*pred) (target, BLKmode)))
3314 && (!(pred = insn_data[(int) code].operand[1].predicate)
3315 || ((*pred) (xinner, BLKmode)))
3316 && (!(pred = insn_data[(int) code].operand[3].predicate)
3317 || ((*pred) (opalign, VOIDmode))))
e5e809f4
JL
3318 {
3319 rtx op2 = convert_to_mode (mode, size, 1);
3320 rtx last = get_last_insn ();
3321 rtx pat;
3322
a995e389
RH
3323 pred = insn_data[(int) code].operand[2].predicate;
3324 if (pred != 0 && ! (*pred) (op2, mode))
e5e809f4
JL
3325 op2 = copy_to_mode_reg (mode, op2);
3326
3327 pat = GEN_FCN ((int) code) (target, xinner,
3328 op2, opalign);
3329 if (pat)
3330 {
3331 emit_insn (pat);
3332 goto ret;
3333 }
3334 else
3335 delete_insns_since (last);
3336 }
c841050e 3337 }
bbf6f052 3338 }
bbf6f052 3339
f73ad30e
JH
3340 if (!ACCUMULATE_OUTGOING_ARGS)
3341 {
3342 /* If the source is referenced relative to the stack pointer,
3343 copy it to another register to stabilize it. We do not need
3344 to do this if we know that we won't be changing sp. */
bbf6f052 3345
f73ad30e
JH
3346 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3347 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3348 temp = copy_to_reg (temp);
3349 }
bbf6f052
RK
3350
3351 /* Make inhibit_defer_pop nonzero around the library call
3352 to force it to pop the bcopy-arguments right away. */
3353 NO_DEFER_POP;
3354#ifdef TARGET_MEM_FUNCTIONS
ebb1b59a 3355 emit_library_call (memcpy_libfunc, LCT_NORMAL,
bbf6f052 3356 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
0fa83258
RK
3357 convert_to_mode (TYPE_MODE (sizetype),
3358 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3359 TYPE_MODE (sizetype));
bbf6f052 3360#else
ebb1b59a 3361 emit_library_call (bcopy_libfunc, LCT_NORMAL,
bbf6f052 3362 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3b6f75e2
JW
3363 convert_to_mode (TYPE_MODE (integer_type_node),
3364 size,
3365 TREE_UNSIGNED (integer_type_node)),
3366 TYPE_MODE (integer_type_node));
bbf6f052
RK
3367#endif
3368 OK_DEFER_POP;
3369 }
3370 }
3371 else if (partial > 0)
3372 {
3373 /* Scalar partly in registers. */
3374
3375 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3376 int i;
3377 int not_stack;
3378 /* # words of start of argument
3379 that we must make space for but need not store. */
3380 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3381 int args_offset = INTVAL (args_so_far);
3382 int skip;
3383
3384 /* Push padding now if padding above and stack grows down,
3385 or if padding below and stack grows up.
3386 But if space already allocated, this has already been done. */
3387 if (extra && args_addr == 0
3388 && where_pad != none && where_pad != stack_direction)
906c4e36 3389 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3390
3391 /* If we make space by pushing it, we might as well push
3392 the real data. Otherwise, we can leave OFFSET nonzero
3393 and leave the space uninitialized. */
3394 if (args_addr == 0)
3395 offset = 0;
3396
3397 /* Now NOT_STACK gets the number of words that we don't need to
3398 allocate on the stack. */
3399 not_stack = partial - offset;
3400
3401 /* If the partial register-part of the arg counts in its stack size,
3402 skip the part of stack space corresponding to the registers.
3403 Otherwise, start copying to the beginning of the stack space,
3404 by setting SKIP to 0. */
e5e809f4 3405 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3406
3407 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3408 x = validize_mem (force_const_mem (mode, x));
3409
3410 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3411 SUBREGs of such registers are not allowed. */
3412 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3413 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3414 x = copy_to_reg (x);
3415
3416 /* Loop over all the words allocated on the stack for this arg. */
3417 /* We can do it by words, because any scalar bigger than a word
3418 has a size a multiple of a word. */
3419#ifndef PUSH_ARGS_REVERSED
3420 for (i = not_stack; i < size; i++)
3421#else
3422 for (i = size - 1; i >= not_stack; i--)
3423#endif
3424 if (i >= not_stack + offset)
3425 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
3426 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3427 0, args_addr,
3428 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3429 * UNITS_PER_WORD)),
4fc026cd 3430 reg_parm_stack_space, alignment_pad);
bbf6f052
RK
3431 }
3432 else
3433 {
3434 rtx addr;
921b3427 3435 rtx target = NULL_RTX;
3bdf5ad1 3436 rtx dest;
bbf6f052
RK
3437
3438 /* Push padding now if padding above and stack grows down,
3439 or if padding below and stack grows up.
3440 But if space already allocated, this has already been done. */
3441 if (extra && args_addr == 0
3442 && where_pad != none && where_pad != stack_direction)
906c4e36 3443 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3444
3445#ifdef PUSH_ROUNDING
f73ad30e 3446 if (args_addr == 0 && PUSH_ARGS)
1503a7ec
JH
3447 {
3448 addr = gen_push_operand ();
3449 stack_pointer_delta += PUSH_ROUNDING (GET_MODE_SIZE (mode));
3450 }
bbf6f052
RK
3451 else
3452#endif
921b3427
RK
3453 {
3454 if (GET_CODE (args_so_far) == CONST_INT)
3455 addr
3456 = memory_address (mode,
3a94c984 3457 plus_constant (args_addr,
921b3427 3458 INTVAL (args_so_far)));
3a94c984 3459 else
38a448ca
RH
3460 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3461 args_so_far));
921b3427
RK
3462 target = addr;
3463 }
bbf6f052 3464
3bdf5ad1
RK
3465 dest = gen_rtx_MEM (mode, addr);
3466 if (type != 0)
7ab923cc
JJ
3467 {
3468 set_mem_attributes (dest, type, 1);
3469 /* Function incoming arguments may overlap with sibling call
3470 outgoing arguments and we cannot allow reordering of reads
3471 from function arguments with stores to outgoing arguments
3472 of sibling calls. */
3473 MEM_ALIAS_SET (dest) = 0;
3474 }
3bdf5ad1
RK
3475
3476 emit_move_insn (dest, x);
921b3427 3477
7d384cc0 3478 if (current_function_check_memory_usage && ! in_check_memory_usage)
921b3427 3479 {
956d6950 3480 in_check_memory_usage = 1;
921b3427
RK
3481 if (target == 0)
3482 target = get_push_address (GET_MODE_SIZE (mode));
3483
c85f7c16 3484 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
ebb1b59a
BS
3485 emit_library_call (chkr_copy_bitmap_libfunc,
3486 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3487 Pmode, XEXP (x, 0), Pmode,
921b3427
RK
3488 GEN_INT (GET_MODE_SIZE (mode)),
3489 TYPE_MODE (sizetype));
3490 else
ebb1b59a
BS
3491 emit_library_call (chkr_set_right_libfunc,
3492 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, target,
3493 Pmode, GEN_INT (GET_MODE_SIZE (mode)),
921b3427 3494 TYPE_MODE (sizetype),
956d6950
JL
3495 GEN_INT (MEMORY_USE_RW),
3496 TYPE_MODE (integer_type_node));
3497 in_check_memory_usage = 0;
921b3427 3498 }
bbf6f052
RK
3499 }
3500
3501 ret:
3502 /* If part should go in registers, copy that part
3503 into the appropriate registers. Do this now, at the end,
3504 since mem-to-mem copies above may do function calls. */
cd048831 3505 if (partial > 0 && reg != 0)
fffa9c1d
JW
3506 {
3507 /* Handle calls that pass values in multiple non-contiguous locations.
3508 The Irix 6 ABI has examples of this. */
3509 if (GET_CODE (reg) == PARALLEL)
aac5cc16 3510 emit_group_load (reg, x, -1, align); /* ??? size? */
fffa9c1d
JW
3511 else
3512 move_block_to_reg (REGNO (reg), x, partial, mode);
3513 }
bbf6f052
RK
3514
3515 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3516 anti_adjust_stack (GEN_INT (extra));
3a94c984 3517
3ea2292a 3518 if (alignment_pad && args_addr == 0)
4fc026cd 3519 anti_adjust_stack (alignment_pad);
bbf6f052
RK
3520}
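/* A hedged worked example (editorial, not from expr.c): with
   UNITS_PER_WORD == 4, a 16-byte scalar mode, PARTIAL == 2,
   ARGS_ADDR == 0 and reg_parm_stack_space == 0, the "scalar partly
   in registers" branch above computes size = 4, offset = 0 (forced
   to zero because we are pushing), not_stack = 2 and skip = 0, so
   the loop pushes only words 2 and 3 of X; words 0 and 1 are moved
   into REG at the `ret:' label by move_block_to_reg.  */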
3521\f
296b4ed9
RK
3522/* Return X if X can be used as a subtarget in a sequence of arithmetic
3523 operations. */
3524
3525static rtx
3526get_subtarget (x)
3527 rtx x;
3528{
3529 return ((x == 0
3530 /* Only registers can be subtargets. */
3531 || GET_CODE (x) != REG
3532 /* If the register is readonly, it can't be set more than once. */
3533 || RTX_UNCHANGING_P (x)
3534 /* Don't use hard regs to avoid extending their life. */
3535 || REGNO (x) < FIRST_PSEUDO_REGISTER
3536 /* Avoid subtargets inside loops,
3537 since they hide some invariant expressions. */
3538 || preserve_subexpressions_p ())
3539 ? 0 : x);
3540}
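/* A hedged usage sketch (editorial, not from expr.c; OP0, OP1 and
   UNSIGNEDP are hypothetical): get_subtarget is meant to supply the
   TARGET argument of an arithmetic expander, e.g.

	temp = expand_binop (mode, add_optab, op0, op1,
			     get_subtarget (target), unsignedp,
			     OPTAB_LIB_WIDEN);

   so the result lands directly in TARGET when it is a safe pseudo,
   and in a fresh pseudo otherwise.  */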
3541
bbf6f052
RK
3542/* Expand an assignment that stores the value of FROM into TO.
3543 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
3544 (This may contain a QUEUED rtx;
3545 if the value is constant, this rtx is a constant.)
3546 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
3547
3548 SUGGEST_REG is no longer actually used.
3549 It used to mean, copy the value through a register
3550 and return that register, if that is possible.
709f5be1 3551 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
3552
3553rtx
3554expand_assignment (to, from, want_value, suggest_reg)
3555 tree to, from;
3556 int want_value;
c5c76735 3557 int suggest_reg ATTRIBUTE_UNUSED;
bbf6f052
RK
3558{
3559 register rtx to_rtx = 0;
3560 rtx result;
3561
3562 /* Don't crash if the lhs of the assignment was erroneous. */
3563
3564 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
3565 {
3566 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3567 return want_value ? result : NULL_RTX;
3568 }
bbf6f052
RK
3569
3570 /* Assignment of a structure component needs special treatment
3571 if the structure component's rtx is not simply a MEM.
6be58303
JW
3572 Assignment of an array element at a constant index, and assignment of
3573 an array element in an unaligned packed structure field, has the same
3574 problem. */
bbf6f052 3575
08293add
RK
3576 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3577 || TREE_CODE (to) == ARRAY_REF)
bbf6f052
RK
3578 {
3579 enum machine_mode mode1;
770ae6cc 3580 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 3581 tree offset;
bbf6f052
RK
3582 int unsignedp;
3583 int volatilep = 0;
0088fcb1 3584 tree tem;
729a2125 3585 unsigned int alignment;
0088fcb1
RK
3586
3587 push_temp_slots ();
839c4796
RK
3588 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3589 &unsignedp, &volatilep, &alignment);
bbf6f052
RK
3590
3591 /* If we are going to use store_bit_field and extract_bit_field,
3592 make sure to_rtx will be safe for multiple use. */
3593
3594 if (mode1 == VOIDmode && want_value)
3595 tem = stabilize_reference (tem);
3596
921b3427 3597 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
7bb0943f
RS
3598 if (offset != 0)
3599 {
906c4e36 3600 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
3601
3602 if (GET_CODE (to_rtx) != MEM)
3603 abort ();
bd070e1a
RH
3604
3605 if (GET_MODE (offset_rtx) != ptr_mode)
3606 {
3607#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 3608 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
3609#else
3610 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3611#endif
3612 }
3613
9a7b9f4f
JL
3614 /* A constant address in TO_RTX can have VOIDmode, we must not try
3615 to call force_reg for that case. Avoid that case. */
89752202
HB
3616 if (GET_CODE (to_rtx) == MEM
3617 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 3618 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
89752202 3619 && bitsize
3a94c984 3620 && (bitpos % bitsize) == 0
89752202 3621 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
19caa751 3622 && alignment == GET_MODE_ALIGNMENT (mode1))
89752202
HB
3623 {
3624 rtx temp = change_address (to_rtx, mode1,
3625 plus_constant (XEXP (to_rtx, 0),
3626 (bitpos /
3627 BITS_PER_UNIT)));
3628 if (GET_CODE (XEXP (temp, 0)) == REG)
3629 to_rtx = temp;
3630 else
3631 to_rtx = change_address (to_rtx, mode1,
3632 force_reg (GET_MODE (XEXP (temp, 0)),
3633 XEXP (temp, 0)));
3634 bitpos = 0;
3635 }
3636
7bb0943f 3637 to_rtx = change_address (to_rtx, VOIDmode,
38a448ca 3638 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
c5c76735
JL
3639 force_reg (ptr_mode,
3640 offset_rtx)));
7bb0943f 3641 }
c5c76735 3642
bbf6f052
RK
3643 if (volatilep)
3644 {
3645 if (GET_CODE (to_rtx) == MEM)
01188446
JW
3646 {
3647 /* When the offset is zero, to_rtx is the address of the
3648 structure we are storing into, and hence may be shared.
3649 We must make a new MEM before setting the volatile bit. */
3650 if (offset == 0)
effbcc6a
RK
3651 to_rtx = copy_rtx (to_rtx);
3652
01188446
JW
3653 MEM_VOLATILE_P (to_rtx) = 1;
3654 }
bbf6f052
RK
3655#if 0 /* This was turned off because, when a field is volatile
3656 in an object which is not volatile, the object may be in a register,
3657 and then we would abort over here. */
3658 else
3659 abort ();
3660#endif
3661 }
3662
956d6950
JL
3663 if (TREE_CODE (to) == COMPONENT_REF
3664 && TREE_READONLY (TREE_OPERAND (to, 1)))
3665 {
8bd6ecc2 3666 if (offset == 0)
956d6950
JL
3667 to_rtx = copy_rtx (to_rtx);
3668
3669 RTX_UNCHANGING_P (to_rtx) = 1;
3670 }
3671
921b3427 3672 /* Check the access. */
7d384cc0 3673 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
921b3427
RK
3674 {
3675 rtx to_addr;
3676 int size;
3677 int best_mode_size;
3678 enum machine_mode best_mode;
3679
3680 best_mode = get_best_mode (bitsize, bitpos,
3681 TYPE_ALIGN (TREE_TYPE (tem)),
3682 mode1, volatilep);
3683 if (best_mode == VOIDmode)
3684 best_mode = QImode;
3685
3686 best_mode_size = GET_MODE_BITSIZE (best_mode);
3687 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3688 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3689 size *= GET_MODE_SIZE (best_mode);
3690
3691 /* Check the access right of the pointer. */
ea4da9db 3692 in_check_memory_usage = 1;
e9a25f70 3693 if (size)
ebb1b59a
BS
3694 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
3695 VOIDmode, 3, to_addr, Pmode,
e9a25f70 3696 GEN_INT (size), TYPE_MODE (sizetype),
956d6950
JL
3697 GEN_INT (MEMORY_USE_WO),
3698 TYPE_MODE (integer_type_node));
ea4da9db 3699 in_check_memory_usage = 0;
921b3427
RK
3700 }
3701
a69beca1
RK
3702 /* If this is a varying-length object, we must get the address of
3703 the source and do an explicit block move. */
3704 if (bitsize < 0)
3705 {
3706 unsigned int from_align;
3707 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3708 rtx inner_to_rtx
3709 = change_address (to_rtx, VOIDmode,
3710 plus_constant (XEXP (to_rtx, 0),
3711 bitpos / BITS_PER_UNIT));
3712
3713 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
19caa751 3714 MIN (alignment, from_align));
a69beca1
RK
3715 free_temp_slots ();
3716 pop_temp_slots ();
3717 return to_rtx;
3718 }
3719 else
3720 {
3721 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3722 (want_value
3723 /* Spurious cast for HPUX compiler. */
3724 ? ((enum machine_mode)
3725 TYPE_MODE (TREE_TYPE (to)))
3726 : VOIDmode),
3727 unsignedp,
a69beca1
RK
3728 alignment,
3729 int_size_in_bytes (TREE_TYPE (tem)),
3730 get_alias_set (to));
3731
3732 preserve_temp_slots (result);
3733 free_temp_slots ();
3734 pop_temp_slots ();
3735
3736 /* If the value is meaningful, convert RESULT to the proper mode.
3737 Otherwise, return nothing. */
3738 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3739 TYPE_MODE (TREE_TYPE (from)),
3740 result,
3741 TREE_UNSIGNED (TREE_TYPE (to)))
3742 : NULL_RTX);
3743 }
bbf6f052
RK
3744 }
3745
cd1db108
RS
3746 /* If the rhs is a function call and its value is not an aggregate,
3747 call the function before we start to compute the lhs.
3748 This is needed for correct code for cases such as
3749 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
3750 requires loading up part of an address in a separate insn.
3751
1858863b
JW
3752 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3753 since it might be a promoted variable where the zero- or sign- extension
3754 needs to be done. Handling this in the normal way is safe because no
3755 computation is done before the call. */
1ad87b63 3756 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 3757 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1858863b
JW
3758 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3759 && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3760 {
0088fcb1
RK
3761 rtx value;
3762
3763 push_temp_slots ();
3764 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3765 if (to_rtx == 0)
921b3427 3766 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
aaf87c45 3767
fffa9c1d
JW
3768 /* Handle calls that return values in multiple non-contiguous locations.
3769 The Irix 6 ABI has examples of this. */
3770 if (GET_CODE (to_rtx) == PARALLEL)
aac5cc16 3771 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
19caa751 3772 TYPE_ALIGN (TREE_TYPE (from)));
fffa9c1d 3773 else if (GET_MODE (to_rtx) == BLKmode)
db3ec607 3774 emit_block_move (to_rtx, value, expr_size (from),
19caa751 3775 TYPE_ALIGN (TREE_TYPE (from)));
aaf87c45 3776 else
6419e5b0
DT
3777 {
3778#ifdef POINTERS_EXTEND_UNSIGNED
ab40f612
DT
3779 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3780 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
6419e5b0
DT
3781 value = convert_memory_address (GET_MODE (to_rtx), value);
3782#endif
3783 emit_move_insn (to_rtx, value);
3784 }
cd1db108
RS
3785 preserve_temp_slots (to_rtx);
3786 free_temp_slots ();
0088fcb1 3787 pop_temp_slots ();
709f5be1 3788 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3789 }
3790
bbf6f052
RK
3791 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3792 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3793
3794 if (to_rtx == 0)
41472af8
MM
3795 {
3796 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3797 if (GET_CODE (to_rtx) == MEM)
3798 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3799 }
bbf6f052 3800
86d38d25 3801 /* Don't move directly into a return register. */
14a774a9
RK
3802 if (TREE_CODE (to) == RESULT_DECL
3803 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
86d38d25 3804 {
0088fcb1
RK
3805 rtx temp;
3806
3807 push_temp_slots ();
3808 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
3809
3810 if (GET_CODE (to_rtx) == PARALLEL)
3811 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
19caa751 3812 TYPE_ALIGN (TREE_TYPE (from)));
14a774a9
RK
3813 else
3814 emit_move_insn (to_rtx, temp);
3815
86d38d25
RS
3816 preserve_temp_slots (to_rtx);
3817 free_temp_slots ();
0088fcb1 3818 pop_temp_slots ();
709f5be1 3819 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3820 }
3821
bbf6f052
RK
3822 /* In case we are returning the contents of an object which overlaps
3823 the place the value is being stored, use a safe function when copying
3824 a value through a pointer into a structure value return block. */
3825 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3826 && current_function_returns_struct
3827 && !current_function_returns_pcc_struct)
3828 {
0088fcb1
RK
3829 rtx from_rtx, size;
3830
3831 push_temp_slots ();
33a20d10 3832 size = expr_size (from);
921b3427
RK
3833 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3834 EXPAND_MEMORY_USE_DONT);
3835
3836 /* Copy the rights of the bitmap. */
7d384cc0 3837 if (current_function_check_memory_usage)
ebb1b59a
BS
3838 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
3839 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
6a9c4aed 3840 XEXP (from_rtx, 0), Pmode,
921b3427
RK
3841 convert_to_mode (TYPE_MODE (sizetype),
3842 size, TREE_UNSIGNED (sizetype)),
3843 TYPE_MODE (sizetype));
bbf6f052
RK
3844
3845#ifdef TARGET_MEM_FUNCTIONS
ebb1b59a 3846 emit_library_call (memcpy_libfunc, LCT_NORMAL,
bbf6f052
RK
3847 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3848 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
3849 convert_to_mode (TYPE_MODE (sizetype),
3850 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3851 TYPE_MODE (sizetype));
bbf6f052 3852#else
ebb1b59a 3853 emit_library_call (bcopy_libfunc, LCT_NORMAL,
bbf6f052
RK
3854 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3855 XEXP (to_rtx, 0), Pmode,
3b6f75e2
JW
3856 convert_to_mode (TYPE_MODE (integer_type_node),
3857 size, TREE_UNSIGNED (integer_type_node)),
3858 TYPE_MODE (integer_type_node));
bbf6f052
RK
3859#endif
3860
3861 preserve_temp_slots (to_rtx);
3862 free_temp_slots ();
0088fcb1 3863 pop_temp_slots ();
709f5be1 3864 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3865 }
3866
3867 /* Compute FROM and store the value in the rtx we got. */
3868
0088fcb1 3869 push_temp_slots ();
bbf6f052
RK
3870 result = store_expr (from, to_rtx, want_value);
3871 preserve_temp_slots (result);
3872 free_temp_slots ();
0088fcb1 3873 pop_temp_slots ();
709f5be1 3874 return want_value ? result : NULL_RTX;
bbf6f052
RK
3875}
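/* A hedged example (editorial, not from expr.c): for a bit-field
   assignment such as

	struct { unsigned int k : 3; } s;
	s.k = n;

   TO is a COMPONENT_REF, so get_inner_reference above reports
   bitsize == 3 plus the field's bit position, and the store is
   emitted through store_field instead of a plain emit_move_insn.  */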
3876
3877/* Generate code for computing expression EXP,
3878 and storing the value into TARGET.
bbf6f052
RK
3879 TARGET may contain a QUEUED rtx.
3880
709f5be1
RS
3881 If WANT_VALUE is nonzero, return a copy of the value
3882 not in TARGET, so that we can be sure to use the proper
3883 value in a containing expression even if TARGET has something
3884 else stored in it. If possible, we copy the value through a pseudo
3885 and return that pseudo. Or, if the value is constant, we try to
3886 return the constant. In some cases, we return a pseudo
3887 copied *from* TARGET.
3888
3889 If the mode is BLKmode then we may return TARGET itself.
3890 It turns out that in BLKmode it doesn't cause a problem,
3891 because C has no operators that could combine two different
3892 assignments into the same BLKmode object with different values
3893 with no sequence point. Will other languages need this to
3894 be more thorough?
3895
3896 If WANT_VALUE is 0, we return NULL, to make sure
3897 to catch quickly any cases where the caller uses the value
3898 and fails to set WANT_VALUE. */
bbf6f052
RK
3899
3900rtx
709f5be1 3901store_expr (exp, target, want_value)
bbf6f052
RK
3902 register tree exp;
3903 register rtx target;
709f5be1 3904 int want_value;
bbf6f052
RK
3905{
3906 register rtx temp;
3907 int dont_return_target = 0;
3908
3909 if (TREE_CODE (exp) == COMPOUND_EXPR)
3910 {
3911 /* Perform first part of compound expression, then assign from second
3912 part. */
3913 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3914 emit_queue ();
709f5be1 3915 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
3916 }
3917 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3918 {
3919 /* For a conditional expression, get a safe form of the target.  Then
3920 test the condition, doing the appropriate assignment on either
3921 side. This avoids the creation of unnecessary temporaries.
3922 For non-BLKmode, it is more efficient not to do this. */
3923
3924 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3925
3926 emit_queue ();
3927 target = protect_from_queue (target, 1);
3928
dabf8373 3929 do_pending_stack_adjust ();
bbf6f052
RK
3930 NO_DEFER_POP;
3931 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 3932 start_cleanup_deferral ();
709f5be1 3933 store_expr (TREE_OPERAND (exp, 1), target, 0);
956d6950 3934 end_cleanup_deferral ();
bbf6f052
RK
3935 emit_queue ();
3936 emit_jump_insn (gen_jump (lab2));
3937 emit_barrier ();
3938 emit_label (lab1);
956d6950 3939 start_cleanup_deferral ();
709f5be1 3940 store_expr (TREE_OPERAND (exp, 2), target, 0);
956d6950 3941 end_cleanup_deferral ();
bbf6f052
RK
3942 emit_queue ();
3943 emit_label (lab2);
3944 OK_DEFER_POP;
a3a58acc 3945
709f5be1 3946 return want_value ? target : NULL_RTX;
bbf6f052 3947 }
bbf6f052 3948 else if (queued_subexp_p (target))
709f5be1
RS
3949 /* If target contains a postincrement, let's not risk
3950 using it as the place to generate the rhs. */
bbf6f052
RK
3951 {
3952 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3953 {
3954 /* Expand EXP into a new pseudo. */
3955 temp = gen_reg_rtx (GET_MODE (target));
3956 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3957 }
3958 else
906c4e36 3959 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
3960
3961 /* If target is volatile, ANSI requires accessing the value
3962 *from* the target, if it is accessed. So make that happen.
3963 In no case return the target itself. */
3964 if (! MEM_VOLATILE_P (target) && want_value)
3965 dont_return_target = 1;
bbf6f052 3966 }
12f06d17
CH
3967 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3968 && GET_MODE (target) != BLKmode)
3969 /* If target is in memory and caller wants value in a register instead,
3970 arrange that. Pass TARGET as target for expand_expr so that,
3971 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3972 We know expand_expr will not use the target in that case.
3973 Don't do this if TARGET is volatile because we are supposed
3974 to write it and then read it. */
3975 {
1da93fe0 3976 temp = expand_expr (exp, target, GET_MODE (target), 0);
12f06d17
CH
3977 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3978 temp = copy_to_reg (temp);
3979 dont_return_target = 1;
3980 }
1499e0a8
RK
3981 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3982 /* If this is a scalar in a register that is stored in a wider mode
3983 than the declared mode, compute the result into its declared mode
3984 and then convert to the wider mode. Our value is the computed
3985 expression. */
3986 {
5a32d038 3987 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
3988 which will often result in some optimizations. Do the conversion
3989 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
3990 the extend. But don't do this if the type of EXP is a subtype
3991 of something else since then the conversion might involve
3992 more than just converting modes. */
3993 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3994 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
3995 {
3996 if (TREE_UNSIGNED (TREE_TYPE (exp))
3997 != SUBREG_PROMOTED_UNSIGNED_P (target))
3998 exp
3999 = convert
4000 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
4001 TREE_TYPE (exp)),
4002 exp);
4003
4004 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
4005 SUBREG_PROMOTED_UNSIGNED_P (target)),
4006 exp);
4007 }
3a94c984 4008
1499e0a8 4009 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
b258707c 4010
766f36c7 4011 /* If TEMP is a volatile MEM and we want a result value, make
f29369b9
RK
4012 the access now so it gets done only once. Likewise if
4013 it contains TARGET. */
4014 if (GET_CODE (temp) == MEM && want_value
4015 && (MEM_VOLATILE_P (temp)
4016 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
766f36c7
RK
4017 temp = copy_to_reg (temp);
4018
b258707c
RS
4019 /* If TEMP is a VOIDmode constant, use convert_modes to make
4020 sure that we properly convert it. */
4021 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
4022 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
4023 TYPE_MODE (TREE_TYPE (exp)), temp,
4024 SUBREG_PROMOTED_UNSIGNED_P (target));
4025
1499e0a8
RK
4026 convert_move (SUBREG_REG (target), temp,
4027 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
4028
4029 /* If we promoted a constant, change the mode back down to match
4030 target. Otherwise, the caller might get confused by a result whose
4031 mode is larger than expected. */
4032
4033 if (want_value && GET_MODE (temp) != GET_MODE (target)
4034 && GET_MODE (temp) != VOIDmode)
4035 {
4036 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
4037 SUBREG_PROMOTED_VAR_P (temp) = 1;
4038 SUBREG_PROMOTED_UNSIGNED_P (temp)
4039 = SUBREG_PROMOTED_UNSIGNED_P (target);
4040 }
4041
709f5be1 4042 return want_value ? temp : NULL_RTX;
1499e0a8 4043 }
bbf6f052
RK
4044 else
4045 {
4046 temp = expand_expr (exp, target, GET_MODE (target), 0);
766f36c7 4047 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
4048 If TARGET is a volatile mem ref, either return TARGET
4049 or return a reg copied *from* TARGET; ANSI requires this.
4050
4051 Otherwise, if TEMP is not TARGET, return TEMP
4052 if it is constant (for efficiency),
4053 or if we really want the correct value. */
bbf6f052
RK
4054 if (!(target && GET_CODE (target) == REG
4055 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 4056 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 4057 && ! rtx_equal_p (temp, target)
709f5be1 4058 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
4059 dont_return_target = 1;
4060 }
4061
b258707c
RS
4062 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
4063 the same as that of TARGET, adjust the constant. This is needed, for
4064 example, in case it is a CONST_DOUBLE and we want only a word-sized
4065 value. */
4066 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 4067 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
4068 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
4069 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
4070 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
4071
7d384cc0 4072 if (current_function_check_memory_usage
921b3427
RK
4073 && GET_CODE (target) == MEM
4074 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
4075 {
ea4da9db 4076 in_check_memory_usage = 1;
921b3427 4077 if (GET_CODE (temp) == MEM)
ebb1b59a
BS
4078 emit_library_call (chkr_copy_bitmap_libfunc, LCT_CONST_MAKE_BLOCK,
4079 VOIDmode, 3, XEXP (target, 0), Pmode,
6a9c4aed 4080 XEXP (temp, 0), Pmode,
921b3427
RK
4081 expr_size (exp), TYPE_MODE (sizetype));
4082 else
ebb1b59a
BS
4083 emit_library_call (chkr_check_addr_libfunc, LCT_CONST_MAKE_BLOCK,
4084 VOIDmode, 3, XEXP (target, 0), Pmode,
921b3427 4085 expr_size (exp), TYPE_MODE (sizetype),
3a94c984 4086 GEN_INT (MEMORY_USE_WO),
956d6950 4087 TYPE_MODE (integer_type_node));
ea4da9db 4088 in_check_memory_usage = 0;
921b3427
RK
4089 }
4090
bbf6f052
RK
4091 /* If value was not generated in the target, store it there.
4092 Convert the value to TARGET's type first if necessary. */
f3f2255a
R
4093 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
4094 one or both of them are volatile memory refs, we have to distinguish
4095 two cases:
4096 - expand_expr has used TARGET. In this case, we must not generate
4097 another copy. This can be detected by TARGET being equal according
4098 to == .
4099 - expand_expr has not used TARGET - that means that the source just
4100 happens to have the same RTX form. Since temp will have been created
4101 by expand_expr, it will compare unequal according to == .
4102 We must generate a copy in this case, to reach the correct number
4103 of volatile memory references. */
bbf6f052 4104
6036acbb 4105 if ((! rtx_equal_p (temp, target)
f3f2255a
R
4106 || (temp != target && (side_effects_p (temp)
4107 || side_effects_p (target))))
6036acbb 4108 && TREE_CODE (exp) != ERROR_MARK)
bbf6f052
RK
4109 {
4110 target = protect_from_queue (target, 1);
4111 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 4112 && GET_MODE (temp) != VOIDmode)
bbf6f052
RK
4113 {
4114 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4115 if (dont_return_target)
4116 {
4117 /* In this case, we will return TEMP,
4118 so make sure it has the proper mode.
4119 But don't forget to store the value into TARGET. */
4120 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
4121 emit_move_insn (target, temp);
4122 }
4123 else
4124 convert_move (target, temp, unsignedp);
4125 }
4126
4127 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
4128 {
4129 /* Handle copying a string constant into an array.
4130 The string constant may be shorter than the array.
4131 So copy just the string's actual length, and clear the rest. */
4132 rtx size;
22619c3f 4133 rtx addr;
bbf6f052 4134
e87b4f3f
RS
4135 /* Get the size of the data type of the string,
4136 which is actually the size of the target. */
4137 size = expr_size (exp);
4138 if (GET_CODE (size) == CONST_INT
4139 && INTVAL (size) < TREE_STRING_LENGTH (exp))
19caa751 4140 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
e87b4f3f 4141 else
bbf6f052 4142 {
e87b4f3f
RS
4143 /* Compute the size of the data to copy from the string. */
4144 tree copy_size
c03b7665 4145 = size_binop (MIN_EXPR,
b50d17a1 4146 make_tree (sizetype, size),
fed3cef0 4147 size_int (TREE_STRING_LENGTH (exp)));
f9e158c3 4148 unsigned int align = TYPE_ALIGN (TREE_TYPE (exp));
906c4e36
RK
4149 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
4150 VOIDmode, 0);
e87b4f3f
RS
4151 rtx label = 0;
4152
4153 /* Copy that much. */
4154 emit_block_move (target, temp, copy_size_rtx,
19caa751 4155 TYPE_ALIGN (TREE_TYPE (exp)));
e87b4f3f 4156
88f63c77
RK
4157 /* Figure out how much is left in TARGET that we have to clear.
4158 Do all calculations in ptr_mode. */
4159
4160 addr = XEXP (target, 0);
4161 addr = convert_modes (ptr_mode, Pmode, addr, 1);
4162
e87b4f3f
RS
4163 if (GET_CODE (copy_size_rtx) == CONST_INT)
4164 {
88f63c77 4165 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
3a94c984 4166 size = plus_constant (size, -TREE_STRING_LENGTH (exp));
8752c357
AJ
4167 align = MIN (align,
4168 (unsigned int) (BITS_PER_UNIT
4169 * (INTVAL (copy_size_rtx)
4170 & - INTVAL (copy_size_rtx))));
e87b4f3f
RS
4171 }
4172 else
4173 {
88f63c77
RK
4174 addr = force_reg (ptr_mode, addr);
4175 addr = expand_binop (ptr_mode, add_optab, addr,
906c4e36
RK
4176 copy_size_rtx, NULL_RTX, 0,
4177 OPTAB_LIB_WIDEN);
e87b4f3f 4178
88f63c77 4179 size = expand_binop (ptr_mode, sub_optab, size,
906c4e36
RK
4180 copy_size_rtx, NULL_RTX, 0,
4181 OPTAB_LIB_WIDEN);
e87b4f3f 4182
2a5b96fd 4183 align = BITS_PER_UNIT;
e87b4f3f 4184 label = gen_label_rtx ();
c5d5d461
JL
4185 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
4186 GET_MODE (size), 0, 0, label);
e87b4f3f 4187 }
2a5b96fd 4188 align = MIN (align, expr_align (copy_size));
e87b4f3f
RS
4189
4190 if (size != const0_rtx)
4191 {
3bdf5ad1
RK
4192 rtx dest = gen_rtx_MEM (BLKmode, addr);
4193
4194 MEM_COPY_ATTRIBUTES (dest, target);
4195
921b3427 4196 /* Be sure we can write on ADDR. */
ea4da9db 4197 in_check_memory_usage = 1;
7d384cc0 4198 if (current_function_check_memory_usage)
ebb1b59a
BS
4199 emit_library_call (chkr_check_addr_libfunc,
4200 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6a9c4aed 4201 addr, Pmode,
921b3427 4202 size, TYPE_MODE (sizetype),
3a94c984 4203 GEN_INT (MEMORY_USE_WO),
956d6950 4204 TYPE_MODE (integer_type_node));
ea4da9db 4205 in_check_memory_usage = 0;
051ffad5 4206 clear_storage (dest, size, align);
e87b4f3f 4207 }
22619c3f 4208
e87b4f3f
RS
4209 if (label)
4210 emit_label (label);
bbf6f052
RK
4211 }
4212 }
fffa9c1d
JW
4213 /* Handle calls that return values in multiple non-contiguous locations.
4214 The Irix 6 ABI has examples of this. */
4215 else if (GET_CODE (target) == PARALLEL)
aac5cc16 4216 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
19caa751 4217 TYPE_ALIGN (TREE_TYPE (exp)));
bbf6f052
RK
4218 else if (GET_MODE (temp) == BLKmode)
4219 emit_block_move (target, temp, expr_size (exp),
19caa751 4220 TYPE_ALIGN (TREE_TYPE (exp)));
bbf6f052
RK
4221 else
4222 emit_move_insn (target, temp);
4223 }
709f5be1 4224
766f36c7
RK
4225 /* If we don't want a value, return NULL_RTX. */
4226 if (! want_value)
4227 return NULL_RTX;
4228
4229 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
4230 ??? The latter test doesn't seem to make sense. */
4231 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 4232 return temp;
766f36c7
RK
4233
4234 /* Return TARGET itself if it is a hard register. */
4235 else if (want_value && GET_MODE (target) != BLKmode
4236 && ! (GET_CODE (target) == REG
4237 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 4238 return copy_to_reg (target);
3a94c984 4239
766f36c7 4240 else
709f5be1 4241 return target;
bbf6f052
RK
4242}
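/* A hedged example (editorial, not from expr.c): in a chained
   assignment

	x = y = exp;

   the inner assignment is expanded wanting a value, so store_expr
   hands back an rtx (a pseudo, or a constant) for the outer
   assignment to consume; when Y is a volatile MEM, that value is
   read back from Y itself, per the ANSI rule discussed above.  */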
4243\f
9de08200
RK
4244/* Return 1 if EXP just contains zeros. */
4245
4246static int
4247is_zeros_p (exp)
4248 tree exp;
4249{
4250 tree elt;
4251
4252 switch (TREE_CODE (exp))
4253 {
4254 case CONVERT_EXPR:
4255 case NOP_EXPR:
4256 case NON_LVALUE_EXPR:
4257 return is_zeros_p (TREE_OPERAND (exp, 0));
4258
4259 case INTEGER_CST:
05bccae2 4260 return integer_zerop (exp);
9de08200
RK
4261
4262 case COMPLEX_CST:
4263 return
4264 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4265
4266 case REAL_CST:
41c9120b 4267 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200
RK
4268
4269 case CONSTRUCTOR:
e1a43f73
PB
4270 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4271 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
4272 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4273 if (! is_zeros_p (TREE_VALUE (elt)))
4274 return 0;
4275
4276 return 1;
3a94c984 4277
e9a25f70
JL
4278 default:
4279 return 0;
9de08200 4280 }
9de08200
RK
4281}
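/* A hedged example (editorial, not from expr.c): for

	static struct { int i; double d; int v[2]; } z = { 0, 0.0, { 0, 0 } };

   every element satisfies is_zeros_p recursively, so the whole
   CONSTRUCTOR does; store_constructor relies on this to skip the
   individual stores once the target has been cleared.  */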
4282
4283/* Return 1 if EXP contains mostly (3/4) zeros. */
4284
4285static int
4286mostly_zeros_p (exp)
4287 tree exp;
4288{
9de08200
RK
4289 if (TREE_CODE (exp) == CONSTRUCTOR)
4290 {
e1a43f73
PB
4291 int elts = 0, zeros = 0;
4292 tree elt = CONSTRUCTOR_ELTS (exp);
4293 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4294 {
4295 /* If there are no ranges of true bits, it is all zero. */
4296 return elt == NULL_TREE;
4297 }
4298 for (; elt; elt = TREE_CHAIN (elt))
4299 {
4300 /* We do not handle the case where the index is a RANGE_EXPR,
4301 so the statistic will be somewhat inaccurate.
4302 We do make a more accurate count in store_constructor itself,
4303 and since this function is only used for nested array elements,
0f41302f 4304 this should be close enough. */
e1a43f73
PB
4305 if (mostly_zeros_p (TREE_VALUE (elt)))
4306 zeros++;
4307 elts++;
4308 }
9de08200
RK
4309
4310 return 4 * zeros >= 3 * elts;
4311 }
4312
4313 return is_zeros_p (exp);
4314}
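/* Editorial note: the test above is the integer form of
   zeros / elts >= 3/4.  For instance, 6 zeros out of 8 elements
   qualify (4 * 6 >= 3 * 8), while 6 out of 9 do not (24 < 27).  */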
4315\f
e1a43f73
PB
4316/* Helper function for store_constructor.
4317 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4318 TYPE is the type of the CONSTRUCTOR, not the element type.
c5c76735 4319 ALIGN and CLEARED are as for store_constructor.
23cb1766 4320 ALIAS_SET is the alias set to use for any stores.
23ccec44
JW
4321
4322 This provides a recursive shortcut back to store_constructor when it isn't
4323 necessary to go through store_field. This is so that we can pass through
4324 the cleared field to let store_constructor know that we may not have to
4325 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4326
4327static void
4328store_constructor_field (target, bitsize, bitpos,
23cb1766 4329 mode, exp, type, align, cleared, alias_set)
e1a43f73 4330 rtx target;
770ae6cc
RK
4331 unsigned HOST_WIDE_INT bitsize;
4332 HOST_WIDE_INT bitpos;
e1a43f73
PB
4333 enum machine_mode mode;
4334 tree exp, type;
729a2125 4335 unsigned int align;
e1a43f73 4336 int cleared;
23cb1766 4337 int alias_set;
e1a43f73
PB
4338{
4339 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44
JW
4340 && bitpos % BITS_PER_UNIT == 0
4341 /* If we have a non-zero bitpos for a register target, then we just
4342 let store_field do the bitfield handling. This is unlikely to
4343 generate unnecessary clear instructions anyway. */
4344 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 4345 {
126e5b0d 4346 if (bitpos != 0)
ce64861e
RK
4347 target
4348 = change_address (target,
4349 GET_MODE (target) == BLKmode
4350 || 0 != (bitpos
4351 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4352 ? BLKmode : VOIDmode,
4353 plus_constant (XEXP (target, 0),
4354 bitpos / BITS_PER_UNIT));
23cb1766 4355
e0339ef7
RK
4356
4357 /* Show the alignment may no longer be what it was and update the alias
4358 set, if required. */
eeebb824 4359 if (bitpos != 0)
8752c357 4360 align = MIN (align, (unsigned int) bitpos & - bitpos);
832ea3b3
FS
4361 if (GET_CODE (target) == MEM)
4362 MEM_ALIAS_SET (target) = alias_set;
e0339ef7 4363
b7010412 4364 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4365 }
4366 else
19caa751 4367 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
23cb1766 4368 int_size_in_bytes (type), alias_set);
e1a43f73
PB
4369}
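/* A hedged example (editorial, not from expr.c): for

	struct inner { int a, b; };
	struct outer { struct inner in; int c; } v = { { 0, 1 }, 2 };

   the element for V.IN is itself a CONSTRUCTOR at a byte boundary,
   so store_constructor_field recurses straight into
   store_constructor, passing CLEARED through; if all of V was
   already cleared, the nested constructor neither clears again nor
   stores its zero elements.  */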
4370
bbf6f052 4371/* Store the value of constructor EXP into the rtx TARGET.
e1a43f73 4372 TARGET is either a REG or a MEM.
19caa751 4373 ALIGN is the maximum known alignment for TARGET.
b7010412
RK
4374 CLEARED is true if TARGET is known to have been zeroed.
4375 SIZE is the number of bytes of TARGET we are allowed to modify: this
4376 may not be the same as the size of EXP if we are assigning to a field
4377 which has been packed to exclude padding bits. */
bbf6f052
RK
4378
4379static void
b7010412 4380store_constructor (exp, target, align, cleared, size)
bbf6f052
RK
4381 tree exp;
4382 rtx target;
729a2125 4383 unsigned int align;
e1a43f73 4384 int cleared;
13eb1f7f 4385 HOST_WIDE_INT size;
bbf6f052 4386{
4af3895e 4387 tree type = TREE_TYPE (exp);
a5efcd63 4388#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4389 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4390#endif
4af3895e 4391
bbf6f052
RK
4392 /* We know our target cannot conflict, since safe_from_p has been called. */
4393#if 0
4394 /* Don't try copying piece by piece into a hard register
4395 since that is vulnerable to being clobbered by EXP.
4396 Instead, construct in a pseudo register and then copy it all. */
4397 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4398 {
4399 rtx temp = gen_reg_rtx (GET_MODE (target));
7205485e 4400 store_constructor (exp, temp, align, cleared, size);
bbf6f052
RK
4401 emit_move_insn (target, temp);
4402 return;
4403 }
4404#endif
4405
e44842fe
RK
4406 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4407 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
4408 {
4409 register tree elt;
4410
4af3895e 4411 /* Inform later passes that the whole union value is dead. */
dd1db5ec
RK
4412 if ((TREE_CODE (type) == UNION_TYPE
4413 || TREE_CODE (type) == QUAL_UNION_TYPE)
4414 && ! cleared)
a59f8640
R
4415 {
4416 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4417
4418 /* If the constructor is empty, clear the union. */
4419 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
19caa751 4420 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
a59f8640 4421 }
4af3895e
JVA
4422
4423 /* If we are building a static constructor into a register,
4424 set the initial value as zero so we can fold the value into
67225c15
RK
4425 a constant. But if more than one register is involved,
4426 this probably loses. */
4427 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4428 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200
RK
4429 {
4430 if (! cleared)
e9a25f70 4431 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4af3895e 4432
9de08200
RK
4433 cleared = 1;
4434 }
4435
4436 /* If the constructor has fewer fields than the structure
4437 or if we are initializing the structure to mostly zeros,
fcf1b822
RK
4438 clear the whole structure first.  Don't do this if TARGET is a
4439 register whose mode size isn't equal to SIZE, since clear_storage
4440 can't handle this case. */
9376fcd6
RK
4441 else if (size > 0
4442 && ((list_length (CONSTRUCTOR_ELTS (exp))
c3b247b4 4443 != fields_length (type))
fcf1b822
RK
4444 || mostly_zeros_p (exp))
4445 && (GET_CODE (target) != REG
8752c357 4446 || (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (target)) == size))
9de08200
RK
4447 {
4448 if (! cleared)
19caa751 4449 clear_storage (target, GEN_INT (size), align);
9de08200
RK
4450
4451 cleared = 1;
4452 }
dd1db5ec 4453 else if (! cleared)
bbf6f052 4454 /* Inform later passes that the old value is dead. */
38a448ca 4455 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4456
4457 /* Store each element of the constructor into
4458 the corresponding field of TARGET. */
4459
4460 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4461 {
4462 register tree field = TREE_PURPOSE (elt);
c5c76735 4463#ifdef WORD_REGISTER_OPERATIONS
34c73909 4464 tree value = TREE_VALUE (elt);
c5c76735 4465#endif
bbf6f052 4466 register enum machine_mode mode;
770ae6cc
RK
4467 HOST_WIDE_INT bitsize;
4468 HOST_WIDE_INT bitpos = 0;
bbf6f052 4469 int unsignedp;
770ae6cc 4470 tree offset;
b50d17a1 4471 rtx to_rtx = target;
bbf6f052 4472
f32fd778
RS
4473 /* Just ignore missing fields.
4474 We cleared the whole structure, above,
4475 if any fields are missing. */
4476 if (field == 0)
4477 continue;
4478
e1a43f73
PB
4479 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4480 continue;
9de08200 4481
770ae6cc
RK
4482 if (host_integerp (DECL_SIZE (field), 1))
4483 bitsize = tree_low_cst (DECL_SIZE (field), 1);
14a774a9
RK
4484 else
4485 bitsize = -1;
4486
bbf6f052
RK
4487 unsignedp = TREE_UNSIGNED (field);
4488 mode = DECL_MODE (field);
4489 if (DECL_BIT_FIELD (field))
4490 mode = VOIDmode;
4491
770ae6cc
RK
4492 offset = DECL_FIELD_OFFSET (field);
4493 if (host_integerp (offset, 0)
4494 && host_integerp (bit_position (field), 0))
4495 {
4496 bitpos = int_bit_position (field);
4497 offset = 0;
4498 }
b50d17a1 4499 else
770ae6cc 4500 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
3a94c984 4501
b50d17a1
RK
4502 if (offset)
4503 {
4504 rtx offset_rtx;
4505
4506 if (contains_placeholder_p (offset))
7fa96708 4507 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 4508 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 4509
b50d17a1
RK
4510 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4511 if (GET_CODE (to_rtx) != MEM)
4512 abort ();
4513
3a94c984
KH
4514 if (GET_MODE (offset_rtx) != ptr_mode)
4515 {
bd070e1a 4516#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 4517 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
4518#else
4519 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4520#endif
4521 }
4522
b50d17a1
RK
4523 to_rtx
4524 = change_address (to_rtx, VOIDmode,
38a448ca 4525 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
c5c76735
JL
4526 force_reg (ptr_mode,
4527 offset_rtx)));
7fa96708 4528 align = DECL_OFFSET_ALIGN (field);
b50d17a1 4529 }
c5c76735 4530
cf04eb80
RK
4531 if (TREE_READONLY (field))
4532 {
9151b3bf 4533 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
4534 to_rtx = copy_rtx (to_rtx);
4535
cf04eb80
RK
4536 RTX_UNCHANGING_P (to_rtx) = 1;
4537 }
4538
34c73909
R
4539#ifdef WORD_REGISTER_OPERATIONS
4540 /* If this initializes a field that is smaller than a word, at the
4541 start of a word, try to widen it to a full word.
4542 This special case allows us to output C++ member function
4543 initializations in a form that the optimizers can understand. */
770ae6cc 4544 if (GET_CODE (target) == REG
34c73909
R
4545 && bitsize < BITS_PER_WORD
4546 && bitpos % BITS_PER_WORD == 0
4547 && GET_MODE_CLASS (mode) == MODE_INT
4548 && TREE_CODE (value) == INTEGER_CST
13eb1f7f
RK
4549 && exp_size >= 0
4550 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
34c73909
R
4551 {
4552 tree type = TREE_TYPE (value);
4553 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4554 {
4555 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4556 value = convert (type, value);
4557 }
4558 if (BYTES_BIG_ENDIAN)
4559 value
4560 = fold (build (LSHIFT_EXPR, type, value,
4561 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4562 bitsize = BITS_PER_WORD;
4563 mode = word_mode;
4564 }
4565#endif
c5c76735 4566 store_constructor_field (to_rtx, bitsize, bitpos, mode,
23cb1766 4567 TREE_VALUE (elt), type, align, cleared,
963a2a84 4568 (DECL_NONADDRESSABLE_P (field)
1ccfe3fa 4569 && GET_CODE (to_rtx) == MEM)
23cb1766
RK
4570 ? MEM_ALIAS_SET (to_rtx)
4571 : get_alias_set (TREE_TYPE (field)));
bbf6f052
RK
4572 }
4573 }
4af3895e 4574 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
4575 {
4576 register tree elt;
4577 register int i;
e1a43f73 4578 int need_to_clear;
4af3895e 4579 tree domain = TYPE_DOMAIN (type);
4af3895e 4580 tree elttype = TREE_TYPE (type);
85f3d674
RK
4581 int const_bounds_p = (host_integerp (TYPE_MIN_VALUE (domain), 0)
4582 && host_integerp (TYPE_MAX_VALUE (domain), 0));
4583 HOST_WIDE_INT minelt;
4584 HOST_WIDE_INT maxelt;
4585
4586 /* If we have constant bounds for the range of the type, get them. */
4587 if (const_bounds_p)
4588 {
4589 minelt = tree_low_cst (TYPE_MIN_VALUE (domain), 0);
4590 maxelt = tree_low_cst (TYPE_MAX_VALUE (domain), 0);
4591 }
bbf6f052 4592
e1a43f73 4593 /* If the constructor has fewer elements than the array,
38e01259 4594 clear the whole array first. Similarly if this is
e1a43f73
PB
4595 a static constructor of a non-BLKmode object. */
4596 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4597 need_to_clear = 1;
4598 else
4599 {
4600 HOST_WIDE_INT count = 0, zero_count = 0;
85f3d674
RK
4601 need_to_clear = ! const_bounds_p;
4602
e1a43f73
PB
4603 /* This loop is a more accurate version of the loop in
4604 mostly_zeros_p (it handles RANGE_EXPR in an index).
4605 It is also needed to check for missing elements. */
4606 for (elt = CONSTRUCTOR_ELTS (exp);
85f3d674 4607 elt != NULL_TREE && ! need_to_clear;
df0faff1 4608 elt = TREE_CHAIN (elt))
e1a43f73
PB
4609 {
4610 tree index = TREE_PURPOSE (elt);
4611 HOST_WIDE_INT this_node_count;
19caa751 4612
e1a43f73
PB
4613 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4614 {
4615 tree lo_index = TREE_OPERAND (index, 0);
4616 tree hi_index = TREE_OPERAND (index, 1);
05bccae2 4617
19caa751
RK
4618 if (! host_integerp (lo_index, 1)
4619 || ! host_integerp (hi_index, 1))
e1a43f73
PB
4620 {
4621 need_to_clear = 1;
4622 break;
4623 }
19caa751
RK
4624
4625 this_node_count = (tree_low_cst (hi_index, 1)
4626 - tree_low_cst (lo_index, 1) + 1);
e1a43f73
PB
4627 }
4628 else
4629 this_node_count = 1;
85f3d674 4630
e1a43f73
PB
4631 count += this_node_count;
4632 if (mostly_zeros_p (TREE_VALUE (elt)))
4633 zero_count += this_node_count;
4634 }
85f3d674 4635
8e958f70 4636 /* Clear the entire array first if there are any missing elements,
0f41302f 4637 or if the incidence of zero elements is >= 75%. */
85f3d674
RK
4638 if (! need_to_clear
4639 && (count < maxelt - minelt + 1 || 4 * zero_count >= 3 * count))
e1a43f73
PB
4640 need_to_clear = 1;
4641 }
85f3d674 4642
9376fcd6 4643 if (need_to_clear && size > 0)
9de08200
RK
4644 {
4645 if (! cleared)
19caa751 4646 clear_storage (target, GEN_INT (size), align);
9de08200
RK
4647 cleared = 1;
4648 }
bbf6f052
RK
4649 else
4650 /* Inform later passes that the old value is dead. */
38a448ca 4651 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4652
4653 /* Store each element of the constructor into
4654 the corresponding element of TARGET, determined
4655 by counting the elements. */
4656 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4657 elt;
4658 elt = TREE_CHAIN (elt), i++)
4659 {
4660 register enum machine_mode mode;
19caa751
RK
4661 HOST_WIDE_INT bitsize;
4662 HOST_WIDE_INT bitpos;
bbf6f052 4663 int unsignedp;
e1a43f73 4664 tree value = TREE_VALUE (elt);
729a2125 4665 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
03dc44a6
RS
4666 tree index = TREE_PURPOSE (elt);
4667 rtx xtarget = target;
bbf6f052 4668
e1a43f73
PB
4669 if (cleared && is_zeros_p (value))
4670 continue;
9de08200 4671
bbf6f052 4672 unsignedp = TREE_UNSIGNED (elttype);
14a774a9
RK
4673 mode = TYPE_MODE (elttype);
4674 if (mode == BLKmode)
19caa751
RK
4675 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4676 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4677 : -1);
14a774a9
RK
4678 else
4679 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 4680
e1a43f73
PB
4681 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4682 {
4683 tree lo_index = TREE_OPERAND (index, 0);
4684 tree hi_index = TREE_OPERAND (index, 1);
4685 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4686 struct nesting *loop;
05c0b405
PB
4687 HOST_WIDE_INT lo, hi, count;
4688 tree position;
e1a43f73 4689
0f41302f 4690 /* If the range is constant and "small", unroll the loop. */
85f3d674
RK
4691 if (const_bounds_p
4692 && host_integerp (lo_index, 0)
19caa751
RK
4693 && host_integerp (hi_index, 0)
4694 && (lo = tree_low_cst (lo_index, 0),
4695 hi = tree_low_cst (hi_index, 0),
05c0b405
PB
4696 count = hi - lo + 1,
4697 (GET_CODE (target) != MEM
4698 || count <= 2
19caa751
RK
4699 || (host_integerp (TYPE_SIZE (elttype), 1)
4700 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4701 <= 40 * 8)))))
e1a43f73 4702 {
05c0b405
PB
4703 lo -= minelt; hi -= minelt;
4704 for (; lo <= hi; lo++)
e1a43f73 4705 {
19caa751 4706 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
23cb1766
RK
4707 store_constructor_field
4708 (target, bitsize, bitpos, mode, value, type, align,
4709 cleared,
4710 TYPE_NONALIASED_COMPONENT (type)
4711 ? MEM_ALIAS_SET (target) : get_alias_set (elttype));
e1a43f73
PB
4712 }
4713 }
4714 else
4715 {
4716 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4717 loop_top = gen_label_rtx ();
4718 loop_end = gen_label_rtx ();
4719
4720 unsignedp = TREE_UNSIGNED (domain);
4721
4722 index = build_decl (VAR_DECL, NULL_TREE, domain);
4723
4724 DECL_RTL (index) = index_r
4725 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4726 &unsignedp, 0));
4727
4728 if (TREE_CODE (value) == SAVE_EXPR
4729 && SAVE_EXPR_RTL (value) == 0)
4730 {
0f41302f
MS
4731 /* Make sure value gets expanded once before the
4732 loop. */
e1a43f73
PB
4733 expand_expr (value, const0_rtx, VOIDmode, 0);
4734 emit_queue ();
4735 }
4736 store_expr (lo_index, index_r, 0);
4737 loop = expand_start_loop (0);
4738
0f41302f 4739 /* Assign value to element index. */
fed3cef0
RK
4740 position
4741 = convert (ssizetype,
4742 fold (build (MINUS_EXPR, TREE_TYPE (index),
4743 index, TYPE_MIN_VALUE (domain))));
4744 position = size_binop (MULT_EXPR, position,
4745 convert (ssizetype,
4746 TYPE_SIZE_UNIT (elttype)));
4747
e1a43f73 4748 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4749 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
e1a43f73
PB
4750 xtarget = change_address (target, mode, addr);
4751 if (TREE_CODE (value) == CONSTRUCTOR)
b7010412
RK
4752 store_constructor (value, xtarget, align, cleared,
4753 bitsize / BITS_PER_UNIT);
e1a43f73
PB
4754 else
4755 store_expr (value, xtarget, 0);
4756
4757 expand_exit_loop_if_false (loop,
4758 build (LT_EXPR, integer_type_node,
4759 index, hi_index));
4760
4761 expand_increment (build (PREINCREMENT_EXPR,
4762 TREE_TYPE (index),
7b8b9722 4763 index, integer_one_node), 0, 0);
e1a43f73
PB
4764 expand_end_loop ();
4765 emit_label (loop_end);
e1a43f73
PB
4766 }
4767 }
19caa751
RK
4768 else if ((index != 0 && ! host_integerp (index, 0))
4769 || ! host_integerp (TYPE_SIZE (elttype), 1))
03dc44a6 4770 {
e1a43f73 4771 rtx pos_rtx, addr;
03dc44a6
RS
4772 tree position;
4773
5b6c44ff 4774 if (index == 0)
fed3cef0 4775 index = ssize_int (1);
5b6c44ff 4776
e1a43f73 4777 if (minelt)
fed3cef0
RK
4778 index = convert (ssizetype,
4779 fold (build (MINUS_EXPR, index,
4780 TYPE_MIN_VALUE (domain))));
19caa751 4781
fed3cef0
RK
4782 position = size_binop (MULT_EXPR, index,
4783 convert (ssizetype,
4784 TYPE_SIZE_UNIT (elttype)));
03dc44a6 4785 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4786 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
03dc44a6 4787 xtarget = change_address (target, mode, addr);
e1a43f73 4788 store_expr (value, xtarget, 0);
03dc44a6
RS
4789 }
4790 else
4791 {
4792 if (index != 0)
19caa751
RK
4793 bitpos = ((tree_low_cst (index, 0) - minelt)
4794 * tree_low_cst (TYPE_SIZE (elttype), 1));
03dc44a6 4795 else
19caa751
RK
4796 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4797
c5c76735 4798 store_constructor_field (target, bitsize, bitpos, mode, value,
23cb1766
RK
4799 type, align, cleared,
4800 TYPE_NONALIASED_COMPONENT (type)
831ecbd4 4801 && GET_CODE (target) == MEM
23cb1766
RK
4802 ? MEM_ALIAS_SET (target) :
4803 get_alias_set (elttype));
4804
03dc44a6 4805 }
bbf6f052
RK
4806 }
4807 }
19caa751 4808
3a94c984 4809 /* Set constructor assignments. */
071a6595
PB
4810 else if (TREE_CODE (type) == SET_TYPE)
4811 {
e1a43f73 4812 tree elt = CONSTRUCTOR_ELTS (exp);
19caa751 4813 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
4814 tree domain = TYPE_DOMAIN (type);
4815 tree domain_min, domain_max, bitlength;
4816
9faa82d8 4817 /* The default implementation strategy is to extract the constant
071a6595
PB
4818 parts of the constructor, use that to initialize the target,
4819 and then "or" in whatever non-constant ranges we need in addition.
4820
4821 If a large set is all zero or all ones, it is
4822 probably better to set it using memset (if available) or bzero.
4823 Also, if a large set has just a single range, it may also be
4824 better to first clear the set (using bzero/memset), and then
0f41302f 4825 set the bits we want. */
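/* A hedged illustration (editorial, not from expr.c): for a
   CHILL/Pascal-style set over 0..31 built from the constructor
   [1..3], the constant range is assembled into the single word
   0x0E (with !BYTES_BIG_ENDIAN bit numbering) and stored with one
   move; a range with non-constant bounds would instead be OR'ed
   in afterward through a library call.  */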
3a94c984 4826
0f41302f 4827 /* Check for all zeros. */
9376fcd6 4828 if (elt == NULL_TREE && size > 0)
071a6595 4829 {
e1a43f73 4830 if (!cleared)
19caa751 4831 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
071a6595
PB
4832 return;
4833 }
4834
071a6595
PB
4835 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4836 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4837 bitlength = size_binop (PLUS_EXPR,
fed3cef0
RK
4838 size_diffop (domain_max, domain_min),
4839 ssize_int (1));
071a6595 4840
19caa751 4841 nbits = tree_low_cst (bitlength, 1);
e1a43f73
PB
4842
4843 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4844 are "complicated" (more than one range), initialize (the
3a94c984 4845 constant parts) by copying from a constant. */
e1a43f73
PB
4846 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4847 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 4848 {
19caa751 4849 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
b4ee5a72 4850 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 4851 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72 4852 HOST_WIDE_INT word = 0;
19caa751
RK
4853 unsigned int bit_pos = 0;
4854 unsigned int ibit = 0;
4855 unsigned int offset = 0; /* In bytes from beginning of set. */
4856
e1a43f73 4857 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 4858 for (;;)
071a6595 4859 {
b4ee5a72
PB
4860 if (bit_buffer[ibit])
4861 {
b09f3348 4862 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
4863 word |= (1 << (set_word_size - 1 - bit_pos));
4864 else
4865 word |= 1 << bit_pos;
4866 }
19caa751 4867
b4ee5a72
PB
4868 bit_pos++; ibit++;
4869 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 4870 {
e1a43f73
PB
4871 if (word != 0 || ! cleared)
4872 {
4873 rtx datum = GEN_INT (word);
4874 rtx to_rtx;
19caa751 4875
0f41302f
MS
4876 /* The assumption here is that it is safe to use
4877 XEXP if the set is multi-word, but not if
4878 it's single-word. */
e1a43f73
PB
4879 if (GET_CODE (target) == MEM)
4880 {
4881 to_rtx = plus_constant (XEXP (target, 0), offset);
4882 to_rtx = change_address (target, mode, to_rtx);
4883 }
3a94c984 4884 else if (offset == 0)
e1a43f73
PB
4885 to_rtx = target;
4886 else
4887 abort ();
4888 emit_move_insn (to_rtx, datum);
4889 }
19caa751 4890
b4ee5a72
PB
4891 if (ibit == nbits)
4892 break;
4893 word = 0;
4894 bit_pos = 0;
4895 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
4896 }
4897 }
071a6595 4898 }
e1a43f73 4899 else if (!cleared)
19caa751
RK
4900 /* Don't bother clearing storage if the set is all ones. */
4901 if (TREE_CHAIN (elt) != NULL_TREE
4902 || (TREE_PURPOSE (elt) == NULL_TREE
4903 ? nbits != 1
4904 : ( ! host_integerp (TREE_VALUE (elt), 0)
4905 || ! host_integerp (TREE_PURPOSE (elt), 0)
4906 || (tree_low_cst (TREE_VALUE (elt), 0)
4907 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4908 != (HOST_WIDE_INT) nbits))))
4909 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
3a94c984 4910
e1a43f73 4911 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595 4912 {
3a94c984 4913 /* Start of range of element or NULL. */
071a6595 4914 tree startbit = TREE_PURPOSE (elt);
3a94c984 4915 /* End of range of element, or element value. */
071a6595 4916 tree endbit = TREE_VALUE (elt);
381127e8 4917#ifdef TARGET_MEM_FUNCTIONS
071a6595 4918 HOST_WIDE_INT startb, endb;
381127e8 4919#endif
19caa751 4920 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
071a6595
PB
4921
4922 bitlength_rtx = expand_expr (bitlength,
19caa751 4923 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
071a6595 4924
3a94c984 4925 /* Handle non-range tuple element like [ expr ]. */
071a6595
PB
4926 if (startbit == NULL_TREE)
4927 {
4928 startbit = save_expr (endbit);
4929 endbit = startbit;
4930 }
19caa751 4931
071a6595
PB
4932 startbit = convert (sizetype, startbit);
4933 endbit = convert (sizetype, endbit);
4934 if (! integer_zerop (domain_min))
4935 {
4936 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4937 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4938 }
3a94c984 4939 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
071a6595 4940 EXPAND_CONST_ADDRESS);
3a94c984 4941 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
071a6595
PB
4942 EXPAND_CONST_ADDRESS);
4943
4944 if (REG_P (target))
4945 {
1da68f56
RK
4946 targetx
4947 = assign_temp
4948 ((build_qualified_type (type_for_mode (GET_MODE (target), 0),
4949 TYPE_QUAL_CONST)),
4950 0, 1, 1);
071a6595
PB
4951 emit_move_insn (targetx, target);
4952 }
19caa751 4953
071a6595
PB
4954 else if (GET_CODE (target) == MEM)
4955 targetx = target;
4956 else
4957 abort ();
4958
4959#ifdef TARGET_MEM_FUNCTIONS
4960 /* Optimization: If startbit and endbit are
9faa82d8 4961 constants divisible by BITS_PER_UNIT,
0f41302f 4962 call memset instead. */
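	      /* E.g. (illustrative values): STARTBIT == 8 and ENDBIT == 23
		 cover exactly bytes 1 and 2, so with 8-bit bytes this
		 becomes memset (addr + 1, -1, 2), where addr is the
		 address of the set in memory.  */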
071a6595
PB
4963 if (TREE_CODE (startbit) == INTEGER_CST
4964 && TREE_CODE (endbit) == INTEGER_CST
4965 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 4966 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 4967 {
ebb1b59a 4968 emit_library_call (memset_libfunc, LCT_NORMAL,
071a6595 4969 VOIDmode, 3,
e1a43f73
PB
4970 plus_constant (XEXP (targetx, 0),
4971 startb / BITS_PER_UNIT),
071a6595 4972 Pmode,
3b6f75e2 4973 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 4974 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 4975 TYPE_MODE (sizetype));
071a6595
PB
4976 }
4977 else
4978#endif
19caa751 4979 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
ebb1b59a
BS
4980 LCT_NORMAL, VOIDmode, 4, XEXP (targetx, 0),
4981 Pmode, bitlength_rtx, TYPE_MODE (sizetype),
19caa751
RK
4982 startbit_rtx, TYPE_MODE (sizetype),
4983 endbit_rtx, TYPE_MODE (sizetype));
4984
071a6595
PB
4985 if (REG_P (target))
4986 emit_move_insn (target, targetx);
4987 }
4988 }
bbf6f052
RK
4989
4990 else
4991 abort ();
4992}
4993
4994/* Store the value of EXP (an expression tree)
4995 into a subfield of TARGET which has mode MODE and occupies
4996 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4997 If MODE is VOIDmode, it means that we are storing into a bit-field.
4998
4999 If VALUE_MODE is VOIDmode, return nothing in particular.
5000 UNSIGNEDP is not used in this case.
5001
5002 Otherwise, return an rtx for the value stored. This rtx
5003 has mode VALUE_MODE if that is convenient to do.
5004 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
5005
19caa751 5006 ALIGN is the alignment that TARGET is known to have.
3a94c984 5007 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
ece32014
MM
5008
5009 ALIAS_SET is the alias set for the destination. This value will
5010 (in general) be different from that for TARGET, since TARGET is a
5011 reference to the containing structure. */
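/* A hypothetical call sketch (argument values invented for
   illustration): to store EXP into an unsigned 8-bit field that
   starts 3 bits into TARGET, returning the stored value in SImode:

     store_field (target, 8, 3, VOIDmode, exp, SImode, 1,
		  align, total_size, alias_set);

   Passing VOIDmode as MODE marks the destination as a bit-field.  */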
bbf6f052
RK
5012
5013static rtx
5014store_field (target, bitsize, bitpos, mode, exp, value_mode,
ece32014 5015 unsignedp, align, total_size, alias_set)
bbf6f052 5016 rtx target;
770ae6cc
RK
5017 HOST_WIDE_INT bitsize;
5018 HOST_WIDE_INT bitpos;
bbf6f052
RK
5019 enum machine_mode mode;
5020 tree exp;
5021 enum machine_mode value_mode;
5022 int unsignedp;
729a2125 5023 unsigned int align;
770ae6cc 5024 HOST_WIDE_INT total_size;
ece32014 5025 int alias_set;
bbf6f052 5026{
906c4e36 5027 HOST_WIDE_INT width_mask = 0;
bbf6f052 5028
e9a25f70
JL
5029 if (TREE_CODE (exp) == ERROR_MARK)
5030 return const0_rtx;
5031
906c4e36
RK
5032 if (bitsize < HOST_BITS_PER_WIDE_INT)
5033 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
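  /* E.g. a 5-bit field gives width_mask == (1 << 5) - 1 == 0x1f,
     i.e. five low-order one bits.  */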
bbf6f052
RK
5034
5035 /* If we are storing into an unaligned field of an aligned union that is
5036 in a register, we may have the mode of TARGET being an integer mode but
5037 MODE == BLKmode. In that case, get an aligned object whose size and
5038 alignment are the same as TARGET and store TARGET into it (we can avoid
5039 the store if the field being stored is the entire width of TARGET). Then
5040 call ourselves recursively to store the field into a BLKmode version of
5041 that object. Finally, load from the object into TARGET. This is not
5042 very efficient in general, but should only be slightly more expensive
5043 than the otherwise-required unaligned accesses. Perhaps this can be
5044 cleaned up later. */
5045
5046 if (mode == BLKmode
5047 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
5048 {
1da68f56
RK
5049 rtx object
5050 = assign_temp
5051 (build_qualified_type (type_for_mode (GET_MODE (target), 0),
5052 TYPE_QUAL_CONST),
5053 0, 1, 1);
bbf6f052
RK
5054 rtx blk_object = copy_rtx (object);
5055
5056 PUT_MODE (blk_object, BLKmode);
5057
8752c357 5058 if (bitsize != (HOST_WIDE_INT) GET_MODE_BITSIZE (GET_MODE (target)))
bbf6f052
RK
5059 emit_move_insn (object, target);
5060
5061 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
ece32014 5062 align, total_size, alias_set);
bbf6f052 5063
46093b97
RS
5064 /* Even though we aren't returning target, we need to
5065 give it the updated value. */
bbf6f052
RK
5066 emit_move_insn (target, object);
5067
46093b97 5068 return blk_object;
bbf6f052 5069 }
c3b247b4
JM
5070
5071 if (GET_CODE (target) == CONCAT)
5072 {
5073 /* We're storing into a struct containing a single __complex. */
5074
5075 if (bitpos != 0)
5076 abort ();
5077 return store_expr (exp, target, 0);
5078 }
bbf6f052
RK
5079
5080 /* If the structure is in a register or if the component
5081 is a bit field, we cannot use addressing to access it.
5082 Use bit-field techniques or SUBREG to store in it. */
5083
4fa52007 5084 if (mode == VOIDmode
6ab06cbb
JW
5085 || (mode != BLKmode && ! direct_store[(int) mode]
5086 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
5087 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4fa52007 5088 || GET_CODE (target) == REG
c980ac49 5089 || GET_CODE (target) == SUBREG
ccc98036
RS
5090 /* If the field isn't aligned enough to store as an ordinary memref,
5091 store it as a bit field. */
e1565e65 5092 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
19caa751 5093 && (align < GET_MODE_ALIGNMENT (mode)
14a774a9 5094 || bitpos % GET_MODE_ALIGNMENT (mode)))
e1565e65 5095 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
19caa751 5096 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
14a774a9
RK
5097 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
5098 /* If the RHS and field are a constant size and the size of the
5099 RHS isn't the same size as the bitfield, we must use bitfield
5100 operations. */
05bccae2
RK
5101 || (bitsize >= 0
5102 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
5103 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 5104 {
906c4e36 5105 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 5106
ef19912d
RK
5107 /* If BITSIZE is narrower than the size of the type of EXP
5108 we will be narrowing TEMP. Normally, what's wanted are the
5109 low-order bits. However, if EXP's type is a record and this is
5110 a big-endian machine, we want the upper BITSIZE bits. */
5111 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
5112 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
5113 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
5114 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
5115 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
5116 - bitsize),
5117 temp, 1);
5118
bbd6cf73
RK
5119 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
5120 MODE. */
5121 if (mode != VOIDmode && mode != BLKmode
5122 && mode != TYPE_MODE (TREE_TYPE (exp)))
5123 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
5124
a281e72d
RK
5125 /* If the modes of TARGET and TEMP are both BLKmode, both
5126 must be in memory and BITPOS must be aligned on a byte
5127 boundary. If so, we simply do a block copy. */
5128 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
5129 {
19caa751 5130 unsigned int exp_align = expr_align (exp);
729a2125 5131
a281e72d
RK
5132 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
5133 || bitpos % BITS_PER_UNIT != 0)
5134 abort ();
5135
0086427c
RK
5136 target = change_address (target, VOIDmode,
5137 plus_constant (XEXP (target, 0),
a281e72d
RK
5138 bitpos / BITS_PER_UNIT));
5139
729a2125
RK
5140 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
5141 align = MIN (exp_align, align);
c297a34e 5142
14a774a9 5143 /* Find an alignment that is consistent with the bit position. */
19caa751 5144 while ((bitpos % align) != 0)
14a774a9
RK
5145 align >>= 1;
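	  /* E.g. (illustrative): BITPOS == 24 with ALIGN == 64 halves
	     ALIGN through 32 and 16 down to 8, the largest power of
	     two dividing the bit position.  */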
5146
a281e72d 5147 emit_block_move (target, temp,
bd5dab53
RK
5148 bitsize == -1 ? expr_size (exp)
5149 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
5150 / BITS_PER_UNIT),
14a774a9 5151 align);
a281e72d
RK
5152
5153 return value_mode == VOIDmode ? const0_rtx : target;
5154 }
5155
bbf6f052
RK
5156 /* Store the value in the bitfield. */
5157 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
5158 if (value_mode != VOIDmode)
5159 {
5160 /* The caller wants an rtx for the value. */
5161 /* If possible, avoid refetching from the bitfield itself. */
5162 if (width_mask != 0
5163 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 5164 {
9074de27 5165 tree count;
5c4d7cfb 5166 enum machine_mode tmode;
86a2c12a 5167
5c4d7cfb
RS
5168 if (unsignedp)
5169 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
5170 tmode = GET_MODE (temp);
86a2c12a
RS
5171 if (tmode == VOIDmode)
5172 tmode = value_mode;
5c4d7cfb
RS
5173 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
5174 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
5175 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
5176 }
bbf6f052 5177 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
5178 NULL_RTX, value_mode, 0, align,
5179 total_size);
bbf6f052
RK
5180 }
5181 return const0_rtx;
5182 }
5183 else
5184 {
5185 rtx addr = XEXP (target, 0);
5186 rtx to_rtx;
5187
5188 /* If a value is wanted, it must be the lhs;
5189 so make the address stable for multiple use. */
5190
5191 if (value_mode != VOIDmode && GET_CODE (addr) != REG
5192 && ! CONSTANT_ADDRESS_P (addr)
5193 /* A frame-pointer reference is already stable. */
5194 && ! (GET_CODE (addr) == PLUS
5195 && GET_CODE (XEXP (addr, 1)) == CONST_INT
5196 && (XEXP (addr, 0) == virtual_incoming_args_rtx
5197 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
5198 addr = copy_to_reg (addr);
5199
5200 /* Now build a reference to just the desired component. */
5201
effbcc6a
RK
5202 to_rtx = copy_rtx (change_address (target, mode,
5203 plus_constant (addr,
5204 (bitpos
5205 / BITS_PER_UNIT))));
c6df88cb 5206 MEM_SET_IN_STRUCT_P (to_rtx, 1);
ece32014 5207 MEM_ALIAS_SET (to_rtx) = alias_set;
bbf6f052
RK
5208
5209 return store_expr (exp, to_rtx, value_mode != VOIDmode);
5210 }
5211}
5212\f
5213/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
5214 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 5215 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
5216
5217 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
5218 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
5219 If the position of the field is variable, we store a tree
5220 giving the variable offset (in units) in *POFFSET.
5221 This offset is in addition to the bit position.
5222 If the position is not variable, we store 0 in *POFFSET.
19caa751 5223 We set *PALIGNMENT to the alignment of the address that will be
839c4796
RK
5224 computed. This is the alignment of the thing we return if *POFFSET
5225 is zero, but can be less strictly aligned if *POFFSET is nonzero.
bbf6f052
RK
5226
5227 If any of the extraction expressions is volatile,
5228 we store 1 in *PVOLATILEP. Otherwise we don't change that.
5229
5230 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
5231 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
5232 is redundant.
5233
5234 If the field describes a variable-sized object, *PMODE is set to
5235 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
839c4796 5236 this case, but the address of the object can be found. */
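/* An illustrative example (values invented): for a COMPONENT_REF
   such as s.f, where F is a 3-bit bit-field placed 17 bits from the
   start of S, this returns the tree for S and sets *PBITSIZE to 3,
   *PBITPOS to 17, *POFFSET to 0 and *PMODE to VOIDmode.  */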
bbf6f052
RK
5237
5238tree
4969d05d 5239get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
839c4796 5240 punsignedp, pvolatilep, palignment)
bbf6f052 5241 tree exp;
770ae6cc
RK
5242 HOST_WIDE_INT *pbitsize;
5243 HOST_WIDE_INT *pbitpos;
7bb0943f 5244 tree *poffset;
bbf6f052
RK
5245 enum machine_mode *pmode;
5246 int *punsignedp;
5247 int *pvolatilep;
729a2125 5248 unsigned int *palignment;
bbf6f052
RK
5249{
5250 tree size_tree = 0;
5251 enum machine_mode mode = VOIDmode;
fed3cef0 5252 tree offset = size_zero_node;
770ae6cc 5253 tree bit_offset = bitsize_zero_node;
c84e2712 5254 unsigned int alignment = BIGGEST_ALIGNMENT;
770ae6cc 5255 tree tem;
bbf6f052 5256
770ae6cc
RK
5257 /* First get the mode, signedness, and size. We do this from just the
5258 outermost expression. */
bbf6f052
RK
5259 if (TREE_CODE (exp) == COMPONENT_REF)
5260 {
5261 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
5262 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
5263 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 5264
bbf6f052
RK
5265 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
5266 }
5267 else if (TREE_CODE (exp) == BIT_FIELD_REF)
5268 {
5269 size_tree = TREE_OPERAND (exp, 1);
5270 *punsignedp = TREE_UNSIGNED (exp);
5271 }
5272 else
5273 {
5274 mode = TYPE_MODE (TREE_TYPE (exp));
770ae6cc
RK
5275 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
5276
ab87f8c8
JL
5277 if (mode == BLKmode)
5278 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
5279 else
5280 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052 5281 }
3a94c984 5282
770ae6cc 5283 if (size_tree != 0)
bbf6f052 5284 {
770ae6cc 5285 if (! host_integerp (size_tree, 1))
e7c33f54
RK
5286 mode = BLKmode, *pbitsize = -1;
5287 else
770ae6cc 5288 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
5289 }
5290
5291 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5292 and find the ultimate containing object. */
bbf6f052
RK
5293 while (1)
5294 {
770ae6cc
RK
5295 if (TREE_CODE (exp) == BIT_FIELD_REF)
5296 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5297 else if (TREE_CODE (exp) == COMPONENT_REF)
bbf6f052 5298 {
770ae6cc
RK
5299 tree field = TREE_OPERAND (exp, 1);
5300 tree this_offset = DECL_FIELD_OFFSET (field);
bbf6f052 5301
e7f3c83f
RK
5302 /* If this field hasn't been filled in yet, don't go
5303 past it. This should only happen when folding expressions
5304 made during type construction. */
770ae6cc 5305 if (this_offset == 0)
e7f3c83f 5306 break;
770ae6cc
RK
5307 else if (! TREE_CONSTANT (this_offset)
5308 && contains_placeholder_p (this_offset))
5309 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
e7f3c83f 5310
7156dead 5311 offset = size_binop (PLUS_EXPR, offset, this_offset);
770ae6cc
RK
5312 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5313 DECL_FIELD_BIT_OFFSET (field));
e6d8c385 5314
770ae6cc
RK
5315 if (! host_integerp (offset, 0))
5316 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
bbf6f052 5317 }
7156dead 5318
742920c7 5319 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 5320 {
742920c7
RK
5321 tree index = TREE_OPERAND (exp, 1);
5322 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
770ae6cc 5323 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
7156dead 5324 tree unit_size = TYPE_SIZE_UNIT (TREE_TYPE (exp));
742920c7 5325
770ae6cc
RK
5326 /* We assume all arrays have sizes that are a multiple of a byte.
5327 First subtract the lower bound, if any, in the type of the
5328 index, then convert to sizetype and multiply by the size of the
5329 array element. */
5330 if (low_bound != 0 && ! integer_zerop (low_bound))
5331 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5332 index, low_bound));
f8dac6eb 5333
7156dead
RK
5334 /* If the index has a self-referential type, pass it to a
5335 WITH_RECORD_EXPR; if the component size is self-referential,
5336 pass our component to one. */
770ae6cc
RK
5337 if (! TREE_CONSTANT (index)
5338 && contains_placeholder_p (index))
5339 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
7156dead
RK
5340 if (! TREE_CONSTANT (unit_size)
5341 && contains_placeholder_p (unit_size))
5342 unit_size = build (WITH_RECORD_EXPR, sizetype, unit_size,
5343 TREE_OPERAND (exp, 0));
742920c7 5344
770ae6cc
RK
5345 offset = size_binop (PLUS_EXPR, offset,
5346 size_binop (MULT_EXPR,
5347 convert (sizetype, index),
7156dead 5348 unit_size));
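	  /* E.g. (assuming 4-byte integer elements): for a[i] with a
	     lower bound of 1, this accumulates
	     offset += (i - 1) * 4, computed in sizetype.  */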
bbf6f052 5349 }
7156dead 5350
bbf6f052
RK
5351 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5352 && ! ((TREE_CODE (exp) == NOP_EXPR
5353 || TREE_CODE (exp) == CONVERT_EXPR)
5354 && (TYPE_MODE (TREE_TYPE (exp))
5355 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5356 break;
7bb0943f
RS
5357
5358 /* If any reference in the chain is volatile, the effect is volatile. */
5359 if (TREE_THIS_VOLATILE (exp))
5360 *pvolatilep = 1;
839c4796
RK
5361
5362 /* If the offset is non-constant already, then we can't assume any
5363 alignment more than the alignment here. */
770ae6cc 5364 if (! TREE_CONSTANT (offset))
839c4796
RK
5365 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5366
bbf6f052
RK
5367 exp = TREE_OPERAND (exp, 0);
5368 }
5369
2f939d94 5370 if (DECL_P (exp))
839c4796 5371 alignment = MIN (alignment, DECL_ALIGN (exp));
9293498f 5372 else if (TREE_TYPE (exp) != 0)
839c4796
RK
5373 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5374
770ae6cc
RK
5375 /* If OFFSET is constant, see if we can return the whole thing as a
5376 constant bit position. Otherwise, split it up. */
5377 if (host_integerp (offset, 0)
5378 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5379 bitsize_unit_node))
5380 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5381 && host_integerp (tem, 0))
5382 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5383 else
5384 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5385
bbf6f052 5386 *pmode = mode;
19caa751 5387 *palignment = alignment;
bbf6f052
RK
5388 return exp;
5389}
921b3427
RK
5390
5391 /* Subroutine of expand_expr: compute memory_usage from modifier. */
770ae6cc 5392
921b3427
RK
5393static enum memory_use_mode
5394get_memory_usage_from_modifier (modifier)
5395 enum expand_modifier modifier;
5396{
5397 switch (modifier)
5398 {
5399 case EXPAND_NORMAL:
e5e809f4 5400 case EXPAND_SUM:
921b3427
RK
5401 return MEMORY_USE_RO;
5402 break;
5403 case EXPAND_MEMORY_USE_WO:
5404 return MEMORY_USE_WO;
5405 break;
5406 case EXPAND_MEMORY_USE_RW:
5407 return MEMORY_USE_RW;
5408 break;
921b3427 5409 case EXPAND_MEMORY_USE_DONT:
e5e809f4
JL
5410 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5411 MEMORY_USE_DONT, because they are modifiers to a call of
5412 expand_expr in the ADDR_EXPR case of expand_expr. */
921b3427 5413 case EXPAND_CONST_ADDRESS:
e5e809f4 5414 case EXPAND_INITIALIZER:
921b3427
RK
5415 return MEMORY_USE_DONT;
5416 case EXPAND_MEMORY_USE_BAD:
5417 default:
5418 abort ();
5419 }
5420}
bbf6f052 5421\f
3fe44edd
RK
5422/* Given an rtx VALUE that may contain additions and multiplications, return
5423 an equivalent value that just refers to a register, memory, or constant.
5424 This is done by generating instructions to perform the arithmetic and
5425 returning a pseudo-register containing the value.
c45a13a6
RK
5426
5427 The returned value may be a REG, SUBREG, MEM or constant. */
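/* For example (illustrative rtl): given (plus:SI (reg:SI 100)
   (const_int 4)), this emits an add insn and returns a pseudo
   register holding the sum; a bare (reg:SI 100) is returned
   unchanged.  */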
bbf6f052
RK
5428
5429rtx
5430force_operand (value, target)
5431 rtx value, target;
5432{
5433 register optab binoptab = 0;
5434 /* Use a temporary to force order of execution of calls to
5435 `force_operand'. */
5436 rtx tmp;
5437 register rtx op2;
5438 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 5439 register rtx subtarget = get_subtarget (target);
bbf6f052 5440
8b015896
RH
5441 /* Check for a PIC address load. */
5442 if (flag_pic
5443 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5444 && XEXP (value, 0) == pic_offset_table_rtx
5445 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5446 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5447 || GET_CODE (XEXP (value, 1)) == CONST))
5448 {
5449 if (!subtarget)
5450 subtarget = gen_reg_rtx (GET_MODE (value));
5451 emit_move_insn (subtarget, value);
5452 return subtarget;
5453 }
5454
bbf6f052
RK
5455 if (GET_CODE (value) == PLUS)
5456 binoptab = add_optab;
5457 else if (GET_CODE (value) == MINUS)
5458 binoptab = sub_optab;
5459 else if (GET_CODE (value) == MULT)
5460 {
5461 op2 = XEXP (value, 1);
5462 if (!CONSTANT_P (op2)
5463 && !(GET_CODE (op2) == REG && op2 != subtarget))
5464 subtarget = 0;
5465 tmp = force_operand (XEXP (value, 0), subtarget);
5466 return expand_mult (GET_MODE (value), tmp,
906c4e36 5467 force_operand (op2, NULL_RTX),
91ce572a 5468 target, 1);
bbf6f052
RK
5469 }
5470
5471 if (binoptab)
5472 {
5473 op2 = XEXP (value, 1);
5474 if (!CONSTANT_P (op2)
5475 && !(GET_CODE (op2) == REG && op2 != subtarget))
5476 subtarget = 0;
5477 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5478 {
5479 binoptab = add_optab;
5480 op2 = negate_rtx (GET_MODE (value), op2);
5481 }
5482
5483 /* Check for an addition with OP2 a constant integer and our first
5484 operand a PLUS of a virtual register and something else. In that
5485 case, we want to emit the sum of the virtual register and the
5486 constant first and then add the other value. This allows virtual
5487 register instantiation to simply modify the constant rather than
5488 creating another one around this addition. */
5489 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5490 && GET_CODE (XEXP (value, 0)) == PLUS
5491 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5492 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5493 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5494 {
5495 rtx temp = expand_binop (GET_MODE (value), binoptab,
5496 XEXP (XEXP (value, 0), 0), op2,
5497 subtarget, 0, OPTAB_LIB_WIDEN);
5498 return expand_binop (GET_MODE (value), binoptab, temp,
5499 force_operand (XEXP (XEXP (value, 0), 1), 0),
5500 target, 0, OPTAB_LIB_WIDEN);
5501 }
3a94c984 5502
bbf6f052
RK
5503 tmp = force_operand (XEXP (value, 0), subtarget);
5504 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 5505 force_operand (op2, NULL_RTX),
bbf6f052 5506 target, 0, OPTAB_LIB_WIDEN);
8008b228 5507 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
5508 because the only operations we are expanding here are signed ones. */
5509 }
5510 return value;
5511}
5512\f
5513/* Subroutine of expand_expr:
5514 save the non-copied parts (LIST) of an expr (LHS), and return a list
5515 which can restore these values to their previous values,
5516 should something modify their storage. */
5517
5518static tree
5519save_noncopied_parts (lhs, list)
5520 tree lhs;
5521 tree list;
5522{
5523 tree tail;
5524 tree parts = 0;
5525
5526 for (tail = list; tail; tail = TREE_CHAIN (tail))
5527 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5528 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5529 else
5530 {
5531 tree part = TREE_VALUE (tail);
5532 tree part_type = TREE_TYPE (part);
906c4e36 5533 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
1da68f56
RK
5534 rtx target
5535 = assign_temp (build_qualified_type (part_type,
5536 (TYPE_QUALS (part_type)
5537 | TYPE_QUAL_CONST)),
5538 0, 1, 1);
5539
bbf6f052 5540 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 5541 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 5542 parts = tree_cons (to_be_saved,
906c4e36
RK
5543 build (RTL_EXPR, part_type, NULL_TREE,
5544 (tree) target),
bbf6f052
RK
5545 parts);
5546 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5547 }
5548 return parts;
5549}
5550
5551/* Subroutine of expand_expr:
5552 record the non-copied parts (LIST) of an expr (LHS), and return a list
5553 which specifies the initial values of these parts. */
5554
5555static tree
5556init_noncopied_parts (lhs, list)
5557 tree lhs;
5558 tree list;
5559{
5560 tree tail;
5561 tree parts = 0;
5562
5563 for (tail = list; tail; tail = TREE_CHAIN (tail))
5564 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5565 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
c15398de 5566 else if (TREE_PURPOSE (tail))
bbf6f052
RK
5567 {
5568 tree part = TREE_VALUE (tail);
5569 tree part_type = TREE_TYPE (part);
906c4e36 5570 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
5571 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5572 }
5573 return parts;
5574}
5575
5576/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
5577 EXP can reference X, which is being modified. TOP_P is nonzero if this
5578 call is going to be used to determine whether we need a temporary
ff439b5f
CB
5579 for EXP, as opposed to a recursive call to this function.
5580
5581 It is always safe for this routine to return zero since it merely
5582 searches for optimization opportunities. */
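/* For example, if EXP contains a CALL_EXPR and X is a MEM, the
   result is 0, since the call must be assumed to clobber all of
   memory (see the CALL_EXPR case below).  */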
bbf6f052 5583
8f17b5c5 5584int
e5e809f4 5585safe_from_p (x, exp, top_p)
bbf6f052
RK
5586 rtx x;
5587 tree exp;
e5e809f4 5588 int top_p;
bbf6f052
RK
5589{
5590 rtx exp_rtl = 0;
5591 int i, nops;
1da68f56 5592 static tree save_expr_list;
bbf6f052 5593
6676e72f
RK
5594 if (x == 0
5595 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
5596 have no way of allocating temporaries of variable size
5597 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5598 So we assume here that something at a higher level has prevented a
f4510f37 5599 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 5600 do this when X is BLKmode and when we are at the top level. */
d0f062fb 5601 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 5602 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
5603 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5604 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5605 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5606 != INTEGER_CST)
1da68f56
RK
5607 && GET_MODE (x) == BLKmode)
5608 /* If X is in the outgoing argument area, it is always safe. */
5609 || (GET_CODE (x) == MEM
5610 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5611 || (GET_CODE (XEXP (x, 0)) == PLUS
5612 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx))))
bbf6f052
RK
5613 return 1;
5614
5615 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5616 find the underlying pseudo. */
5617 if (GET_CODE (x) == SUBREG)
5618 {
5619 x = SUBREG_REG (x);
5620 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5621 return 0;
5622 }
5623
1da68f56
RK
5624 /* A SAVE_EXPR might appear many times in the expression passed to the
5625 top-level safe_from_p call, and if it has a complex subexpression,
5626 examining it multiple times could result in a combinatorial explosion.
5627 E.g. on an Alpha running at least 200MHz, a Fortran test case compiled
5628 with optimization took about 28 minutes to compile -- even though it was
5629 only a few lines long. So we mark each SAVE_EXPR we see with TREE_PRIVATE
5630 and turn that off when we are done. We keep a list of the SAVE_EXPRs
5631 we have processed. Note that the only test of top_p was above. */
5632
5633 if (top_p)
5634 {
5635 int rtn;
5636 tree t;
5637
5638 save_expr_list = 0;
5639
5640 rtn = safe_from_p (x, exp, 0);
5641
5642 for (t = save_expr_list; t != 0; t = TREE_CHAIN (t))
5643 TREE_PRIVATE (TREE_PURPOSE (t)) = 0;
5644
5645 return rtn;
5646 }
bbf6f052 5647
1da68f56 5648 /* Now look at our tree code and possibly recurse. */
bbf6f052
RK
5649 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5650 {
5651 case 'd':
5652 exp_rtl = DECL_RTL (exp);
5653 break;
5654
5655 case 'c':
5656 return 1;
5657
5658 case 'x':
5659 if (TREE_CODE (exp) == TREE_LIST)
f32fd778 5660 return ((TREE_VALUE (exp) == 0
e5e809f4 5661 || safe_from_p (x, TREE_VALUE (exp), 0))
bbf6f052 5662 && (TREE_CHAIN (exp) == 0
e5e809f4 5663 || safe_from_p (x, TREE_CHAIN (exp), 0)));
ff439b5f
CB
5664 else if (TREE_CODE (exp) == ERROR_MARK)
5665 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
5666 else
5667 return 0;
5668
5669 case '1':
e5e809f4 5670 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
5671
5672 case '2':
5673 case '<':
e5e809f4
JL
5674 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5675 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
bbf6f052
RK
5676
5677 case 'e':
5678 case 'r':
5679 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5680 the expression. If it is set, we conflict iff we are that rtx or
5681 both are in memory. Otherwise, we check all operands of the
5682 expression recursively. */
5683
5684 switch (TREE_CODE (exp))
5685 {
5686 case ADDR_EXPR:
e44842fe 5687 return (staticp (TREE_OPERAND (exp, 0))
1da68f56
RK
5688 || TREE_STATIC (exp)
5689 || safe_from_p (x, TREE_OPERAND (exp, 0), 0));
bbf6f052
RK
5690
5691 case INDIRECT_REF:
1da68f56
RK
5692 if (GET_CODE (x) == MEM
5693 && alias_sets_conflict_p (MEM_ALIAS_SET (x),
5694 get_alias_set (exp)))
bbf6f052
RK
5695 return 0;
5696 break;
5697
5698 case CALL_EXPR:
f9808f81
MM
5699 /* Assume that the call will clobber all hard registers and
5700 all of memory. */
5701 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5702 || GET_CODE (x) == MEM)
5703 return 0;
bbf6f052
RK
5704 break;
5705
5706 case RTL_EXPR:
3bb5826a
RK
5707 /* If a sequence exists, we would have to scan every instruction
5708 in the sequence to see if it was safe. This is probably not
5709 worthwhile. */
5710 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
5711 return 0;
5712
3bb5826a 5713 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
5714 break;
5715
5716 case WITH_CLEANUP_EXPR:
5717 exp_rtl = RTL_EXPR_RTL (exp);
5718 break;
5719
5dab5552 5720 case CLEANUP_POINT_EXPR:
e5e809f4 5721 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 5722
bbf6f052
RK
5723 case SAVE_EXPR:
5724 exp_rtl = SAVE_EXPR_RTL (exp);
ff439b5f
CB
5725 if (exp_rtl)
5726 break;
5727
1da68f56
RK
5728 /* If we've already scanned this, don't do it again. Otherwise,
5729 show we've scanned it and record for clearing the flag if we're
5730 going on. */
5731 if (TREE_PRIVATE (exp))
5732 return 1;
ff439b5f 5733
1da68f56
RK
5734 TREE_PRIVATE (exp) = 1;
5735 if (! safe_from_p (x, TREE_OPERAND (exp, 0), 0))
ff59bfe6 5736 {
1da68f56
RK
5737 TREE_PRIVATE (exp) = 0;
5738 return 0;
ff59bfe6 5739 }
1da68f56
RK
5740
5741 save_expr_list = tree_cons (exp, NULL_TREE, save_expr_list);
ff439b5f 5742 return 1;
bbf6f052 5743
8129842c
RS
5744 case BIND_EXPR:
5745 /* The only operand we look at is operand 1. The rest aren't
5746 part of the expression. */
e5e809f4 5747 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 5748
bbf6f052 5749 case METHOD_CALL_EXPR:
0f41302f 5750 /* This takes a rtx argument, but shouldn't appear here. */
bbf6f052 5751 abort ();
3a94c984 5752
e9a25f70
JL
5753 default:
5754 break;
bbf6f052
RK
5755 }
5756
5757 /* If we have an rtx, we do not need to scan our operands. */
5758 if (exp_rtl)
5759 break;
5760
8f17b5c5 5761 nops = first_rtl_op (TREE_CODE (exp));
bbf6f052
RK
5762 for (i = 0; i < nops; i++)
5763 if (TREE_OPERAND (exp, i) != 0
e5e809f4 5764 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052 5765 return 0;
8f17b5c5
MM
5766
5767 /* If this is a language-specific tree code, it may require
5768 special handling. */
5769 if (TREE_CODE (exp) >= LAST_AND_UNUSED_TREE_CODE
5770 && lang_safe_from_p
5771 && !(*lang_safe_from_p) (x, exp))
5772 return 0;
bbf6f052
RK
5773 }
5774
5775 /* If we have an rtl, find any enclosed object. Then see if we conflict
5776 with it. */
5777 if (exp_rtl)
5778 {
5779 if (GET_CODE (exp_rtl) == SUBREG)
5780 {
5781 exp_rtl = SUBREG_REG (exp_rtl);
5782 if (GET_CODE (exp_rtl) == REG
5783 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5784 return 0;
5785 }
5786
5787 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
1da68f56 5788 are memory and they conflict. */
bbf6f052
RK
5789 return ! (rtx_equal_p (x, exp_rtl)
5790 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
1da68f56
RK
5791 && true_dependence (exp_rtl, GET_MODE (x), x,
5792 rtx_addr_varies_p)));
bbf6f052
RK
5793 }
5794
5795 /* If we reach here, it is safe. */
5796 return 1;
5797}
5798
5799/* Subroutine of expand_expr: return nonzero iff EXP is an
5800 expression whose type is statically determinable. */
5801
5802static int
5803fixed_type_p (exp)
5804 tree exp;
5805{
5806 if (TREE_CODE (exp) == PARM_DECL
5807 || TREE_CODE (exp) == VAR_DECL
5808 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5809 || TREE_CODE (exp) == COMPONENT_REF
5810 || TREE_CODE (exp) == ARRAY_REF)
5811 return 1;
5812 return 0;
5813}
01c8a7c8
RK
5814
5815/* Subroutine of expand_expr: return rtx if EXP is a
5816 variable or parameter; else return 0. */
5817
5818static rtx
5819var_rtx (exp)
5820 tree exp;
5821{
5822 STRIP_NOPS (exp);
5823 switch (TREE_CODE (exp))
5824 {
5825 case PARM_DECL:
5826 case VAR_DECL:
5827 return DECL_RTL (exp);
5828 default:
5829 return 0;
5830 }
5831}
dbecbbe4
JL
5832
5833#ifdef MAX_INTEGER_COMPUTATION_MODE
5834void
5835check_max_integer_computation_mode (exp)
3a94c984 5836 tree exp;
dbecbbe4 5837{
5f652c07 5838 enum tree_code code;
dbecbbe4
JL
5839 enum machine_mode mode;
5840
5f652c07
JM
5841 /* Strip any NOPs that don't change the mode. */
5842 STRIP_NOPS (exp);
5843 code = TREE_CODE (exp);
5844
71bca506
JL
5845 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5846 if (code == NOP_EXPR
5847 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5848 return;
5849
dbecbbe4
JL
5850 /* First check the type of the overall operation. We need only look at
5851 unary, binary and relational operations. */
5852 if (TREE_CODE_CLASS (code) == '1'
5853 || TREE_CODE_CLASS (code) == '2'
5854 || TREE_CODE_CLASS (code) == '<')
5855 {
5856 mode = TYPE_MODE (TREE_TYPE (exp));
5857 if (GET_MODE_CLASS (mode) == MODE_INT
5858 && mode > MAX_INTEGER_COMPUTATION_MODE)
5859 fatal ("unsupported wide integer operation");
5860 }
5861
5862 /* Check operand of a unary op. */
5863 if (TREE_CODE_CLASS (code) == '1')
5864 {
5865 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5866 if (GET_MODE_CLASS (mode) == MODE_INT
5867 && mode > MAX_INTEGER_COMPUTATION_MODE)
5868 fatal ("unsupported wide integer operation");
5869 }
3a94c984 5870
dbecbbe4
JL
5871 /* Check operands of a binary/comparison op. */
5872 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5873 {
5874 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5875 if (GET_MODE_CLASS (mode) == MODE_INT
5876 && mode > MAX_INTEGER_COMPUTATION_MODE)
5877 fatal ("unsupported wide integer operation");
5878
5879 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5880 if (GET_MODE_CLASS (mode) == MODE_INT
5881 && mode > MAX_INTEGER_COMPUTATION_MODE)
5882 fatal ("unsupported wide integer operation");
5883 }
5884}
5885#endif
14a774a9 5886\f
bbf6f052
RK
5887/* expand_expr: generate code for computing expression EXP.
5888 An rtx for the computed value is returned. The value is never null.
5889 In the case of a void EXP, const0_rtx is returned.
5890
5891 The value may be stored in TARGET if TARGET is nonzero.
5892 TARGET is just a suggestion; callers must assume that
5893 the rtx returned may not be the same as TARGET.
5894
5895 If TARGET is CONST0_RTX, it means that the value will be ignored.
5896
5897 If TMODE is not VOIDmode, it suggests generating the
5898 result in mode TMODE. But this is done only when convenient.
5899 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5900 TMODE is just a suggestion; callers must assume that
5901 the rtx returned may not have mode TMODE.
5902
d6a5ac33
RK
5903 Note that TARGET may have neither TMODE nor MODE. In that case, it
5904 probably will not be used.
bbf6f052
RK
5905
5906 If MODIFIER is EXPAND_SUM then when EXP is an addition
5907 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5908 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5909 products as above, or REG or MEM, or constant.
5910 Ordinarily in such cases we would output mul or add instructions
5911 and then return a pseudo reg containing the sum.
5912
5913 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5914 it also marks a label as absolutely required (it can't be dead).
26fcb35a 5915 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
5916 This is used for outputting expressions used in initializers.
5917
5918 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5919 with a constant address even if that address is not normally legitimate.
5920 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
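/* A typical call sketch: most callers write

     rtx op = expand_expr (exp, NULL_RTX, VOIDmode, 0);

   requesting EXP's value in its natural mode with no suggested
   target; the 0 modifier is EXPAND_NORMAL.  */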
bbf6f052
RK
5921
5922rtx
5923expand_expr (exp, target, tmode, modifier)
5924 register tree exp;
5925 rtx target;
5926 enum machine_mode tmode;
5927 enum expand_modifier modifier;
5928{
5929 register rtx op0, op1, temp;
5930 tree type = TREE_TYPE (exp);
5931 int unsignedp = TREE_UNSIGNED (type);
68557e14 5932 register enum machine_mode mode;
bbf6f052
RK
5933 register enum tree_code code = TREE_CODE (exp);
5934 optab this_optab;
68557e14
ML
5935 rtx subtarget, original_target;
5936 int ignore;
bbf6f052 5937 tree context;
921b3427
RK
5938 /* Used by check-memory-usage to make modifier read only. */
5939 enum expand_modifier ro_modifier;
bbf6f052 5940
3a94c984 5941 /* Handle ERROR_MARK before anybody tries to access its type. */
85f3d674 5942 if (TREE_CODE (exp) == ERROR_MARK || TREE_CODE (type) == ERROR_MARK)
68557e14
ML
5943 {
5944 op0 = CONST0_RTX (tmode);
5945 if (op0 != 0)
5946 return op0;
5947 return const0_rtx;
5948 }
5949
5950 mode = TYPE_MODE (type);
5951 /* Use subtarget as the target for operand 0 of a binary operation. */
296b4ed9 5952 subtarget = get_subtarget (target);
68557e14
ML
5953 original_target = target;
5954 ignore = (target == const0_rtx
5955 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5956 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5957 || code == COND_EXPR)
5958 && TREE_CODE (type) == VOID_TYPE));
5959
921b3427
RK
5960 /* Make a read-only version of the modifier. */
5961 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5962 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5963 ro_modifier = modifier;
5964 else
5965 ro_modifier = EXPAND_NORMAL;
ca695ac9 5966
dd27116b
RK
5967 /* If we are going to ignore this result, we need only do something
5968 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
5969 is, short-circuit the most common cases here. Note that we must
5970 not call expand_expr with anything but const0_rtx in case this
5971 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 5972
dd27116b
RK
5973 if (ignore)
5974 {
5975 if (! TREE_SIDE_EFFECTS (exp))
5976 return const0_rtx;
5977
14a774a9
RK
5978 /* Ensure we reference a volatile object even if the value is ignored, but
5979 don't do this if all we are doing is taking its address. */
dd27116b
RK
5980 if (TREE_THIS_VOLATILE (exp)
5981 && TREE_CODE (exp) != FUNCTION_DECL
14a774a9
RK
5982 && mode != VOIDmode && mode != BLKmode
5983 && modifier != EXPAND_CONST_ADDRESS)
dd27116b 5984 {
921b3427 5985 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
dd27116b
RK
5986 if (GET_CODE (temp) == MEM)
5987 temp = copy_to_reg (temp);
5988 return const0_rtx;
5989 }
5990
14a774a9
RK
5991 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5992 || code == INDIRECT_REF || code == BUFFER_REF)
dd27116b 5993 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 5994 VOIDmode, ro_modifier);
14a774a9 5995 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
3a94c984 5996 || code == ARRAY_REF)
dd27116b 5997 {
921b3427
RK
5998 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5999 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
dd27116b
RK
6000 return const0_rtx;
6001 }
6002 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
6003 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
6004 /* If the second operand has no side effects, just evaluate
0f41302f 6005 the first. */
dd27116b 6006 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 6007 VOIDmode, ro_modifier);
14a774a9
RK
6008 else if (code == BIT_FIELD_REF)
6009 {
6010 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
6011 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
6012 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
6013 return const0_rtx;
6014 }
3a94c984 6015 ;
90764a87 6016 target = 0;
dd27116b 6017 }
bbf6f052 6018
dbecbbe4 6019#ifdef MAX_INTEGER_COMPUTATION_MODE
5f652c07
JM
6020 /* Only check stuff here if the mode we want is different from the mode
6021 of the expression; if it's the same, check_max_integer_computation_mode
6022 will handle it. Do we really need to check this stuff at all? */
6023
ce3c0b53 6024 if (target
5f652c07 6025 && GET_MODE (target) != mode
ce3c0b53
JL
6026 && TREE_CODE (exp) != INTEGER_CST
6027 && TREE_CODE (exp) != PARM_DECL
ee06cc21
JL
6028 && TREE_CODE (exp) != ARRAY_REF
6029 && TREE_CODE (exp) != COMPONENT_REF
6030 && TREE_CODE (exp) != BIT_FIELD_REF
6031 && TREE_CODE (exp) != INDIRECT_REF
6bcd94ae 6032 && TREE_CODE (exp) != CALL_EXPR
6ab46dff
GRK
6033 && TREE_CODE (exp) != VAR_DECL
6034 && TREE_CODE (exp) != RTL_EXPR)
dbecbbe4
JL
6035 {
6036 enum machine_mode mode = GET_MODE (target);
6037
6038 if (GET_MODE_CLASS (mode) == MODE_INT
6039 && mode > MAX_INTEGER_COMPUTATION_MODE)
6040 fatal ("unsupported wide integer operation");
6041 }
6042
5f652c07
JM
6043 if (tmode != mode
6044 && TREE_CODE (exp) != INTEGER_CST
ce3c0b53 6045 && TREE_CODE (exp) != PARM_DECL
ee06cc21
JL
6046 && TREE_CODE (exp) != ARRAY_REF
6047 && TREE_CODE (exp) != COMPONENT_REF
6048 && TREE_CODE (exp) != BIT_FIELD_REF
6049 && TREE_CODE (exp) != INDIRECT_REF
ce3c0b53 6050 && TREE_CODE (exp) != VAR_DECL
6bcd94ae 6051 && TREE_CODE (exp) != CALL_EXPR
6ab46dff 6052 && TREE_CODE (exp) != RTL_EXPR
71bca506 6053 && GET_MODE_CLASS (tmode) == MODE_INT
dbecbbe4
JL
6054 && tmode > MAX_INTEGER_COMPUTATION_MODE)
6055 fatal ("unsupported wide integer operation");
6056
6057 check_max_integer_computation_mode (exp);
6058#endif
6059
e44842fe
RK
6060 /* If we will do cse, generate all results into pseudo registers
6061 since 1) that allows cse to find more things
6062 and 2) otherwise cse could produce an insn the machine
6063 cannot support. */
6064
bbf6f052
RK
6065 if (! cse_not_expected && mode != BLKmode && target
6066 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
6067 target = subtarget;
6068
bbf6f052
RK
6069 switch (code)
6070 {
6071 case LABEL_DECL:
b552441b
RS
6072 {
6073 tree function = decl_function_context (exp);
6074 /* Handle using a label in a containing function. */
d0977240
RK
6075 if (function != current_function_decl
6076 && function != inline_function_decl && function != 0)
b552441b
RS
6077 {
6078 struct function *p = find_function_data (function);
49ad7cfa
BS
6079 p->expr->x_forced_labels
6080 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
6081 p->expr->x_forced_labels);
b552441b 6082 }
ab87f8c8
JL
6083 else
6084 {
ab87f8c8
JL
6085 if (modifier == EXPAND_INITIALIZER)
6086 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
6087 label_rtx (exp),
6088 forced_labels);
6089 }
c5c76735 6090
38a448ca
RH
6091 temp = gen_rtx_MEM (FUNCTION_MODE,
6092 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
d0977240
RK
6093 if (function != current_function_decl
6094 && function != inline_function_decl && function != 0)
26fcb35a
RS
6095 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
6096 return temp;
b552441b 6097 }
bbf6f052
RK
6098
6099 case PARM_DECL:
6100 if (DECL_RTL (exp) == 0)
6101 {
6102 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 6103 return CONST0_RTX (mode);
bbf6f052
RK
6104 }
6105
0f41302f 6106 /* ... fall through ... */
d6a5ac33 6107
bbf6f052 6108 case VAR_DECL:
2dca20cd
RS
6109 /* If a static var's type was incomplete when the decl was written,
6110 but the type is complete now, lay out the decl now. */
d0f062fb 6111 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
2dca20cd
RS
6112 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
6113 {
2dca20cd
RS
6114 layout_decl (exp, 0);
6115 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
2dca20cd 6116 }
d6a5ac33 6117
7d384cc0
KR
6118 /* Although static-storage variables start off initialized, according to
6119 ANSI C, a memcpy could overwrite them with uninitialized values. So
6120 we check them too. This also lets us check for read-only variables
6121 accessed via a non-const declaration, in case it won't be detected
6122 any other way (e.g., in an embedded system or OS kernel without
6123 memory protection).
6124
6125 Aggregates are not checked here; they're handled elsewhere. */
01d939e8 6126 if (cfun && current_function_check_memory_usage
49ad7cfa 6127 && code == VAR_DECL
921b3427 6128 && GET_CODE (DECL_RTL (exp)) == MEM
921b3427
RK
6129 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6130 {
6131 enum memory_use_mode memory_usage;
6132 memory_usage = get_memory_usage_from_modifier (modifier);
6133
ea4da9db 6134 in_check_memory_usage = 1;
921b3427 6135 if (memory_usage != MEMORY_USE_DONT)
ebb1b59a
BS
6136 emit_library_call (chkr_check_addr_libfunc,
6137 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
6a9c4aed 6138 XEXP (DECL_RTL (exp), 0), Pmode,
921b3427
RK
6139 GEN_INT (int_size_in_bytes (type)),
6140 TYPE_MODE (sizetype),
956d6950
JL
6141 GEN_INT (memory_usage),
6142 TYPE_MODE (integer_type_node));
ea4da9db 6143 in_check_memory_usage = 0;
921b3427
RK
6144 }
6145
0f41302f 6146 /* ... fall through ... */
d6a5ac33 6147
2dca20cd 6148 case FUNCTION_DECL:
bbf6f052
RK
6149 case RESULT_DECL:
6150 if (DECL_RTL (exp) == 0)
6151 abort ();
d6a5ac33 6152
e44842fe
RK
6153 /* Ensure the variable is marked as used even if it doesn't go through
6154 a parser. If it hasn't been used yet, write out an external
6155 definition. */
6156 if (! TREE_USED (exp))
6157 {
6158 assemble_external (exp);
6159 TREE_USED (exp) = 1;
6160 }
6161
dc6d66b3
RK
6162 /* Show we haven't gotten RTL for this yet. */
6163 temp = 0;
6164
bbf6f052
RK
6165 /* Handle variables inherited from containing functions. */
6166 context = decl_function_context (exp);
6167
6168 /* We treat inline_function_decl as an alias for the current function
6169 because that is the inline function whose vars, types, etc.
6170 are being merged into the current function.
6171 See expand_inline_function. */
d6a5ac33 6172
bbf6f052
RK
6173 if (context != 0 && context != current_function_decl
6174 && context != inline_function_decl
6175 /* If var is static, we don't need a static chain to access it. */
6176 && ! (GET_CODE (DECL_RTL (exp)) == MEM
6177 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
6178 {
6179 rtx addr;
6180
6181 /* Mark as non-local and addressable. */
81feeecb 6182 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
6183 if (DECL_NO_STATIC_CHAIN (current_function_decl))
6184 abort ();
bbf6f052
RK
6185 mark_addressable (exp);
6186 if (GET_CODE (DECL_RTL (exp)) != MEM)
6187 abort ();
6188 addr = XEXP (DECL_RTL (exp), 0);
6189 if (GET_CODE (addr) == MEM)
3a94c984 6190 addr = change_address (addr, Pmode,
3bdf5ad1 6191 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
6192 else
6193 addr = fix_lexical_addr (addr, exp);
3bdf5ad1 6194
dc6d66b3 6195 temp = change_address (DECL_RTL (exp), mode, addr);
bbf6f052 6196 }
4af3895e 6197
bbf6f052
RK
6198 /* This is the case of an array whose size is to be determined
6199 from its initializer, while the initializer is still being parsed.
6200 See expand_decl. */
d6a5ac33 6201
dc6d66b3
RK
6202 else if (GET_CODE (DECL_RTL (exp)) == MEM
6203 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
6204 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
bbf6f052 6205 XEXP (DECL_RTL (exp), 0));
d6a5ac33
RK
6206
6207 /* If DECL_RTL is memory, we are in the normal case and either
6208 the address is not valid or it is not a register and -fforce-addr
6209 is specified, get the address into a register. */
6210
dc6d66b3
RK
6211 else if (GET_CODE (DECL_RTL (exp)) == MEM
6212 && modifier != EXPAND_CONST_ADDRESS
6213 && modifier != EXPAND_SUM
6214 && modifier != EXPAND_INITIALIZER
6215 && (! memory_address_p (DECL_MODE (exp),
6216 XEXP (DECL_RTL (exp), 0))
6217 || (flag_force_addr
6218 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
6219 temp = change_address (DECL_RTL (exp), VOIDmode,
d6a5ac33 6220 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 6221
dc6d66b3
RK
6222 /* If we got something, return it. But first, set the alignment
6223 if the address is a register. */
6224 if (temp != 0)
6225 {
6226 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
bdb429a5 6227 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
dc6d66b3
RK
6228
6229 return temp;
6230 }
6231
1499e0a8
RK
6232 /* If the mode of DECL_RTL does not match that of the decl, it
6233 must be a promoted value. We return a SUBREG of the wanted mode,
6234 but mark it so that we know that it was already extended. */
6235
6236 if (GET_CODE (DECL_RTL (exp)) == REG
6237 && GET_MODE (DECL_RTL (exp)) != mode)
6238 {
1499e0a8
RK
6239 /* Get the signedness used for this variable. Ensure we get the
6240 same mode we got when the variable was declared. */
78911e8b
RK
6241 if (GET_MODE (DECL_RTL (exp))
6242 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
1499e0a8
RK
6243 abort ();
6244
38a448ca 6245 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
1499e0a8
RK
6246 SUBREG_PROMOTED_VAR_P (temp) = 1;
6247 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6248 return temp;
6249 }
6250
bbf6f052
RK
6251 return DECL_RTL (exp);
6252
6253 case INTEGER_CST:
6254 return immed_double_const (TREE_INT_CST_LOW (exp),
05bccae2 6255 TREE_INT_CST_HIGH (exp), mode);
bbf6f052
RK
6256
6257 case CONST_DECL:
921b3427 6258 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
3a94c984 6259 EXPAND_MEMORY_USE_BAD);
bbf6f052
RK
6260
6261 case REAL_CST:
6262 /* If optimized, generate immediate CONST_DOUBLE
3a94c984
KH
6263 which will be turned into memory by reload if necessary.
6264
bbf6f052
RK
6265 We used to force a register so that loop.c could see it. But
6266 this does not allow gen_* patterns to perform optimizations with
6267 the constants. It also produces two insns in cases like "x = 1.0;".
6268 On most machines, floating-point constants are not permitted in
6269 many insns, so we'd end up copying it to a register in any case.
6270
6271 Now, we do the copying in expand_binop, if appropriate. */
6272 return immed_real_const (exp);
6273
6274 case COMPLEX_CST:
6275 case STRING_CST:
6276 if (! TREE_CST_RTL (exp))
bd7cf17e 6277 output_constant_def (exp, 1);
bbf6f052
RK
6278
6279 /* TREE_CST_RTL probably contains a constant address.
6280 On RISC machines where a constant address isn't valid,
6281 make some insns to get that address into a register. */
6282 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6283 && modifier != EXPAND_CONST_ADDRESS
6284 && modifier != EXPAND_INITIALIZER
6285 && modifier != EXPAND_SUM
d6a5ac33
RK
6286 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6287 || (flag_force_addr
6288 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
bbf6f052
RK
6289 return change_address (TREE_CST_RTL (exp), VOIDmode,
6290 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6291 return TREE_CST_RTL (exp);
6292
bf1e5319 6293 case EXPR_WITH_FILE_LOCATION:
b24f65cd
APB
6294 {
6295 rtx to_return;
3b304f5b 6296 const char *saved_input_filename = input_filename;
b24f65cd
APB
6297 int saved_lineno = lineno;
6298 input_filename = EXPR_WFL_FILENAME (exp);
6299 lineno = EXPR_WFL_LINENO (exp);
6300 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6301 emit_line_note (input_filename, lineno);
3a94c984 6302 /* Possibly avoid switching back and forth here. */
b24f65cd
APB
6303 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6304 input_filename = saved_input_filename;
6305 lineno = saved_lineno;
6306 return to_return;
6307 }
bf1e5319 6308
bbf6f052
RK
6309 case SAVE_EXPR:
6310 context = decl_function_context (exp);
d6a5ac33 6311
d0977240
RK
6312 /* If this SAVE_EXPR was at global context, assume we are an
6313 initialization function and move it into our context. */
6314 if (context == 0)
6315 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6316
bbf6f052
RK
6317 /* We treat inline_function_decl as an alias for the current function
6318 because that is the inline function whose vars, types, etc.
6319 are being merged into the current function.
6320 See expand_inline_function. */
6321 if (context == current_function_decl || context == inline_function_decl)
6322 context = 0;
6323
6324 /* If this is non-local, handle it. */
6325 if (context)
6326 {
d0977240
RK
6327 /* The following call just exists to abort if the context is
6328 not of a containing function. */
6329 find_function_data (context);
6330
bbf6f052
RK
6331 temp = SAVE_EXPR_RTL (exp);
6332 if (temp && GET_CODE (temp) == REG)
6333 {
6334 put_var_into_stack (exp);
6335 temp = SAVE_EXPR_RTL (exp);
6336 }
6337 if (temp == 0 || GET_CODE (temp) != MEM)
6338 abort ();
6339 return change_address (temp, mode,
6340 fix_lexical_addr (XEXP (temp, 0), exp));
6341 }
6342 if (SAVE_EXPR_RTL (exp) == 0)
6343 {
06089a8b
RK
6344 if (mode == VOIDmode)
6345 temp = const0_rtx;
6346 else
1da68f56
RK
6347 temp = assign_temp (build_qualified_type (type,
6348 (TYPE_QUALS (type)
6349 | TYPE_QUAL_CONST)),
6350 3, 0, 0);
1499e0a8 6351
bbf6f052 6352 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 6353 if (!optimize && GET_CODE (temp) == REG)
38a448ca
RH
6354 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6355 save_expr_regs);
ff78f773
RK
6356
6357 /* If the mode of TEMP does not match that of the expression, it
6358 must be a promoted value. We pass store_expr a SUBREG of the
6359 wanted mode but mark it so that we know that it was already
6360 extended. Note that `unsignedp' was modified above in
6361 this case. */
6362
6363 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6364 {
38a448ca 6365 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
ff78f773
RK
6366 SUBREG_PROMOTED_VAR_P (temp) = 1;
6367 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6368 }
6369
4c7a0be9 6370 if (temp == const0_rtx)
921b3427
RK
6371 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6372 EXPAND_MEMORY_USE_BAD);
4c7a0be9
JW
6373 else
6374 store_expr (TREE_OPERAND (exp, 0), temp, 0);
e5e809f4
JL
6375
6376 TREE_USED (exp) = 1;
bbf6f052 6377 }
1499e0a8
RK
6378
6379 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6380 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 6381 but mark it so that we know that it was already extended. */
1499e0a8
RK
6382
6383 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6384 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6385 {
e70d22c8
RK
6386 /* Compute the signedness and make the proper SUBREG. */
6387 promote_mode (type, mode, &unsignedp, 0);
38a448ca 6388 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
1499e0a8
RK
6389 SUBREG_PROMOTED_VAR_P (temp) = 1;
6390 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6391 return temp;
6392 }
6393
bbf6f052
RK
6394 return SAVE_EXPR_RTL (exp);
6395
679163cf
MS
6396 case UNSAVE_EXPR:
6397 {
6398 rtx temp;
6399 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6400 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6401 return temp;
6402 }
6403
b50d17a1 6404 case PLACEHOLDER_EXPR:
e9a25f70
JL
6405 {
6406 tree placeholder_expr;
6407
6408 /* If there is an object on the head of the placeholder list,
e5e809f4 6409	 see if some object in it is of type TYPE or a pointer to it.  For
e9a25f70
JL
6410 further information, see tree.def. */
6411 for (placeholder_expr = placeholder_list;
6412 placeholder_expr != 0;
6413 placeholder_expr = TREE_CHAIN (placeholder_expr))
6414 {
6415 tree need_type = TYPE_MAIN_VARIANT (type);
6416 tree object = 0;
6417 tree old_list = placeholder_list;
6418 tree elt;
6419
e5e809f4 6420 /* Find the outermost reference that is of the type we want.
3a94c984 6421 If none, see if any object has a type that is a pointer to
e5e809f4
JL
6422 the type we want. */
6423 for (elt = TREE_PURPOSE (placeholder_expr);
6424 elt != 0 && object == 0;
6425 elt
6426 = ((TREE_CODE (elt) == COMPOUND_EXPR
6427 || TREE_CODE (elt) == COND_EXPR)
6428 ? TREE_OPERAND (elt, 1)
6429 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6430 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6431 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6432 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6433 ? TREE_OPERAND (elt, 0) : 0))
6434 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6435 object = elt;
e9a25f70 6436
e9a25f70 6437 for (elt = TREE_PURPOSE (placeholder_expr);
e5e809f4
JL
6438 elt != 0 && object == 0;
6439 elt
6440 = ((TREE_CODE (elt) == COMPOUND_EXPR
6441 || TREE_CODE (elt) == COND_EXPR)
6442 ? TREE_OPERAND (elt, 1)
6443 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6444 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6445 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6446 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6447 ? TREE_OPERAND (elt, 0) : 0))
6448 if (POINTER_TYPE_P (TREE_TYPE (elt))
6449 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
e9a25f70 6450 == need_type))
e5e809f4 6451 object = build1 (INDIRECT_REF, need_type, elt);
dc500fa1 6452
e9a25f70 6453 if (object != 0)
2cde2255 6454 {
e9a25f70
JL
6455 /* Expand this object skipping the list entries before
6456 it was found in case it is also a PLACEHOLDER_EXPR.
6457 In that case, we want to translate it using subsequent
6458 entries. */
6459 placeholder_list = TREE_CHAIN (placeholder_expr);
6460 temp = expand_expr (object, original_target, tmode,
6461 ro_modifier);
6462 placeholder_list = old_list;
6463 return temp;
2cde2255 6464 }
e9a25f70
JL
6465 }
6466 }
b50d17a1
RK
6467
6468 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6469 abort ();
6470
6471 case WITH_RECORD_EXPR:
6472 /* Put the object on the placeholder list, expand our first operand,
6473 and pop the list. */
6474 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6475 placeholder_list);
6476 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
921b3427 6477 tmode, ro_modifier);
b50d17a1
RK
6478 placeholder_list = TREE_CHAIN (placeholder_list);
6479 return target;
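      /* A hedged illustration of the pairing above (Ada-flavored; not
	 taken from the sources): given a discriminated record

	     type Vec (Len : Natural) is record
	       Data : String (1 .. Len);
	     end record;

	 TYPE_SIZE of Vec contains a PLACEHOLDER_EXPR standing for "the
	 object at hand".  To evaluate that size for a particular object
	 V, the front end wraps it as WITH_RECORD_EXPR <size, V>; the
	 WITH_RECORD_EXPR case pushes V onto placeholder_list, and the
	 PLACEHOLDER_EXPR case above finds it there.  */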
6480
70e6ca43
APB
6481 case GOTO_EXPR:
6482 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6483 expand_goto (TREE_OPERAND (exp, 0));
6484 else
6485 expand_computed_goto (TREE_OPERAND (exp, 0));
6486 return const0_rtx;
6487
bbf6f052 6488 case EXIT_EXPR:
e44842fe
RK
6489 expand_exit_loop_if_false (NULL_PTR,
6490 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
6491 return const0_rtx;
6492
f42e28dd
APB
6493 case LABELED_BLOCK_EXPR:
6494 if (LABELED_BLOCK_BODY (exp))
6495 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6496 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6497 return const0_rtx;
6498
6499 case EXIT_BLOCK_EXPR:
6500 if (EXIT_BLOCK_RETURN (exp))
ab87f8c8 6501 sorry ("returned value in block_exit_expr");
f42e28dd
APB
6502 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6503 return const0_rtx;
6504
bbf6f052 6505 case LOOP_EXPR:
0088fcb1 6506 push_temp_slots ();
bbf6f052
RK
6507 expand_start_loop (1);
6508 expand_expr_stmt (TREE_OPERAND (exp, 0));
6509 expand_end_loop ();
0088fcb1 6510 pop_temp_slots ();
bbf6f052
RK
6511
6512 return const0_rtx;
6513
6514 case BIND_EXPR:
6515 {
6516 tree vars = TREE_OPERAND (exp, 0);
6517 int vars_need_expansion = 0;
6518
6519 /* Need to open a binding contour here because
e976b8b2 6520 if there are any cleanups they must be contained here. */
8e91754e 6521 expand_start_bindings (2);
bbf6f052 6522
2df53c0b
RS
6523 /* Mark the corresponding BLOCK for output in its proper place. */
6524 if (TREE_OPERAND (exp, 2) != 0
6525 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6526 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
6527
6528 /* If VARS have not yet been expanded, expand them now. */
6529 while (vars)
6530 {
6531 if (DECL_RTL (vars) == 0)
6532 {
6533 vars_need_expansion = 1;
6534 expand_decl (vars);
6535 }
6536 expand_decl_init (vars);
6537 vars = TREE_CHAIN (vars);
6538 }
6539
921b3427 6540 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
bbf6f052
RK
6541
6542 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6543
6544 return temp;
6545 }
6546
6547 case RTL_EXPR:
83b853c9
JM
6548 if (RTL_EXPR_SEQUENCE (exp))
6549 {
6550 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6551 abort ();
6552 emit_insns (RTL_EXPR_SEQUENCE (exp));
6553 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6554 }
64dc53f3
MM
6555 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6556 free_temps_for_rtl_expr (exp);
bbf6f052
RK
6557 return RTL_EXPR_RTL (exp);
6558
6559 case CONSTRUCTOR:
dd27116b
RK
6560 /* If we don't need the result, just ensure we evaluate any
6561 subexpressions. */
6562 if (ignore)
6563 {
6564 tree elt;
6565 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
921b3427
RK
6566 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6567 EXPAND_MEMORY_USE_BAD);
dd27116b
RK
6568 return const0_rtx;
6569 }
3207b172 6570
4af3895e
JVA
6571 /* All elts simple constants => refer to a constant in memory. But
6572 if this is a non-BLKmode mode, let it store a field at a time
6573 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 6574 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
6575 store directly into the target unless the type is large enough
6576 that memcpy will be used. If we are making an initializer and
3207b172 6577 all operands are constant, put it in memory as well. */
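      /* For instance (a sketch): a constant aggregate such as

	     static const int tbl[4] = { 1, 2, 3, 4 };

	 gives a TREE_STATIC, BLKmode CONSTRUCTOR with no safe target, so
	 it is emitted once via output_constant_def below; a small
	 automatic "struct { int x, y; } p = { 1, 2 };" instead falls
	 through to store_constructor, which fills the target field by
	 field.  */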
dd27116b 6578 else if ((TREE_STATIC (exp)
3207b172 6579 && ((mode == BLKmode
e5e809f4 6580 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 6581 || TREE_ADDRESSABLE (exp)
19caa751 6582 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
3a94c984 6583 && (! MOVE_BY_PIECES_P
19caa751
RK
6584 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6585 TYPE_ALIGN (type)))
9de08200 6586 && ! mostly_zeros_p (exp))))
dd27116b 6587 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052 6588 {
bd7cf17e 6589 rtx constructor = output_constant_def (exp, 1);
19caa751 6590
b552441b
RS
6591 if (modifier != EXPAND_CONST_ADDRESS
6592 && modifier != EXPAND_INITIALIZER
6593 && modifier != EXPAND_SUM
d6a5ac33
RK
6594 && (! memory_address_p (GET_MODE (constructor),
6595 XEXP (constructor, 0))
6596 || (flag_force_addr
6597 && GET_CODE (XEXP (constructor, 0)) != REG)))
bbf6f052
RK
6598 constructor = change_address (constructor, VOIDmode,
6599 XEXP (constructor, 0));
6600 return constructor;
6601 }
bbf6f052
RK
6602 else
6603 {
e9ac02a6
JW
6604 /* Handle calls that pass values in multiple non-contiguous
6605 locations. The Irix 6 ABI has examples of this. */
e5e809f4 6606 if (target == 0 || ! safe_from_p (target, exp, 1)
e9ac02a6 6607 || GET_CODE (target) == PARALLEL)
1da68f56
RK
6608 target
6609 = assign_temp (build_qualified_type (type,
6610 (TYPE_QUALS (type)
6611 | (TREE_READONLY (exp)
6612 * TYPE_QUAL_CONST))),
6613 TREE_ADDRESSABLE (exp), 1, 1);
07604beb 6614
b7010412
RK
6615 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6616 int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052
RK
6617 return target;
6618 }
6619
6620 case INDIRECT_REF:
6621 {
6622 tree exp1 = TREE_OPERAND (exp, 0);
7581a30f 6623 tree index;
3a94c984
KH
6624 tree string = string_constant (exp1, &index);
6625
06eaa86f 6626 /* Try to optimize reads from const strings. */
7581a30f
JW
6627 if (string
6628 && TREE_CODE (string) == STRING_CST
6629 && TREE_CODE (index) == INTEGER_CST
05bccae2 6630 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7581a30f 6631 && GET_MODE_CLASS (mode) == MODE_INT
06eaa86f
JW
6632 && GET_MODE_SIZE (mode) == 1
6633 && modifier != EXPAND_MEMORY_USE_WO)
05bccae2
RK
6634 return
6635 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
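	/* E.g. (a minimal sketch): for

	       char c = *("abc" + 1);

	   string_constant returns the STRING_CST "abc" with INDEX 1, and
	   the checks above let the load fold to GEN_INT ('b') with no
	   memory reference emitted.  */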
bbf6f052 6636
405f0da6
JW
6637 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6638 op0 = memory_address (mode, op0);
8c8a8e34 6639
01d939e8 6640 if (cfun && current_function_check_memory_usage
49ad7cfa 6641 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
921b3427
RK
6642 {
6643 enum memory_use_mode memory_usage;
6644 memory_usage = get_memory_usage_from_modifier (modifier);
6645
6646 if (memory_usage != MEMORY_USE_DONT)
c85f7c16
JL
6647 {
6648 in_check_memory_usage = 1;
ebb1b59a
BS
6649 emit_library_call (chkr_check_addr_libfunc,
6650 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, op0,
6651 Pmode, GEN_INT (int_size_in_bytes (type)),
c85f7c16
JL
6652 TYPE_MODE (sizetype),
6653 GEN_INT (memory_usage),
6654 TYPE_MODE (integer_type_node));
6655 in_check_memory_usage = 0;
6656 }
921b3427
RK
6657 }
6658
38a448ca 6659 temp = gen_rtx_MEM (mode, op0);
3bdf5ad1 6660 set_mem_attributes (temp, exp, 0);
1125706f
RK
6661
6662 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6663 here, because, in C and C++, the fact that a location is accessed
6664 through a pointer to const does not mean that the value there can
6665 never change. Languages where it can never change should
6666 also set TREE_STATIC. */
5cb7a25a 6667 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
14a774a9
RK
6668
6669 /* If we are writing to this object and its type is a record with
6670 readonly fields, we must mark it as readonly so it will
6671 conflict with readonly references to those fields. */
1da68f56 6672 if (modifier == EXPAND_MEMORY_USE_WO && readonly_fields_p (type))
14a774a9
RK
6673 RTX_UNCHANGING_P (temp) = 1;
6674
8c8a8e34
JW
6675 return temp;
6676 }
bbf6f052
RK
6677
6678 case ARRAY_REF:
742920c7
RK
6679 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6680 abort ();
bbf6f052 6681
bbf6f052 6682 {
742920c7
RK
6683 tree array = TREE_OPERAND (exp, 0);
6684 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6685 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
fed3cef0 6686 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
08293add 6687 HOST_WIDE_INT i;
b50d17a1 6688
d4c89139
PB
 6689	/* Optimize the special case of a zero lower bound.
6690
6691 We convert the low_bound to sizetype to avoid some problems
6692 with constant folding. (E.g. suppose the lower bound is 1,
6693 and its mode is QI. Without the conversion, (ARRAY
6694 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fed3cef0 6695 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
d4c89139 6696
742920c7 6697 if (! integer_zerop (low_bound))
fed3cef0 6698 index = size_diffop (index, convert (sizetype, low_bound));
742920c7 6699
742920c7 6700 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
6701 This is not done in fold so it won't happen inside &.
6702 Don't fold if this is for wide characters since it's too
6703 difficult to do correctly and this is a very rare case. */
742920c7
RK
6704
6705 if (TREE_CODE (array) == STRING_CST
6706 && TREE_CODE (index) == INTEGER_CST
05bccae2 6707 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
ad2e7dd0
RK
6708 && GET_MODE_CLASS (mode) == MODE_INT
6709 && GET_MODE_SIZE (mode) == 1)
05bccae2
RK
6710 return
6711 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
bbf6f052 6712
742920c7
RK
6713 /* If this is a constant index into a constant array,
6714 just get the value from the array. Handle both the cases when
6715 we have an explicit constructor and when our operand is a variable
6716 that was declared const. */
4af3895e 6717
05bccae2
RK
6718 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6719 && TREE_CODE (index) == INTEGER_CST
3a94c984 6720 && 0 > compare_tree_int (index,
05bccae2
RK
6721 list_length (CONSTRUCTOR_ELTS
6722 (TREE_OPERAND (exp, 0)))))
742920c7 6723 {
05bccae2
RK
6724 tree elem;
6725
6726 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6727 i = TREE_INT_CST_LOW (index);
6728 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6729 ;
6730
6731 if (elem)
6732 return expand_expr (fold (TREE_VALUE (elem)), target,
6733 tmode, ro_modifier);
742920c7 6734 }
3a94c984 6735
742920c7
RK
6736 else if (optimize >= 1
6737 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6738 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6739 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6740 {
08293add 6741 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
6742 {
6743 tree init = DECL_INITIAL (array);
6744
742920c7
RK
6745 if (TREE_CODE (init) == CONSTRUCTOR)
6746 {
665f2503 6747 tree elem;
742920c7 6748
05bccae2 6749 for (elem = CONSTRUCTOR_ELTS (init);
5cb1bea4
JM
6750 (elem
6751 && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
05bccae2
RK
6752 elem = TREE_CHAIN (elem))
6753 ;
6754
742920c7
RK
6755 if (elem)
6756 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 6757 tmode, ro_modifier);
742920c7
RK
6758 }
6759 else if (TREE_CODE (init) == STRING_CST
05bccae2
RK
6760 && 0 > compare_tree_int (index,
6761 TREE_STRING_LENGTH (init)))
5c80f6e6
JJ
6762 {
6763 tree type = TREE_TYPE (TREE_TYPE (init));
6764 enum machine_mode mode = TYPE_MODE (type);
6765
6766 if (GET_MODE_CLASS (mode) == MODE_INT
6767 && GET_MODE_SIZE (mode) == 1)
6768 return (GEN_INT
6769 (TREE_STRING_POINTER
6770 (init)[TREE_INT_CST_LOW (index)]));
6771 }
742920c7
RK
6772 }
6773 }
6774 }
3a94c984 6775 /* Fall through. */
bbf6f052
RK
6776
6777 case COMPONENT_REF:
6778 case BIT_FIELD_REF:
4af3895e 6779 /* If the operand is a CONSTRUCTOR, we can just extract the
7a0b7b9a
RK
6780 appropriate field if it is present. Don't do this if we have
6781 already written the data since we want to refer to that copy
6782 and varasm.c assumes that's what we'll do. */
4af3895e 6783 if (code != ARRAY_REF
7a0b7b9a
RK
6784 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6785 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4af3895e
JVA
6786 {
6787 tree elt;
6788
6789 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6790 elt = TREE_CHAIN (elt))
86b5812c
RK
6791 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6792 /* We can normally use the value of the field in the
6793 CONSTRUCTOR. However, if this is a bitfield in
6794 an integral mode that we can fit in a HOST_WIDE_INT,
6795 we must mask only the number of bits in the bitfield,
6796 since this is done implicitly by the constructor. If
6797 the bitfield does not meet either of those conditions,
6798 we can't do this optimization. */
6799 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6800 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6801 == MODE_INT)
6802 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6803 <= HOST_BITS_PER_WIDE_INT))))
6804 {
3a94c984 6805 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
86b5812c
RK
6806 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6807 {
9df2c88c
RK
6808 HOST_WIDE_INT bitsize
6809 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
86b5812c
RK
6810
6811 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6812 {
6813 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6814 op0 = expand_and (op0, op1, target);
6815 }
6816 else
6817 {
e5e809f4
JL
6818 enum machine_mode imode
6819 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c 6820 tree count
e5e809f4
JL
6821 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6822 0);
86b5812c
RK
6823
6824 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6825 target, 0);
6826 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6827 target, 0);
6828 }
6829 }
6830
6831 return op0;
6832 }
4af3895e
JVA
6833 }
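      /* A worked example of the masking above (assuming a 32-bit
	 HOST_WIDE_INT and taking the field's DECL_MODE as SImode for the
	 illustration): for a field "int f : 3" whose constructor value is
	 -1, an unsigned field is masked with
	 ((HOST_WIDE_INT) 1 << 3) - 1 == 7, while a signed field is
	 shifted left and then arithmetically right by 32 - 3 == 29 bits,
	 recreating the sign-extended -1 in the full mode.  */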
6834
bbf6f052
RK
6835 {
6836 enum machine_mode mode1;
770ae6cc 6837 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 6838 tree offset;
bbf6f052 6839 int volatilep = 0;
729a2125 6840 unsigned int alignment;
839c4796
RK
6841 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6842 &mode1, &unsignedp, &volatilep,
6843 &alignment);
bbf6f052 6844
e7f3c83f
RK
6845 /* If we got back the original object, something is wrong. Perhaps
6846 we are evaluating an expression too early. In any event, don't
6847 infinitely recurse. */
6848 if (tem == exp)
6849 abort ();
6850
3d27140a 6851 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
6852 computation, since it will need a temporary and TARGET is known
 6853	 to suffice.  This occurs in unchecked conversion in Ada.  */
3a94c984 6854
b74f5ff2
RK
6855 op0 = expand_expr (tem,
6856 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6857 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6858 != INTEGER_CST)
6859 ? target : NULL_RTX),
4ed67205 6860 VOIDmode,
14a774a9
RK
6861 (modifier == EXPAND_INITIALIZER
6862 || modifier == EXPAND_CONST_ADDRESS)
e5e809f4 6863 ? modifier : EXPAND_NORMAL);
bbf6f052 6864
8c8a8e34 6865 /* If this is a constant, put it into a register if it is a
14a774a9 6866	   legitimate constant and OFFSET is 0, and into memory if it isn't.  */
8c8a8e34
JW
6867 if (CONSTANT_P (op0))
6868 {
6869 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
14a774a9
RK
6870 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6871 && offset == 0)
8c8a8e34
JW
6872 op0 = force_reg (mode, op0);
6873 else
6874 op0 = validize_mem (force_const_mem (mode, op0));
6875 }
6876
7bb0943f
RS
6877 if (offset != 0)
6878 {
906c4e36 6879 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f 6880
14a774a9
RK
6881 /* If this object is in memory, put it into a register.
6882 This case can't occur in C, but can in Ada if we have
6883 unchecked conversion of an expression from a scalar type to
6884 an array or record type. */
6885 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6886 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6887 {
1da68f56
RK
6888 tree nt = build_qualified_type (TREE_TYPE (tem),
6889 (TYPE_QUALS (TREE_TYPE (tem))
6890 | TYPE_QUAL_CONST));
6891 rtx memloc = assign_temp (nt, 1, 1, 1);
14a774a9
RK
6892
6893 mark_temp_addr_taken (memloc);
6894 emit_move_insn (memloc, op0);
6895 op0 = memloc;
6896 }
6897
7bb0943f
RS
6898 if (GET_CODE (op0) != MEM)
6899 abort ();
2d48c13d
JL
6900
6901 if (GET_MODE (offset_rtx) != ptr_mode)
bd070e1a 6902 {
2d48c13d 6903#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 6904 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
2d48c13d 6905#else
bd070e1a 6906 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d 6907#endif
bd070e1a 6908 }
2d48c13d 6909
14a774a9 6910	    /* A constant address in OP0 can have VOIDmode; we must not
efd07ca7 6911	       try to call force_reg for that case, so avoid it.  */
89752202
HB
6912 if (GET_CODE (op0) == MEM
6913 && GET_MODE (op0) == BLKmode
efd07ca7 6914 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 6915 && bitsize != 0
3a94c984 6916 && (bitpos % bitsize) == 0
89752202 6917 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
19caa751 6918 && alignment == GET_MODE_ALIGNMENT (mode1))
89752202
HB
6919 {
6920 rtx temp = change_address (op0, mode1,
6921 plus_constant (XEXP (op0, 0),
6922 (bitpos /
6923 BITS_PER_UNIT)));
6924 if (GET_CODE (XEXP (temp, 0)) == REG)
6925 op0 = temp;
6926 else
6927 op0 = change_address (op0, mode1,
6928 force_reg (GET_MODE (XEXP (temp, 0)),
6929 XEXP (temp, 0)));
6930 bitpos = 0;
6931 }
6932
7bb0943f 6933 op0 = change_address (op0, VOIDmode,
38a448ca 6934 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
c5c76735
JL
6935 force_reg (ptr_mode,
6936 offset_rtx)));
7bb0943f
RS
6937 }
6938
bbf6f052
RK
6939 /* Don't forget about volatility even if this is a bitfield. */
6940 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6941 {
6942 op0 = copy_rtx (op0);
6943 MEM_VOLATILE_P (op0) = 1;
6944 }
6945
921b3427 6946 /* Check the access. */
32919a0d
RK
6947 if (cfun != 0 && current_function_check_memory_usage
6948 && GET_CODE (op0) == MEM)
3a94c984 6949 {
921b3427
RK
6950 enum memory_use_mode memory_usage;
6951 memory_usage = get_memory_usage_from_modifier (modifier);
6952
6953 if (memory_usage != MEMORY_USE_DONT)
6954 {
6955 rtx to;
6956 int size;
6957
6958 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6959 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6960
6961 /* Check the access right of the pointer. */
ea4da9db 6962 in_check_memory_usage = 1;
e9a25f70 6963 if (size > BITS_PER_UNIT)
ebb1b59a
BS
6964 emit_library_call (chkr_check_addr_libfunc,
6965 LCT_CONST_MAKE_BLOCK, VOIDmode, 3, to,
6966 Pmode, GEN_INT (size / BITS_PER_UNIT),
e9a25f70 6967 TYPE_MODE (sizetype),
3a94c984 6968 GEN_INT (memory_usage),
956d6950 6969 TYPE_MODE (integer_type_node));
ea4da9db 6970 in_check_memory_usage = 0;
921b3427
RK
6971 }
6972 }
6973
ccc98036
RS
6974 /* In cases where an aligned union has an unaligned object
6975 as a field, we might be extracting a BLKmode value from
6976 an integer-mode (e.g., SImode) object. Handle this case
6977 by doing the extract into an object as wide as the field
6978 (which we know to be the width of a basic mode), then
f2420d0b
JW
6979 storing into memory, and changing the mode to BLKmode.
6980 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6981 EXPAND_INITIALIZER), then we must not copy to a temporary. */
bbf6f052 6982 if (mode1 == VOIDmode
ccc98036 6983 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
f9409c3a 6984 || (modifier != EXPAND_CONST_ADDRESS
f9409c3a 6985 && modifier != EXPAND_INITIALIZER
c2722ef6
RK
6986 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6987 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6988 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
f9409c3a
JW
6989 /* If the field isn't aligned enough to fetch as a memref,
6990 fetch it as a bit field. */
e1565e65
DE
6991 || (mode1 != BLKmode
6992 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
14a774a9 6993 && ((TYPE_ALIGN (TREE_TYPE (tem))
19caa751 6994 < GET_MODE_ALIGNMENT (mode))
dd841181
RK
6995 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6996 /* If the type and the field are a constant size and the
6997 size of the type isn't the same size as the bitfield,
6998 we must use bitfield operations. */
6999 || ((bitsize >= 0
7000 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
7001 == INTEGER_CST)
05bccae2
RK
7002 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
7003 bitsize)))))
14a774a9
RK
7004 || (modifier != EXPAND_CONST_ADDRESS
7005 && modifier != EXPAND_INITIALIZER
7006 && mode == BLKmode
e1565e65 7007 && SLOW_UNALIGNED_ACCESS (mode, alignment)
19caa751 7008 && (TYPE_ALIGN (type) > alignment
14a774a9 7009 || bitpos % TYPE_ALIGN (type) != 0)))
bbf6f052 7010 {
bbf6f052
RK
7011 enum machine_mode ext_mode = mode;
7012
14a774a9
RK
7013 if (ext_mode == BLKmode
7014 && ! (target != 0 && GET_CODE (op0) == MEM
7015 && GET_CODE (target) == MEM
7016 && bitpos % BITS_PER_UNIT == 0))
bbf6f052
RK
7017 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
7018
7019 if (ext_mode == BLKmode)
a281e72d
RK
7020 {
7021 /* In this case, BITPOS must start at a byte boundary and
7022 TARGET, if specified, must be a MEM. */
7023 if (GET_CODE (op0) != MEM
7024 || (target != 0 && GET_CODE (target) != MEM)
7025 || bitpos % BITS_PER_UNIT != 0)
7026 abort ();
7027
7028 op0 = change_address (op0, VOIDmode,
7029 plus_constant (XEXP (op0, 0),
7030 bitpos / BITS_PER_UNIT));
7031 if (target == 0)
7032 target = assign_temp (type, 0, 1, 1);
7033
7034 emit_block_move (target, op0,
bd5dab53
RK
7035 bitsize == -1 ? expr_size (exp)
7036 : GEN_INT ((bitsize + BITS_PER_UNIT - 1)
7037 / BITS_PER_UNIT),
19caa751 7038 BITS_PER_UNIT);
3a94c984 7039
a281e72d
RK
7040 return target;
7041 }
bbf6f052 7042
dc6d66b3
RK
7043 op0 = validize_mem (op0);
7044
7045 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
bdb429a5 7046 mark_reg_pointer (XEXP (op0, 0), alignment);
dc6d66b3
RK
7047
7048 op0 = extract_bit_field (op0, bitsize, bitpos,
bbf6f052 7049 unsignedp, target, ext_mode, ext_mode,
034f9101 7050 alignment,
bbf6f052 7051 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
7052
7053 /* If the result is a record type and BITSIZE is narrower than
7054 the mode of OP0, an integral mode, and this is a big endian
7055 machine, we must put the field into the high-order bits. */
7056 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
7057 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
7058 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
7059 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
7060 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
7061 - bitsize),
7062 op0, 1);
7063
bbf6f052
RK
7064 if (mode == BLKmode)
7065 {
27fb3e16 7066 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
1da68f56
RK
7067 TYPE_QUAL_CONST);
7068 rtx new = assign_temp (nt, 0, 1, 1);
bbf6f052
RK
7069
7070 emit_move_insn (new, op0);
7071 op0 = copy_rtx (new);
7072 PUT_MODE (op0, BLKmode);
7073 }
7074
7075 return op0;
7076 }
7077
05019f83
RK
7078 /* If the result is BLKmode, use that to access the object
7079 now as well. */
7080 if (mode == BLKmode)
7081 mode1 = BLKmode;
7082
bbf6f052
RK
7083 /* Get a reference to just this component. */
7084 if (modifier == EXPAND_CONST_ADDRESS
7085 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
fe7a8445
RK
7086 {
7087 rtx new = gen_rtx_MEM (mode1,
7088 plus_constant (XEXP (op0, 0),
7089 (bitpos / BITS_PER_UNIT)));
7090
7091 MEM_COPY_ATTRIBUTES (new, op0);
7092 op0 = new;
7093 }
bbf6f052
RK
7094 else
7095 op0 = change_address (op0, mode1,
7096 plus_constant (XEXP (op0, 0),
7097 (bitpos / BITS_PER_UNIT)));
41472af8 7098
3bdf5ad1 7099 set_mem_attributes (op0, exp, 0);
dc6d66b3 7100 if (GET_CODE (XEXP (op0, 0)) == REG)
bdb429a5 7101 mark_reg_pointer (XEXP (op0, 0), alignment);
dc6d66b3 7102
bbf6f052 7103 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 7104 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 7105 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 7106 || modifier == EXPAND_INITIALIZER)
bbf6f052 7107 return op0;
0d15e60c 7108 else if (target == 0)
bbf6f052 7109 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 7110
bbf6f052
RK
7111 convert_move (target, op0, unsignedp);
7112 return target;
7113 }
7114
bbf6f052
RK
7115 /* Intended for a reference to a buffer of a file-object in Pascal.
7116 But it's not certain that a special tree code will really be
7117 necessary for these. INDIRECT_REF might work for them. */
7118 case BUFFER_REF:
7119 abort ();
7120
7308a047 7121 case IN_EXPR:
7308a047 7122 {
d6a5ac33
RK
7123 /* Pascal set IN expression.
7124
7125 Algorithm:
7126 rlo = set_low - (set_low%bits_per_word);
7127 the_word = set [ (index - rlo)/bits_per_word ];
7128 bit_index = index % bits_per_word;
7129 bitmask = 1 << bit_index;
7130 return !!(the_word & bitmask); */
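	/* A numeric trace of that algorithm (a sketch, taking
	   bits_per_word as 8 to match the BITS_PER_UNIT arithmetic
	   below): for set_low = 3 and index = 13,
	   rlo = 3 - (3 % 8) = 0, the_word = set[(13 - 0) / 8] = set[1],
	   bit_index = 13 % 8 = 5, and bitmask = 1 << 5 = 32.  */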
7131
7308a047
RS
7132 tree set = TREE_OPERAND (exp, 0);
7133 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 7134 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 7135 tree set_type = TREE_TYPE (set);
7308a047
RS
7136 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
7137 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
7138 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
7139 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
7140 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
7141 rtx setval = expand_expr (set, 0, VOIDmode, 0);
7142 rtx setaddr = XEXP (setval, 0);
7143 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
7144 rtx rlow;
7145 rtx diff, quo, rem, addr, bit, result;
7308a047 7146
d6a5ac33
RK
7147 /* If domain is empty, answer is no. Likewise if index is constant
7148 and out of bounds. */
51723711 7149 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 7150 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 7151 && tree_int_cst_lt (set_high_bound, set_low_bound))
d6a5ac33
RK
7152 || (TREE_CODE (index) == INTEGER_CST
7153 && TREE_CODE (set_low_bound) == INTEGER_CST
7154 && tree_int_cst_lt (index, set_low_bound))
7155 || (TREE_CODE (set_high_bound) == INTEGER_CST
7156 && TREE_CODE (index) == INTEGER_CST
7157 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
7158 return const0_rtx;
7159
d6a5ac33
RK
7160 if (target == 0)
7161 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
7162
7163 /* If we get here, we have to generate the code for both cases
7164 (in range and out of range). */
7165
7166 op0 = gen_label_rtx ();
7167 op1 = gen_label_rtx ();
7168
7169 if (! (GET_CODE (index_val) == CONST_INT
7170 && GET_CODE (lo_r) == CONST_INT))
7171 {
c5d5d461
JL
7172 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
7173 GET_MODE (index_val), iunsignedp, 0, op1);
7308a047
RS
7174 }
7175
7176 if (! (GET_CODE (index_val) == CONST_INT
7177 && GET_CODE (hi_r) == CONST_INT))
7178 {
c5d5d461
JL
7179 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
7180 GET_MODE (index_val), iunsignedp, 0, op1);
7308a047
RS
7181 }
7182
7183 /* Calculate the element number of bit zero in the first word
7184 of the set. */
7185 if (GET_CODE (lo_r) == CONST_INT)
17938e57 7186 rlow = GEN_INT (INTVAL (lo_r)
3a94c984 7187 & ~((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 7188 else
17938e57
RK
7189 rlow = expand_binop (index_mode, and_optab, lo_r,
7190 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 7191 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 7192
d6a5ac33
RK
7193 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
7194 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
7195
7196 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 7197 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 7198 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
7199 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7200
7308a047 7201 addr = memory_address (byte_mode,
d6a5ac33
RK
7202 expand_binop (index_mode, add_optab, diff,
7203 setaddr, NULL_RTX, iunsignedp,
17938e57 7204 OPTAB_LIB_WIDEN));
d6a5ac33 7205
3a94c984 7206 /* Extract the bit we want to examine. */
7308a047 7207 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 7208 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
7209 make_tree (TREE_TYPE (index), rem),
7210 NULL_RTX, 1);
7211 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
7212 GET_MODE (target) == byte_mode ? target : 0,
7308a047 7213 1, OPTAB_LIB_WIDEN);
17938e57
RK
7214
7215 if (result != target)
7216 convert_move (target, result, 1);
7308a047
RS
7217
7218 /* Output the code to handle the out-of-range case. */
7219 emit_jump (op0);
7220 emit_label (op1);
7221 emit_move_insn (target, const0_rtx);
7222 emit_label (op0);
7223 return target;
7224 }
7225
bbf6f052
RK
7226 case WITH_CLEANUP_EXPR:
7227 if (RTL_EXPR_RTL (exp) == 0)
7228 {
7229 RTL_EXPR_RTL (exp)
921b3427 7230 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
e976b8b2
MS
7231 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
7232
bbf6f052
RK
7233 /* That's it for this cleanup. */
7234 TREE_OPERAND (exp, 2) = 0;
7235 }
7236 return RTL_EXPR_RTL (exp);
7237
5dab5552
MS
7238 case CLEANUP_POINT_EXPR:
7239 {
e976b8b2
MS
7240 /* Start a new binding layer that will keep track of all cleanup
7241 actions to be performed. */
8e91754e 7242 expand_start_bindings (2);
e976b8b2 7243
d93d4205 7244 target_temp_slot_level = temp_slot_level;
e976b8b2 7245
921b3427 7246 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
f283f66b
JM
7247 /* If we're going to use this value, load it up now. */
7248 if (! ignore)
7249 op0 = force_not_mem (op0);
d93d4205 7250 preserve_temp_slots (op0);
e976b8b2 7251 expand_end_bindings (NULL_TREE, 0, 0);
5dab5552
MS
7252 }
7253 return op0;
7254
bbf6f052
RK
7255 case CALL_EXPR:
7256 /* Check for a built-in function. */
7257 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
7258 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7259 == FUNCTION_DECL)
bbf6f052 7260 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
c70eaeaf
KG
7261 {
7262 if (DECL_BUILT_IN_CLASS (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7263 == BUILT_IN_FRONTEND)
7264 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
7265 else
7266 return expand_builtin (exp, target, subtarget, tmode, ignore);
7267 }
d6a5ac33 7268
8129842c 7269 return expand_call (exp, target, ignore);
bbf6f052
RK
7270
7271 case NON_LVALUE_EXPR:
7272 case NOP_EXPR:
7273 case CONVERT_EXPR:
7274 case REFERENCE_EXPR:
4a53008b 7275 if (TREE_OPERAND (exp, 0) == error_mark_node)
a592f288 7276 return const0_rtx;
4a53008b 7277
bbf6f052
RK
7278 if (TREE_CODE (type) == UNION_TYPE)
7279 {
7280 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9
RK
7281
7282 /* If both input and output are BLKmode, this conversion
7283 isn't actually doing anything unless we need to make the
7284 alignment stricter. */
7285 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7286 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7287 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7288 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7289 modifier);
7290
bbf6f052 7291 if (target == 0)
1da68f56 7292 target = assign_temp (type, 0, 1, 1);
d6a5ac33 7293
bbf6f052
RK
7294 if (GET_CODE (target) == MEM)
7295 /* Store data into beginning of memory target. */
7296 store_expr (TREE_OPERAND (exp, 0),
1499e0a8
RK
7297 change_address (target, TYPE_MODE (valtype), 0), 0);
7298
bbf6f052
RK
7299 else if (GET_CODE (target) == REG)
7300 /* Store this field into a union of the proper type. */
14a774a9
RK
7301 store_field (target,
7302 MIN ((int_size_in_bytes (TREE_TYPE
7303 (TREE_OPERAND (exp, 0)))
7304 * BITS_PER_UNIT),
8752c357 7305 (HOST_WIDE_INT) GET_MODE_BITSIZE (mode)),
14a774a9 7306 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7156dead
RK
7307 VOIDmode, 0, BITS_PER_UNIT,
7308 int_size_in_bytes (type), 0);
bbf6f052
RK
7309 else
7310 abort ();
7311
7312 /* Return the entire union. */
7313 return target;
7314 }
d6a5ac33 7315
7f62854a
RK
7316 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7317 {
7318 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
921b3427 7319 ro_modifier);
7f62854a
RK
7320
7321 /* If the signedness of the conversion differs and OP0 is
7322 a promoted SUBREG, clear that indication since we now
7323 have to do the proper extension. */
7324 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7325 && GET_CODE (op0) == SUBREG)
7326 SUBREG_PROMOTED_VAR_P (op0) = 0;
7327
7328 return op0;
7329 }
7330
1499e0a8 7331 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
7332 if (GET_MODE (op0) == mode)
7333 return op0;
12342f90 7334
d6a5ac33
RK
7335 /* If OP0 is a constant, just convert it into the proper mode. */
7336 if (CONSTANT_P (op0))
7337 return
7338 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7339 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
12342f90 7340
26fcb35a 7341 if (modifier == EXPAND_INITIALIZER)
38a448ca 7342 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 7343
bbf6f052 7344 if (target == 0)
d6a5ac33
RK
7345 return
7346 convert_to_mode (mode, op0,
7347 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 7348 else
d6a5ac33
RK
7349 convert_move (target, op0,
7350 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
7351 return target;
7352
7353 case PLUS_EXPR:
0f41302f
MS
7354 /* We come here from MINUS_EXPR when the second operand is a
7355 constant. */
bbf6f052 7356 plus_expr:
91ce572a
CC
7357 this_optab = ! unsignedp && flag_trapv
7358 && (GET_MODE_CLASS(mode) == MODE_INT)
7359 ? addv_optab : add_optab;
bbf6f052
RK
7360
7361 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7362 something else, make sure we add the register to the constant and
7363 then to the other thing. This case can occur during strength
7364 reduction and doing it this way will produce better code if the
7365 frame pointer or argument pointer is eliminated.
7366
7367 fold-const.c will ensure that the constant is always in the inner
7368 PLUS_EXPR, so the only case we need to do anything about is if
7369 sp, ap, or fp is our second argument, in which case we must swap
7370 the innermost first argument and our second argument. */
7371
7372 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7373 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7374 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7375 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7376 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7377 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7378 {
7379 tree t = TREE_OPERAND (exp, 1);
7380
7381 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7382 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7383 }
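      /* Concretely (a sketch): fold-const.c hands us (A + C) + fp; the
	 swap above turns it into (fp + C) + A, so the register and the
	 constant meet first and plus_constant can fold them when fp is
	 later eliminated to sp + offset.  */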
7384
88f63c77 7385 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
7386 something, we might be forming a constant. So try to use
7387 plus_constant. If it produces a sum and we can't accept it,
7388 use force_operand. This allows P = &ARR[const] to generate
7389 efficient code on machines where a SYMBOL_REF is not a valid
7390 address.
7391
7392 If this is an EXPAND_SUM call, always return the sum. */
c980ac49 7393 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
91ce572a 7394 || (mode == ptr_mode && (unsignedp || ! flag_trapv)))
bbf6f052 7395 {
c980ac49
RS
7396 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7397 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7398 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7399 {
cbbc503e
JL
7400 rtx constant_part;
7401
c980ac49
RS
7402 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7403 EXPAND_SUM);
cbbc503e
JL
7404 /* Use immed_double_const to ensure that the constant is
7405 truncated according to the mode of OP1, then sign extended
7406 to a HOST_WIDE_INT. Using the constant directly can result
7407 in non-canonical RTL in a 64x32 cross compile. */
7408 constant_part
7409 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7410 (HOST_WIDE_INT) 0,
a5efcd63 7411 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 7412 op1 = plus_constant (op1, INTVAL (constant_part));
c980ac49
RS
7413 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7414 op1 = force_operand (op1, target);
7415 return op1;
7416 }
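	  /* A worked case (assuming a 64-bit host compiling for a 32-bit
	     target): a tree constant whose TREE_INT_CST_LOW is 0xfffffff0
	     denotes -16 in SImode.  Used directly it would make a
	     non-canonical positive CONST_INT; immed_double_const
	     truncates to SImode and sign-extends, so plus_constant sees
	     the canonical (HOST_WIDE_INT) -16.  */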
bbf6f052 7417
c980ac49
RS
7418 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7419 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7420 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7421 {
cbbc503e
JL
7422 rtx constant_part;
7423
c980ac49
RS
7424 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7425 EXPAND_SUM);
7426 if (! CONSTANT_P (op0))
7427 {
7428 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7429 VOIDmode, modifier);
709f5be1
RS
7430 /* Don't go to both_summands if modifier
7431 says it's not right to return a PLUS. */
7432 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7433 goto binop2;
c980ac49
RS
7434 goto both_summands;
7435 }
cbbc503e
JL
7436 /* Use immed_double_const to ensure that the constant is
7437 truncated according to the mode of OP1, then sign extended
7438 to a HOST_WIDE_INT. Using the constant directly can result
7439 in non-canonical RTL in a 64x32 cross compile. */
7440 constant_part
7441 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7442 (HOST_WIDE_INT) 0,
2a94e396 7443 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 7444 op0 = plus_constant (op0, INTVAL (constant_part));
c980ac49
RS
7445 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7446 op0 = force_operand (op0, target);
7447 return op0;
7448 }
bbf6f052
RK
7449 }
7450
7451 /* No sense saving up arithmetic to be done
7452 if it's all in the wrong mode to form part of an address.
7453 And force_operand won't know whether to sign-extend or
7454 zero-extend. */
7455 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 7456 || mode != ptr_mode)
c980ac49 7457 goto binop;
bbf6f052 7458
e5e809f4 7459 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7460 subtarget = 0;
7461
921b3427
RK
7462 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7463 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
bbf6f052 7464
c980ac49 7465 both_summands:
bbf6f052
RK
7466 /* Make sure any term that's a sum with a constant comes last. */
7467 if (GET_CODE (op0) == PLUS
7468 && CONSTANT_P (XEXP (op0, 1)))
7469 {
7470 temp = op0;
7471 op0 = op1;
7472 op1 = temp;
7473 }
7474 /* If adding to a sum including a constant,
7475 associate it to put the constant outside. */
7476 if (GET_CODE (op1) == PLUS
7477 && CONSTANT_P (XEXP (op1, 1)))
7478 {
7479 rtx constant_term = const0_rtx;
7480
7481 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7482 if (temp != 0)
7483 op0 = temp;
6f90e075
JW
7484 /* Ensure that MULT comes first if there is one. */
7485 else if (GET_CODE (op0) == MULT)
38a448ca 7486 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
bbf6f052 7487 else
38a448ca 7488 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
bbf6f052
RK
7489
7490 /* Let's also eliminate constants from op0 if possible. */
7491 op0 = eliminate_constant_term (op0, &constant_term);
7492
7493 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
3a94c984 7494 their sum should be a constant. Form it into OP1, since the
bbf6f052
RK
7495 result we want will then be OP0 + OP1. */
7496
7497 temp = simplify_binary_operation (PLUS, mode, constant_term,
7498 XEXP (op1, 1));
7499 if (temp != 0)
7500 op1 = temp;
7501 else
38a448ca 7502 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
bbf6f052
RK
7503 }
7504
7505 /* Put a constant term last and put a multiplication first. */
7506 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7507 temp = op1, op1 = op0, op0 = temp;
7508
7509 temp = simplify_binary_operation (PLUS, mode, op0, op1);
38a448ca 7510 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
bbf6f052
RK
7511
7512 case MINUS_EXPR:
ea87523e
RK
7513 /* For initializers, we are allowed to return a MINUS of two
7514 symbolic constants. Here we handle all cases when both operands
7515 are constant. */
bbf6f052
RK
7516 /* Handle difference of two symbolic constants,
7517 for the sake of an initializer. */
7518 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7519 && really_constant_p (TREE_OPERAND (exp, 0))
7520 && really_constant_p (TREE_OPERAND (exp, 1)))
7521 {
906c4e36 7522 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
921b3427 7523 VOIDmode, ro_modifier);
906c4e36 7524 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
921b3427 7525 VOIDmode, ro_modifier);
ea87523e 7526
ea87523e
RK
7527 /* If the last operand is a CONST_INT, use plus_constant of
7528 the negated constant. Else make the MINUS. */
7529 if (GET_CODE (op1) == CONST_INT)
7530 return plus_constant (op0, - INTVAL (op1));
7531 else
38a448ca 7532 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052
RK
7533 }
7534 /* Convert A - const to A + (-const). */
7535 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7536 {
ae431183
RK
7537 tree negated = fold (build1 (NEGATE_EXPR, type,
7538 TREE_OPERAND (exp, 1)));
7539
ae431183 7540 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6fbfac92
JM
7541 /* If we can't negate the constant in TYPE, leave it alone and
7542 expand_binop will negate it for us. We used to try to do it
7543 here in the signed version of TYPE, but that doesn't work
7544 on POINTER_TYPEs. */;
ae431183
RK
7545 else
7546 {
7547 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7548 goto plus_expr;
7549 }
bbf6f052 7550 }
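      /* E.g. (a sketch): "a - 4" is rebuilt as "a + (-4)" and
	 re-dispatched to plus_expr above, where plus_constant and the
	 address-forming shortcuts apply; as noted, the rewrite is
	 skipped when TYPE is unsigned or the negation overflows.  */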
91ce572a
CC
7551 this_optab = ! unsignedp && flag_trapv
7552 && (GET_MODE_CLASS(mode) == MODE_INT)
7553 ? subv_optab : sub_optab;
bbf6f052
RK
7554 goto binop;
7555
7556 case MULT_EXPR:
bbf6f052
RK
7557 /* If first operand is constant, swap them.
7558 Thus the following special case checks need only
7559 check the second operand. */
7560 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7561 {
7562 register tree t1 = TREE_OPERAND (exp, 0);
7563 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7564 TREE_OPERAND (exp, 1) = t1;
7565 }
7566
7567 /* Attempt to return something suitable for generating an
7568 indexed address, for machines that support that. */
7569
88f63c77 7570 if (modifier == EXPAND_SUM && mode == ptr_mode
bbf6f052 7571 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 7572 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 7573 {
921b3427
RK
7574 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7575 EXPAND_SUM);
bbf6f052
RK
7576
7577 /* Apply distributive law if OP0 is x+c. */
7578 if (GET_CODE (op0) == PLUS
7579 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
c5c76735
JL
7580 return
7581 gen_rtx_PLUS
7582 (mode,
7583 gen_rtx_MULT
7584 (mode, XEXP (op0, 0),
7585 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7586 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7587 * INTVAL (XEXP (op0, 1))));
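	  /* I.e. (a sketch): with OP0 == (plus X 8) and a constant
	     multiplier of 4, this returns (plus (mult X 4) 32), a shape
	     directly usable as base + index*scale on machines with
	     indexed addressing.  */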
bbf6f052
RK
7588
7589 if (GET_CODE (op0) != REG)
906c4e36 7590 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
7591 if (GET_CODE (op0) != REG)
7592 op0 = copy_to_mode_reg (mode, op0);
7593
c5c76735
JL
7594 return
7595 gen_rtx_MULT (mode, op0,
7596 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
7597 }
7598
e5e809f4 7599 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7600 subtarget = 0;
7601
7602 /* Check for multiplying things that have been extended
7603 from a narrower type. If this machine supports multiplying
7604 in that narrower type with a result in the desired type,
7605 do it that way, and avoid the explicit type-conversion. */
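      /* A typical hit (a sketch, assuming a 32-bit target that provides
	 a mulsidi3 pattern): in

	     long long r = (long long) a * (long long) b;

	 with int operands A and B, both multiplicands are NOP_EXPRs
	 widening from SImode, so a single SImode x SImode -> DImode
	 multiply is emitted instead of first converting both operands
	 to DImode.  */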
7606 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7607 && TREE_CODE (type) == INTEGER_TYPE
7608 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7609 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7610 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7611 && int_fits_type_p (TREE_OPERAND (exp, 1),
7612 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7613 /* Don't use a widening multiply if a shift will do. */
7614 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7615 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7616 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7617 ||
7618 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7619 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7620 ==
7621 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7622 /* If both operands are extended, they must either both
7623 be zero-extended or both be sign-extended. */
7624 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7625 ==
7626 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7627 {
7628 enum machine_mode innermode
7629 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
7630 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7631 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
7632 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7633 ? umul_widen_optab : smul_widen_optab);
b10af0c8 7634 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 7635 {
b10af0c8
TG
7636 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7637 {
7638 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7639 NULL_RTX, VOIDmode, 0);
7640 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7641 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7642 VOIDmode, 0);
7643 else
7644 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7645 NULL_RTX, VOIDmode, 0);
7646 goto binop2;
7647 }
7648 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7649 && innermode == word_mode)
7650 {
7651 rtx htem;
7652 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7653 NULL_RTX, VOIDmode, 0);
7654 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
8c118062
GK
7655 op1 = convert_modes (innermode, mode,
7656 expand_expr (TREE_OPERAND (exp, 1),
7657 NULL_RTX, VOIDmode, 0),
7658 unsignedp);
b10af0c8
TG
7659 else
7660 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7661 NULL_RTX, VOIDmode, 0);
7662 temp = expand_binop (mode, other_optab, op0, op1, target,
7663 unsignedp, OPTAB_LIB_WIDEN);
7664 htem = expand_mult_highpart_adjust (innermode,
7665 gen_highpart (innermode, temp),
7666 op0, op1,
7667 gen_highpart (innermode, temp),
7668 unsignedp);
7669 emit_move_insn (gen_highpart (innermode, temp), htem);
7670 return temp;
7671 }
bbf6f052
RK
7672 }
7673 }
7674 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7675 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7676 return expand_mult (mode, op0, op1, target, unsignedp);
7677
7678 case TRUNC_DIV_EXPR:
7679 case FLOOR_DIV_EXPR:
7680 case CEIL_DIV_EXPR:
7681 case ROUND_DIV_EXPR:
7682 case EXACT_DIV_EXPR:
e5e809f4 7683 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7684 subtarget = 0;
7685 /* Possible optimization: compute the dividend with EXPAND_SUM
 7686	 then, if the divisor is constant, we can optimize the case
 7687	 where some terms of the dividend have coefficients divisible by it.  */
7688 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7689 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7690 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7691
7692 case RDIV_EXPR:
7693 this_optab = flodiv_optab;
7694 goto binop;
7695
7696 case TRUNC_MOD_EXPR:
7697 case FLOOR_MOD_EXPR:
7698 case CEIL_MOD_EXPR:
7699 case ROUND_MOD_EXPR:
e5e809f4 7700 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7701 subtarget = 0;
7702 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7703 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7704 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7705
7706 case FIX_ROUND_EXPR:
7707 case FIX_FLOOR_EXPR:
7708 case FIX_CEIL_EXPR:
7709 abort (); /* Not used for C. */
7710
7711 case FIX_TRUNC_EXPR:
906c4e36 7712 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7713 if (target == 0)
7714 target = gen_reg_rtx (mode);
7715 expand_fix (target, op0, unsignedp);
7716 return target;
7717
7718 case FLOAT_EXPR:
906c4e36 7719 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7720 if (target == 0)
7721 target = gen_reg_rtx (mode);
7722 /* expand_float can't figure out what to do if FROM has VOIDmode.
7723 So give it the correct mode. With -O, cse will optimize this. */
7724 if (GET_MODE (op0) == VOIDmode)
7725 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7726 op0);
7727 expand_float (target, op0,
7728 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7729 return target;
7730
7731 case NEGATE_EXPR:
5b22bee8 7732 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
91ce572a
CC
7733 temp = expand_unop (mode,
7734 ! unsignedp && flag_trapv
7735 && (GET_MODE_CLASS(mode) == MODE_INT)
7736 ? negv_optab : neg_optab, op0, target, 0);
bbf6f052
RK
7737 if (temp == 0)
7738 abort ();
7739 return temp;
7740
7741 case ABS_EXPR:
7742 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7743
2d7050fd 7744 /* Handle complex values specially. */
d6a5ac33
RK
7745 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7746 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7747 return expand_complex_abs (mode, op0, target, unsignedp);
2d7050fd 7748
bbf6f052
RK
7749 /* Unsigned abs is simply the operand. Testing here means we don't
7750 risk generating incorrect code below. */
7751 if (TREE_UNSIGNED (type))
7752 return op0;
7753
91ce572a 7754 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 7755 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
7756
7757 case MAX_EXPR:
7758 case MIN_EXPR:
7759 target = original_target;
e5e809f4 7760 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
fc155707 7761 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 7762 || GET_MODE (target) != mode
bbf6f052
RK
7763 || (GET_CODE (target) == REG
7764 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7765 target = gen_reg_rtx (mode);
906c4e36 7766 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7767 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7768
7769 /* First try to do it with a special MIN or MAX instruction.
7770 If that does not win, use a conditional jump to select the proper
7771 value. */
7772 this_optab = (TREE_UNSIGNED (type)
7773 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7774 : (code == MIN_EXPR ? smin_optab : smax_optab));
7775
7776 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7777 OPTAB_WIDEN);
7778 if (temp != 0)
7779 return temp;
7780
fa2981d8
JW
7781 /* At this point, a MEM target is no longer useful; we will get better
7782 code without it. */
3a94c984 7783
fa2981d8
JW
7784 if (GET_CODE (target) == MEM)
7785 target = gen_reg_rtx (mode);
7786
ee456b1c
RK
7787 if (target != op0)
7788 emit_move_insn (target, op0);
d6a5ac33 7789
bbf6f052 7790 op0 = gen_label_rtx ();
d6a5ac33 7791
f81497d9
RS
7792 /* If this mode is an integer too wide to compare properly,
7793 compare word by word. Rely on cse to optimize constant cases. */
1eb8759b
RH
7794 if (GET_MODE_CLASS (mode) == MODE_INT
7795 && ! can_compare_p (GE, mode, ccp_jump))
bbf6f052 7796 {
f81497d9 7797 if (code == MAX_EXPR)
d6a5ac33
RK
7798 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7799 target, op1, NULL_RTX, op0);
bbf6f052 7800 else
d6a5ac33
RK
7801 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7802 op1, target, NULL_RTX, op0);
bbf6f052 7803 }
f81497d9
RS
7804 else
7805 {
b30f05db
BS
7806 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7807 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7808 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7809 op0);
f81497d9 7810 }
b30f05db 7811 emit_move_insn (target, op1);
bbf6f052
RK
7812 emit_label (op0);
7813 return target;
7814
bbf6f052
RK
7815 case BIT_NOT_EXPR:
7816 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7817 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7818 if (temp == 0)
7819 abort ();
7820 return temp;
7821
7822 case FFS_EXPR:
7823 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7824 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7825 if (temp == 0)
7826 abort ();
7827 return temp;
7828
d6a5ac33
RK
7829 /* ??? Can optimize bitwise operations with one arg constant.
7830 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7831 and (a bitwise1 b) bitwise2 b (etc)
 7832	 but that is probably not worthwhile.  */
7833
7834 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7835 boolean values when we want in all cases to compute both of them. In
7836 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7837 as actual zero-or-1 values and then bitwise anding. In cases where
7838 there cannot be any side effects, better code would be made by
7839 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7840 how to recognize those cases. */
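 /* E.g., a front end may emit `c = p && q' as a TRUTH_AND_EXPR when
 neither operand has side effects; both operands are then reduced
 to 0 or 1 and combined with a single AND:

 t0 = (p != 0); t1 = (q != 0); c = t0 & t1;

 whereas TRUTH_ANDIF_EXPR would branch around the evaluation of
 `q' whenever `p' is zero. */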
7841
bbf6f052
RK
7842 case TRUTH_AND_EXPR:
7843 case BIT_AND_EXPR:
7844 this_optab = and_optab;
7845 goto binop;
7846
bbf6f052
RK
7847 case TRUTH_OR_EXPR:
7848 case BIT_IOR_EXPR:
7849 this_optab = ior_optab;
7850 goto binop;
7851
874726a8 7852 case TRUTH_XOR_EXPR:
bbf6f052
RK
7853 case BIT_XOR_EXPR:
7854 this_optab = xor_optab;
7855 goto binop;
7856
7857 case LSHIFT_EXPR:
7858 case RSHIFT_EXPR:
7859 case LROTATE_EXPR:
7860 case RROTATE_EXPR:
e5e809f4 7861 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7862 subtarget = 0;
7863 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7864 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7865 unsignedp);
7866
d6a5ac33
RK
7867 /* Could determine the answer when only additive constants differ. Also,
7868 the addition of one can be handled by changing the condition. */
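 /* E.g., `a + 1 <= b' could be computed as the store-flag for
 `a < b' (for signed operands that do not overflow), but neither
 that nor the additive-constant case is attempted here. */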
bbf6f052
RK
7869 case LT_EXPR:
7870 case LE_EXPR:
7871 case GT_EXPR:
7872 case GE_EXPR:
7873 case EQ_EXPR:
7874 case NE_EXPR:
1eb8759b
RH
7875 case UNORDERED_EXPR:
7876 case ORDERED_EXPR:
7877 case UNLT_EXPR:
7878 case UNLE_EXPR:
7879 case UNGT_EXPR:
7880 case UNGE_EXPR:
7881 case UNEQ_EXPR:
bbf6f052
RK
7882 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7883 if (temp != 0)
7884 return temp;
d6a5ac33 7885
0f41302f 7886 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
7887 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7888 && original_target
7889 && GET_CODE (original_target) == REG
7890 && (GET_MODE (original_target)
7891 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7892 {
d6a5ac33
RK
7893 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7894 VOIDmode, 0);
7895
bbf6f052
RK
7896 if (temp != original_target)
7897 temp = copy_to_reg (temp);
d6a5ac33 7898
bbf6f052 7899 op1 = gen_label_rtx ();
c5d5d461
JL
7900 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7901 GET_MODE (temp), unsignedp, 0, op1);
bbf6f052
RK
7902 emit_move_insn (temp, const1_rtx);
7903 emit_label (op1);
7904 return temp;
7905 }
d6a5ac33 7906
bbf6f052
RK
7907 /* If no set-flag instruction, must generate a conditional
7908 store into a temporary variable. Drop through
7909 and handle this like && and ||. */
7910
7911 case TRUTH_ANDIF_EXPR:
7912 case TRUTH_ORIF_EXPR:
e44842fe 7913 if (! ignore
e5e809f4 7914 && (target == 0 || ! safe_from_p (target, exp, 1)
e44842fe
RK
7915 /* Make sure we don't have a hard reg (such as function's return
7916 value) live across basic blocks, if not optimizing. */
7917 || (!optimize && GET_CODE (target) == REG
7918 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 7919 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
7920
7921 if (target)
7922 emit_clr_insn (target);
7923
bbf6f052
RK
7924 op1 = gen_label_rtx ();
7925 jumpifnot (exp, op1);
e44842fe
RK
7926
7927 if (target)
7928 emit_0_to_1_insn (target);
7929
bbf6f052 7930 emit_label (op1);
e44842fe 7931 return ignore ? const0_rtx : target;
bbf6f052
RK
7932
7933 case TRUTH_NOT_EXPR:
7934 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7935 /* The parser is careful to generate TRUTH_NOT_EXPR
7936 only with operands that are always zero or one. */
906c4e36 7937 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
7938 target, 1, OPTAB_LIB_WIDEN);
7939 if (temp == 0)
7940 abort ();
7941 return temp;
7942
7943 case COMPOUND_EXPR:
7944 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7945 emit_queue ();
7946 return expand_expr (TREE_OPERAND (exp, 1),
7947 (ignore ? const0_rtx : target),
7948 VOIDmode, 0);
7949
7950 case COND_EXPR:
ac01eace
RK
7951 /* If we would have a "singleton" (see below) were it not for a
7952 conversion in each arm, bring that conversion back out. */
7953 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7954 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7955 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7956 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7957 {
7958 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7959 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7960
7961 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7962 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7963 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7964 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7965 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7966 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7967 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7968 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7969 return expand_expr (build1 (NOP_EXPR, type,
7970 build (COND_EXPR, TREE_TYPE (true),
7971 TREE_OPERAND (exp, 0),
7972 true, false)),
7973 target, tmode, modifier);
7974 }
7975
bbf6f052
RK
7976 {
7977 /* Note that COND_EXPRs whose type is a structure or union
7978 are required to be constructed to contain assignments of
7979 a temporary variable, so that we can evaluate them here
7980 for side effect only. If type is void, we must do likewise. */
7981
7982 /* If an arm of the branch requires a cleanup,
7983 only that cleanup is performed. */
7984
7985 tree singleton = 0;
7986 tree binary_op = 0, unary_op = 0;
bbf6f052
RK
7987
7988 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7989 convert it to our mode, if necessary. */
7990 if (integer_onep (TREE_OPERAND (exp, 1))
7991 && integer_zerop (TREE_OPERAND (exp, 2))
7992 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7993 {
dd27116b
RK
7994 if (ignore)
7995 {
7996 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
921b3427 7997 ro_modifier);
dd27116b
RK
7998 return const0_rtx;
7999 }
8000
921b3427 8001 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
bbf6f052
RK
8002 if (GET_MODE (op0) == mode)
8003 return op0;
d6a5ac33 8004
bbf6f052
RK
8005 if (target == 0)
8006 target = gen_reg_rtx (mode);
8007 convert_move (target, op0, unsignedp);
8008 return target;
8009 }
8010
ac01eace
RK
8011 /* Check for X ? A + B : A. If we have this, we can copy A to the
8012 output and conditionally add B. Similarly for unary operations.
8013 Don't do this if X has side-effects because those side effects
8014 might affect A or B and the "?" operation is a sequence point in
8015 ANSI. (operand_equal_p tests for side effects.) */
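 /* E.g., in `x ? a + b : a' the singleton is `a' and the binary op
 is `a + b'; `a' is copied to the output unconditionally and `b'
 is added only on the path where `x' is true. */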
bbf6f052
RK
8016
8017 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
8018 && operand_equal_p (TREE_OPERAND (exp, 2),
8019 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8020 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
8021 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
8022 && operand_equal_p (TREE_OPERAND (exp, 1),
8023 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8024 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
8025 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
8026 && operand_equal_p (TREE_OPERAND (exp, 2),
8027 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
8028 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
8029 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
8030 && operand_equal_p (TREE_OPERAND (exp, 1),
8031 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
8032 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
8033
01c8a7c8
RK
8034 /* If we are not to produce a result, we have no target. Otherwise,
8035 if a target was specified use it; it will not be used as an
3a94c984 8036 intermediate target unless it is safe. If no target, use a
01c8a7c8
RK
8037 temporary. */
8038
8039 if (ignore)
8040 temp = 0;
8041 else if (original_target
e5e809f4 8042 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
01c8a7c8
RK
8043 || (singleton && GET_CODE (original_target) == REG
8044 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
8045 && original_target == var_rtx (singleton)))
8046 && GET_MODE (original_target) == mode
7c00d1fe
RK
8047#ifdef HAVE_conditional_move
8048 && (! can_conditionally_move_p (mode)
8049 || GET_CODE (original_target) == REG
8050 || TREE_ADDRESSABLE (type))
8051#endif
01c8a7c8
RK
8052 && ! (GET_CODE (original_target) == MEM
8053 && MEM_VOLATILE_P (original_target)))
8054 temp = original_target;
8055 else if (TREE_ADDRESSABLE (type))
8056 abort ();
8057 else
8058 temp = assign_temp (type, 0, 0, 1);
8059
ac01eace
RK
8060 /* If we had X ? A + C : A, with C a constant power of 2, and we can
8061 do the test of X as a store-flag operation, do this as
8062 A + ((X != 0) << log C). Similarly for other simple binary
8063 operators. Only do for C == 1 if BRANCH_COST is low. */
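 /* E.g., with BRANCH_COST >= 3, `x ? a + 4 : a' becomes the
 branch-free sequence (sketch)

 t = (x != 0); -- do_store_flag
 result = a + (t << 2); -- tree_log2 (4) == 2

 so no conditional jump is emitted at all. */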
dd27116b 8064 if (temp && singleton && binary_op
bbf6f052
RK
8065 && (TREE_CODE (binary_op) == PLUS_EXPR
8066 || TREE_CODE (binary_op) == MINUS_EXPR
8067 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 8068 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
ac01eace
RK
8069 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
8070 : integer_onep (TREE_OPERAND (binary_op, 1)))
bbf6f052
RK
8071 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
8072 {
8073 rtx result;
91ce572a
CC
8074 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR
8075 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8076 ? addv_optab : add_optab)
8077 : TREE_CODE (binary_op) == MINUS_EXPR
8078 ? (TYPE_TRAP_SIGNED (TREE_TYPE (binary_op))
8079 ? subv_optab : sub_optab)
8080 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
8081 : xor_optab);
bbf6f052
RK
8082
8083 /* If we had X ? A : A + 1, do this as A + (X == 0).
8084
8085 We have to invert the truth value here and then put it
8086 back later if do_store_flag fails. We cannot simply copy
8087 TREE_OPERAND (exp, 0) to another variable and modify that
8088 because invert_truthvalue can modify the tree pointed to
8089 by its argument. */
8090 if (singleton == TREE_OPERAND (exp, 1))
8091 TREE_OPERAND (exp, 0)
8092 = invert_truthvalue (TREE_OPERAND (exp, 0));
8093
8094 result = do_store_flag (TREE_OPERAND (exp, 0),
e5e809f4 8095 (safe_from_p (temp, singleton, 1)
906c4e36 8096 ? temp : NULL_RTX),
bbf6f052
RK
8097 mode, BRANCH_COST <= 1);
8098
ac01eace
RK
8099 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
8100 result = expand_shift (LSHIFT_EXPR, mode, result,
8101 build_int_2 (tree_log2
8102 (TREE_OPERAND
8103 (binary_op, 1)),
8104 0),
e5e809f4 8105 (safe_from_p (temp, singleton, 1)
ac01eace
RK
8106 ? temp : NULL_RTX), 0);
8107
bbf6f052
RK
8108 if (result)
8109 {
906c4e36 8110 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8111 return expand_binop (mode, boptab, op1, result, temp,
8112 unsignedp, OPTAB_LIB_WIDEN);
8113 }
8114 else if (singleton == TREE_OPERAND (exp, 1))
8115 TREE_OPERAND (exp, 0)
8116 = invert_truthvalue (TREE_OPERAND (exp, 0));
8117 }
3a94c984 8118
dabf8373 8119 do_pending_stack_adjust ();
bbf6f052
RK
8120 NO_DEFER_POP;
8121 op0 = gen_label_rtx ();
8122
8123 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
8124 {
8125 if (temp != 0)
8126 {
8127 /* If the target conflicts with the other operand of the
8128 binary op, we can't use it. Also, we can't use the target
8129 if it is a hard register, because evaluating the condition
8130 might clobber it. */
8131 if ((binary_op
e5e809f4 8132 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
bbf6f052
RK
8133 || (GET_CODE (temp) == REG
8134 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
8135 temp = gen_reg_rtx (mode);
8136 store_expr (singleton, temp, 0);
8137 }
8138 else
906c4e36 8139 expand_expr (singleton,
2937cf87 8140 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8141 if (singleton == TREE_OPERAND (exp, 1))
8142 jumpif (TREE_OPERAND (exp, 0), op0);
8143 else
8144 jumpifnot (TREE_OPERAND (exp, 0), op0);
8145
956d6950 8146 start_cleanup_deferral ();
bbf6f052
RK
8147 if (binary_op && temp == 0)
8148 /* Just touch the other operand. */
8149 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 8150 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8151 else if (binary_op)
8152 store_expr (build (TREE_CODE (binary_op), type,
8153 make_tree (type, temp),
8154 TREE_OPERAND (binary_op, 1)),
8155 temp, 0);
8156 else
8157 store_expr (build1 (TREE_CODE (unary_op), type,
8158 make_tree (type, temp)),
8159 temp, 0);
8160 op1 = op0;
bbf6f052 8161 }
bbf6f052
RK
8162 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
8163 comparison operator. If we have one of these cases, set the
8164 output to A, branch on A (cse will merge these two references),
8165 then set the output to FOO. */
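 /* E.g., `x > 0 ? x : -x' matches with A == `x' and FOO == `-x':
 `x' is stored into the output, the branch tests `x > 0' on that
 same value, and only the false path stores `-x'. */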
8166 else if (temp
8167 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8168 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8169 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8170 TREE_OPERAND (exp, 1), 0)
e9a25f70
JL
8171 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8172 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 8173 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
bbf6f052 8174 {
3a94c984
KH
8175 if (GET_CODE (temp) == REG
8176 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052
RK
8177 temp = gen_reg_rtx (mode);
8178 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8179 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 8180
956d6950 8181 start_cleanup_deferral ();
bbf6f052
RK
8182 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8183 op1 = op0;
8184 }
8185 else if (temp
8186 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
8187 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
8188 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
8189 TREE_OPERAND (exp, 2), 0)
e9a25f70
JL
8190 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
8191 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 8192 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
bbf6f052 8193 {
3a94c984
KH
8194 if (GET_CODE (temp) == REG
8195 && REGNO (temp) < FIRST_PSEUDO_REGISTER)
bbf6f052
RK
8196 temp = gen_reg_rtx (mode);
8197 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8198 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8199
956d6950 8200 start_cleanup_deferral ();
bbf6f052
RK
8201 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8202 op1 = op0;
8203 }
8204 else
8205 {
8206 op1 = gen_label_rtx ();
8207 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 8208
956d6950 8209 start_cleanup_deferral ();
3a94c984 8210
2ac84cfe 8211 /* One branch of the cond can be void if it never returns; for
3a94c984 8212 example, A ? throw : E. */
2ac84cfe 8213 if (temp != 0
3a94c984 8214 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
bbf6f052
RK
8215 store_expr (TREE_OPERAND (exp, 1), temp, 0);
8216 else
906c4e36
RK
8217 expand_expr (TREE_OPERAND (exp, 1),
8218 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 8219 end_cleanup_deferral ();
bbf6f052
RK
8220 emit_queue ();
8221 emit_jump_insn (gen_jump (op1));
8222 emit_barrier ();
8223 emit_label (op0);
956d6950 8224 start_cleanup_deferral ();
2ac84cfe 8225 if (temp != 0
3a94c984 8226 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
bbf6f052
RK
8227 store_expr (TREE_OPERAND (exp, 2), temp, 0);
8228 else
906c4e36
RK
8229 expand_expr (TREE_OPERAND (exp, 2),
8230 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8231 }
8232
956d6950 8233 end_cleanup_deferral ();
bbf6f052
RK
8234
8235 emit_queue ();
8236 emit_label (op1);
8237 OK_DEFER_POP;
5dab5552 8238
bbf6f052
RK
8239 return temp;
8240 }
8241
8242 case TARGET_EXPR:
8243 {
8244 /* Something needs to be initialized, but we didn't know
8245 where that thing was when building the tree. For example,
8246 it could be the return value of a function, or a parameter
8247 to a function which is laid out on the stack, or a temporary
8248 variable which must be passed by reference.
8249
8250 We guarantee that the expression will either be constructed
8251 or copied into our original target. */
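 /* E.g. (a hypothetical C++ source), in

 S s = f ();

 the TARGET_EXPR's slot is the memory for `s', and the call to
 `f' constructs its result directly in that slot. */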
8252
8253 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 8254 tree cleanups = NULL_TREE;
5c062816 8255 tree exp1;
bbf6f052
RK
8256
8257 if (TREE_CODE (slot) != VAR_DECL)
8258 abort ();
8259
9c51f375
RK
8260 if (! ignore)
8261 target = original_target;
8262
6fbfac92
JM
8263 /* Set this here so that if we get a target that refers to a
8264 register variable that's already been used, put_reg_into_stack
3a94c984 8265 knows that it should fix up those uses. */
6fbfac92
JM
8266 TREE_USED (slot) = 1;
8267
bbf6f052
RK
8268 if (target == 0)
8269 {
8270 if (DECL_RTL (slot) != 0)
ac993f4f
MS
8271 {
8272 target = DECL_RTL (slot);
5c062816 8273 /* If we have already expanded the slot, don't do
ac993f4f 8274 it again. (mrs) */
5c062816
MS
8275 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8276 return target;
ac993f4f 8277 }
bbf6f052
RK
8278 else
8279 {
e9a25f70 8280 target = assign_temp (type, 2, 0, 1);
bbf6f052
RK
8281 /* All temp slots at this level must not conflict. */
8282 preserve_temp_slots (target);
8283 DECL_RTL (slot) = target;
e9a25f70 8284 if (TREE_ADDRESSABLE (slot))
4361b41d 8285 put_var_into_stack (slot);
bbf6f052 8286
e287fd6e
RK
8287 /* Since SLOT is not known to the called function
8288 to belong to its stack frame, we must build an explicit
8289 cleanup. This case occurs when we must build up a reference
8290 to pass the reference as an argument. In this case,
8291 it is very likely that such a reference need not be
8292 built here. */
8293
8294 if (TREE_OPERAND (exp, 2) == 0)
8295 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
2a888d4c 8296 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 8297 }
bbf6f052
RK
8298 }
8299 else
8300 {
8301 /* This case does occur, when expanding a parameter which
8302 needs to be constructed on the stack. The target
8303 is the actual stack address that we want to initialize.
8304 The function we call will perform the cleanup in this case. */
8305
8c042b47
RS
8306 /* If we have already assigned it space, use that space,
8307 not target that we were passed in, as our target
8308 parameter is only a hint. */
8309 if (DECL_RTL (slot) != 0)
3a94c984
KH
8310 {
8311 target = DECL_RTL (slot);
8312 /* If we have already expanded the slot, don't do
8c042b47 8313 it again. (mrs) */
3a94c984
KH
8314 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8315 return target;
8c042b47 8316 }
21002281
JW
8317 else
8318 {
8319 DECL_RTL (slot) = target;
8320 /* If we must have an addressable slot, then make sure that
8321 the RTL that we just stored in slot is OK. */
8322 if (TREE_ADDRESSABLE (slot))
4361b41d 8323 put_var_into_stack (slot);
21002281 8324 }
bbf6f052
RK
8325 }
8326
4847c938 8327 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
8328 /* Mark it as expanded. */
8329 TREE_OPERAND (exp, 1) = NULL_TREE;
8330
41531e5b 8331 store_expr (exp1, target, 0);
61d6b1cc 8332
e976b8b2 8333 expand_decl_cleanup (NULL_TREE, cleanups);
3a94c984 8334
41531e5b 8335 return target;
bbf6f052
RK
8336 }
8337
8338 case INIT_EXPR:
8339 {
8340 tree lhs = TREE_OPERAND (exp, 0);
8341 tree rhs = TREE_OPERAND (exp, 1);
8342 tree noncopied_parts = 0;
8343 tree lhs_type = TREE_TYPE (lhs);
8344
8345 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8346 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8347 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8348 TYPE_NONCOPIED_PARTS (lhs_type));
8349 while (noncopied_parts != 0)
8350 {
8351 expand_assignment (TREE_VALUE (noncopied_parts),
8352 TREE_PURPOSE (noncopied_parts), 0, 0);
8353 noncopied_parts = TREE_CHAIN (noncopied_parts);
8354 }
8355 return temp;
8356 }
8357
8358 case MODIFY_EXPR:
8359 {
8360 /* If lhs is complex, expand calls in rhs before computing it.
8361 That's so we don't compute a pointer and save it over a call.
8362 If lhs is simple, compute it first so we can give it as a
8363 target if the rhs is just a call. This avoids an extra temp and copy,
8364 and prevents a partial subsumption which makes bad code.
8365 Actually we could treat component_ref's of vars like vars. */
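 /* E.g., for `x = f ()' with `x' a simple variable, `x' is computed
 first and given as the target of the call, so the return value
 lands directly in `x'; for `p->x = f ()' the call is expanded
 first, so no pointer to `p->x' need be preserved across it. */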
8366
8367 tree lhs = TREE_OPERAND (exp, 0);
8368 tree rhs = TREE_OPERAND (exp, 1);
8369 tree noncopied_parts = 0;
8370 tree lhs_type = TREE_TYPE (lhs);
8371
8372 temp = 0;
8373
8379
8380 /* Check for |= or &= of a bitfield of size one into another bitfield
8381 of size 1. In this case, (unless we need the result of the
8382 assignment) we can do this more efficiently with a
8383 test followed by an assignment, if necessary.
8384
8385 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8386 things change so we do, this code should be enhanced to
8387 support it. */
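 /* E.g., for `s.a |= s.b' with 1-bit fields `a' and `b' and the
 result unused, this emits (a sketch, with an illustrative label):

 if (s.b == 0) goto lab;
 s.a = 1;
 lab:

 instead of extracting, OR-ing and re-inserting the field. */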
8388 if (ignore
8389 && TREE_CODE (lhs) == COMPONENT_REF
8390 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8391 || TREE_CODE (rhs) == BIT_AND_EXPR)
8392 && TREE_OPERAND (rhs, 0) == lhs
8393 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
05bccae2
RK
8394 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8395 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
bbf6f052
RK
8396 {
8397 rtx label = gen_label_rtx ();
8398
8399 do_jump (TREE_OPERAND (rhs, 1),
8400 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8401 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8402 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8403 (TREE_CODE (rhs) == BIT_IOR_EXPR
8404 ? integer_one_node
8405 : integer_zero_node)),
8406 0, 0);
e7c33f54 8407 do_pending_stack_adjust ();
bbf6f052
RK
8408 emit_label (label);
8409 return const0_rtx;
8410 }
8411
8412 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8413 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8414 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8415 TYPE_NONCOPIED_PARTS (lhs_type));
8416
8417 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8418 while (noncopied_parts != 0)
8419 {
8420 expand_assignment (TREE_PURPOSE (noncopied_parts),
8421 TREE_VALUE (noncopied_parts), 0, 0);
8422 noncopied_parts = TREE_CHAIN (noncopied_parts);
8423 }
8424 return temp;
8425 }
8426
6e7f84a7
APB
8427 case RETURN_EXPR:
8428 if (!TREE_OPERAND (exp, 0))
8429 expand_null_return ();
8430 else
8431 expand_return (TREE_OPERAND (exp, 0));
8432 return const0_rtx;
8433
bbf6f052
RK
8434 case PREINCREMENT_EXPR:
8435 case PREDECREMENT_EXPR:
7b8b9722 8436 return expand_increment (exp, 0, ignore);
bbf6f052
RK
8437
8438 case POSTINCREMENT_EXPR:
8439 case POSTDECREMENT_EXPR:
8440 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 8441 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
8442
8443 case ADDR_EXPR:
987c71d9 8444 /* If nonzero, TEMP will be set to the address of something that might
0f41302f 8445 be a MEM corresponding to a stack slot. */
987c71d9
RK
8446 temp = 0;
8447
bbf6f052
RK
8448 /* Are we taking the address of a nested function? */
8449 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 8450 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
8451 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8452 && ! TREE_STATIC (exp))
bbf6f052
RK
8453 {
8454 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8455 op0 = force_operand (op0, target);
8456 }
682ba3a6
RK
8457 /* If we are taking the address of something erroneous, just
8458 return a zero. */
8459 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8460 return const0_rtx;
bbf6f052
RK
8461 else
8462 {
e287fd6e
RK
8463 /* We make sure to pass const0_rtx down if we came in with
8464 ignore set, to avoid doing the cleanups twice for something. */
8465 op0 = expand_expr (TREE_OPERAND (exp, 0),
8466 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
8467 (modifier == EXPAND_INITIALIZER
8468 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 8469
119af78a
RK
8470 /* If we are going to ignore the result, OP0 will have been set
8471 to const0_rtx, so just return it. Don't get confused and
8472 think we are taking the address of the constant. */
8473 if (ignore)
8474 return op0;
8475
3539e816
MS
8476 op0 = protect_from_queue (op0, 0);
8477
c5c76735
JL
8478 /* We would like the object in memory. If it is a constant, we can
8479 have it be statically allocated into memory. For a non-constant,
8480 we need to allocate some memory and store the value into it. */
896102d0
RK
8481
8482 if (CONSTANT_P (op0))
8483 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8484 op0);
987c71d9 8485 else if (GET_CODE (op0) == MEM)
af5b53ed
RK
8486 {
8487 mark_temp_addr_taken (op0);
8488 temp = XEXP (op0, 0);
8489 }
896102d0 8490
682ba3a6 8491 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
df6018fd
JJ
8492 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF
8493 || GET_CODE (op0) == PARALLEL)
896102d0
RK
8494 {
8495 /* If this object is in a register, it must be not
0f41302f 8496 be BLKmode. */
896102d0 8497 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
1da68f56
RK
8498 tree nt = build_qualified_type (inner_type,
8499 (TYPE_QUALS (inner_type)
8500 | TYPE_QUAL_CONST));
8501 rtx memloc = assign_temp (nt, 1, 1, 1);
896102d0 8502
7a0b7b9a 8503 mark_temp_addr_taken (memloc);
df6018fd
JJ
8504 if (GET_CODE (op0) == PARALLEL)
8505 /* Handle calls that pass values in multiple non-contiguous
8506 locations. The Irix 6 ABI has examples of this. */
8507 emit_group_store (memloc, op0,
8508 int_size_in_bytes (inner_type),
8509 TYPE_ALIGN (inner_type));
8510 else
8511 emit_move_insn (memloc, op0);
896102d0
RK
8512 op0 = memloc;
8513 }
8514
bbf6f052
RK
8515 if (GET_CODE (op0) != MEM)
8516 abort ();
3a94c984 8517
bbf6f052 8518 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77
RK
8519 {
8520 temp = XEXP (op0, 0);
8521#ifdef POINTERS_EXTEND_UNSIGNED
8522 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8523 && mode == ptr_mode)
9fcfcce7 8524 temp = convert_memory_address (ptr_mode, temp);
88f63c77
RK
8525#endif
8526 return temp;
8527 }
987c71d9 8528
bbf6f052
RK
8529 op0 = force_operand (XEXP (op0, 0), target);
8530 }
987c71d9 8531
bbf6f052 8532 if (flag_force_addr && GET_CODE (op0) != REG)
987c71d9
RK
8533 op0 = force_reg (Pmode, op0);
8534
dc6d66b3
RK
8535 if (GET_CODE (op0) == REG
8536 && ! REG_USERVAR_P (op0))
bdb429a5 8537 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
987c71d9
RK
8538
8539 /* If we might have had a temp slot, add an equivalent address
8540 for it. */
8541 if (temp != 0)
8542 update_temp_slot_address (temp, op0);
8543
88f63c77
RK
8544#ifdef POINTERS_EXTEND_UNSIGNED
8545 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8546 && mode == ptr_mode)
9fcfcce7 8547 op0 = convert_memory_address (ptr_mode, op0);
88f63c77
RK
8548#endif
8549
bbf6f052
RK
8550 return op0;
8551
8552 case ENTRY_VALUE_EXPR:
8553 abort ();
8554
7308a047
RS
8555 /* COMPLEX type for Extended Pascal & Fortran */
8556 case COMPLEX_EXPR:
8557 {
8558 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 8559 rtx insns;
7308a047
RS
8560
8561 /* Get the rtx code of the operands. */
8562 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8563 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8564
8565 if (! target)
8566 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8567
6551fa4d 8568 start_sequence ();
7308a047
RS
8569
8570 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
8571 emit_move_insn (gen_realpart (mode, target), op0);
8572 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 8573
6551fa4d
JW
8574 insns = get_insns ();
8575 end_sequence ();
8576
7308a047 8577 /* Complex construction should appear as a single unit. */
6551fa4d
JW
8578 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8579 each with a separate pseudo as destination.
8580 It's not correct for flow to treat them as a unit. */
6d6e61ce 8581 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8582 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8583 else
8584 emit_insns (insns);
7308a047
RS
8585
8586 return target;
8587 }
8588
8589 case REALPART_EXPR:
2d7050fd
RS
8590 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8591 return gen_realpart (mode, op0);
3a94c984 8592
7308a047 8593 case IMAGPART_EXPR:
2d7050fd
RS
8594 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8595 return gen_imagpart (mode, op0);
7308a047
RS
8596
8597 case CONJ_EXPR:
8598 {
62acb978 8599 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 8600 rtx imag_t;
6551fa4d 8601 rtx insns;
3a94c984
KH
8602
8603 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7308a047
RS
8604
8605 if (! target)
d6a5ac33 8606 target = gen_reg_rtx (mode);
3a94c984 8607
6551fa4d 8608 start_sequence ();
7308a047
RS
8609
8610 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
8611 emit_move_insn (gen_realpart (partmode, target),
8612 gen_realpart (partmode, op0));
7308a047 8613
62acb978 8614 imag_t = gen_imagpart (partmode, target);
91ce572a
CC
8615 temp = expand_unop (partmode,
8616 ! unsignedp && flag_trapv
8617 && (GET_MODE_CLASS(partmode) == MODE_INT)
8618 ? negv_optab : neg_optab,
3a94c984 8619 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
8620 if (temp != imag_t)
8621 emit_move_insn (imag_t, temp);
8622
6551fa4d
JW
8623 insns = get_insns ();
8624 end_sequence ();
8625
3a94c984 8626 /* Conjugate should appear as a single unit.
d6a5ac33 8627 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
8628 each with a separate pseudo as destination.
8629 It's not correct for flow to treat them as a unit. */
6d6e61ce 8630 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8631 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8632 else
8633 emit_insns (insns);
7308a047
RS
8634
8635 return target;
8636 }
8637
e976b8b2
MS
8638 case TRY_CATCH_EXPR:
8639 {
8640 tree handler = TREE_OPERAND (exp, 1);
8641
8642 expand_eh_region_start ();
8643
8644 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8645
8646 expand_eh_region_end (handler);
8647
8648 return op0;
8649 }
8650
b335b813
PB
8651 case TRY_FINALLY_EXPR:
8652 {
8653 tree try_block = TREE_OPERAND (exp, 0);
8654 tree finally_block = TREE_OPERAND (exp, 1);
8655 rtx finally_label = gen_label_rtx ();
8656 rtx done_label = gen_label_rtx ();
8657 rtx return_link = gen_reg_rtx (Pmode);
8658 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8659 (tree) finally_label, (tree) return_link);
8660 TREE_SIDE_EFFECTS (cleanup) = 1;
8661
8662 /* Start a new binding layer that will keep track of all cleanup
8663 actions to be performed. */
8e91754e 8664 expand_start_bindings (2);
b335b813
PB
8665
8666 target_temp_slot_level = temp_slot_level;
8667
8668 expand_decl_cleanup (NULL_TREE, cleanup);
8669 op0 = expand_expr (try_block, target, tmode, modifier);
8670
8671 preserve_temp_slots (op0);
8672 expand_end_bindings (NULL_TREE, 0, 0);
8673 emit_jump (done_label);
8674 emit_label (finally_label);
8675 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8676 emit_indirect_jump (return_link);
8677 emit_label (done_label);
8678 return op0;
8679 }
8680
3a94c984 8681 case GOTO_SUBROUTINE_EXPR:
b335b813
PB
8682 {
8683 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8684 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8685 rtx return_address = gen_label_rtx ();
3a94c984
KH
8686 emit_move_insn (return_link,
8687 gen_rtx_LABEL_REF (Pmode, return_address));
b335b813
PB
8688 emit_jump (subr);
8689 emit_label (return_address);
8690 return const0_rtx;
8691 }
8692
e976b8b2
MS
8693 case POPDCC_EXPR:
8694 {
8695 rtx dcc = get_dynamic_cleanup_chain ();
38a448ca 8696 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
e976b8b2
MS
8697 return const0_rtx;
8698 }
8699
8700 case POPDHC_EXPR:
8701 {
8702 rtx dhc = get_dynamic_handler_chain ();
38a448ca 8703 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
e976b8b2
MS
8704 return const0_rtx;
8705 }
8706
d3707adb
RH
8707 case VA_ARG_EXPR:
8708 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8709
bbf6f052 8710 default:
90764a87 8711 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
8712 }
8713
8714 /* Here to do an ordinary binary operator, generating an instruction
8715 from the optab already placed in `this_optab'. */
8716 binop:
e5e809f4 8717 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
8718 subtarget = 0;
8719 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 8720 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8721 binop2:
8722 temp = expand_binop (mode, this_optab, op0, op1, target,
8723 unsignedp, OPTAB_LIB_WIDEN);
8724 if (temp == 0)
8725 abort ();
8726 return temp;
8727}
b93a436e 8728\f
14a774a9
RK
8729/* Similar to expand_expr, except that we don't specify a target, target
8730 mode, or modifier and we return the alignment of the inner type. This is
8731 used in cases where it is not necessary to align the result to the
8732 alignment of its type as long as we know the alignment of the result, for
8733 example for comparisons of BLKmode values. */
8734
8735static rtx
8736expand_expr_unaligned (exp, palign)
8737 register tree exp;
729a2125 8738 unsigned int *palign;
14a774a9
RK
8739{
8740 register rtx op0;
8741 tree type = TREE_TYPE (exp);
8742 register enum machine_mode mode = TYPE_MODE (type);
8743
8744 /* Default the alignment we return to that of the type. */
8745 *palign = TYPE_ALIGN (type);
8746
8747 /* The only cases in which we do anything special is if the resulting mode
8748 is BLKmode. */
8749 if (mode != BLKmode)
8750 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8751
8752 switch (TREE_CODE (exp))
8753 {
8754 case CONVERT_EXPR:
8755 case NOP_EXPR:
8756 case NON_LVALUE_EXPR:
8757 /* Conversions between BLKmode values don't change the underlying
8758 alignment or value. */
8759 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8760 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8761 break;
8762
8763 case ARRAY_REF:
8764 /* Much of the code for this case is copied directly from expand_expr.
8765 We need to duplicate it here because we will do something different
8766 in the fall-through case, so we need to handle the same exceptions
8767 it does. */
8768 {
8769 tree array = TREE_OPERAND (exp, 0);
8770 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8771 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
fed3cef0 8772 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
14a774a9
RK
8773 HOST_WIDE_INT i;
8774
8775 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8776 abort ();
8777
8778 /* Optimize the special case of a zero lower bound.
8779
8780 We convert the low_bound to sizetype to avoid some problems
8781 with constant folding. (E.g. suppose the lower bound is 1,
8782 and its mode is QI. Without the conversion, (ARRAY
8783 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fed3cef0 8784 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14a774a9
RK
8785
8786 if (! integer_zerop (low_bound))
fed3cef0 8787 index = size_diffop (index, convert (sizetype, low_bound));
14a774a9
RK
8788
8789 /* If this is a constant index into a constant array,
8790 just get the value from the array. Handle both the cases when
8791 we have an explicit constructor and when our operand is a variable
8792 that was declared const. */
8793
05bccae2 8794 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
235783d1 8795 && host_integerp (index, 0)
3a94c984 8796 && 0 > compare_tree_int (index,
05bccae2
RK
8797 list_length (CONSTRUCTOR_ELTS
8798 (TREE_OPERAND (exp, 0)))))
14a774a9 8799 {
05bccae2
RK
8800 tree elem;
8801
8802 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
235783d1 8803 i = tree_low_cst (index, 0);
05bccae2
RK
8804 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8805 ;
8806
8807 if (elem)
8808 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
14a774a9 8809 }
3a94c984 8810
14a774a9
RK
8811 else if (optimize >= 1
8812 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8813 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8814 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8815 {
8816 if (TREE_CODE (index) == INTEGER_CST)
8817 {
8818 tree init = DECL_INITIAL (array);
8819
14a774a9
RK
8820 if (TREE_CODE (init) == CONSTRUCTOR)
8821 {
05bccae2
RK
8822 tree elem;
8823
8824 for (elem = CONSTRUCTOR_ELTS (init);
8825 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8826 elem = TREE_CHAIN (elem))
8827 ;
14a774a9 8828
14a774a9
RK
8829 if (elem)
8830 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8831 palign);
8832 }
8833 }
8834 }
8835 }
3a94c984 8836 /* Fall through. */
14a774a9
RK
8837
8838 case COMPONENT_REF:
8839 case BIT_FIELD_REF:
8840 /* If the operand is a CONSTRUCTOR, we can just extract the
8841 appropriate field if it is present. Don't do this if we have
8842 already written the data since we want to refer to that copy
8843 and varasm.c assumes that's what we'll do. */
8844 if (TREE_CODE (exp) != ARRAY_REF
8845 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8846 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8847 {
8848 tree elt;
8849
8850 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8851 elt = TREE_CHAIN (elt))
8852 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8853 /* Note that unlike the case in expand_expr, we know this is
8854 BLKmode and hence not an integer. */
8855 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8856 }
8857
8858 {
8859 enum machine_mode mode1;
770ae6cc 8860 HOST_WIDE_INT bitsize, bitpos;
14a774a9
RK
8861 tree offset;
8862 int volatilep = 0;
729a2125 8863 unsigned int alignment;
14a774a9
RK
8864 int unsignedp;
8865 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8866 &mode1, &unsignedp, &volatilep,
8867 &alignment);
8868
8869 /* If we got back the original object, something is wrong. Perhaps
8870 we are evaluating an expression too early. In any event, don't
8871 infinitely recurse. */
8872 if (tem == exp)
8873 abort ();
8874
8875 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8876
8877 /* If this is a constant, put it into a register if it is a
8878 legitimate constant and OFFSET is 0 and memory if it isn't. */
8879 if (CONSTANT_P (op0))
8880 {
8881 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8882
8883 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8884 && offset == 0)
8885 op0 = force_reg (inner_mode, op0);
8886 else
8887 op0 = validize_mem (force_const_mem (inner_mode, op0));
8888 }
8889
8890 if (offset != 0)
8891 {
8892 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8893
8894 /* If this object is in a register, put it into memory.
8895 This case can't occur in C, but can in Ada if we have
8896 unchecked conversion of an expression from a scalar type to
8897 an array or record type. */
8898 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8899 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8900 {
1da68f56
RK
8901 tree nt = build_qualified_type (TREE_TYPE (tem),
8902 (TYPE_QUALS (TREE_TYPE (tem))
8903 | TYPE_QUAL_CONST));
8904 rtx memloc = assign_temp (nt, 1, 1, 1);
14a774a9
RK
8905
8906 mark_temp_addr_taken (memloc);
8907 emit_move_insn (memloc, op0);
8908 op0 = memloc;
8909 }
8910
8911 if (GET_CODE (op0) != MEM)
8912 abort ();
8913
8914 if (GET_MODE (offset_rtx) != ptr_mode)
8915 {
8916#ifdef POINTERS_EXTEND_UNSIGNED
8917 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8918#else
8919 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8920#endif
8921 }
8922
8923 op0 = change_address (op0, VOIDmode,
8924 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8925 force_reg (ptr_mode,
8926 offset_rtx)));
8927 }
8928
8929 /* Don't forget about volatility even if this is a bitfield. */
8930 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8931 {
8932 op0 = copy_rtx (op0);
8933 MEM_VOLATILE_P (op0) = 1;
8934 }
8935
8936 /* Check the access. */
8937 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
3a94c984 8938 {
14a774a9
RK
8939 rtx to;
8940 int size;
8941
8942 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8943 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8944
8945 /* Check the access right of the pointer. */
ea4da9db 8946 in_check_memory_usage = 1;
14a774a9 8947 if (size > BITS_PER_UNIT)
ebb1b59a
BS
8948 emit_library_call (chkr_check_addr_libfunc,
8949 LCT_CONST_MAKE_BLOCK, VOIDmode, 3,
14a774a9
RK
8950 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8951 TYPE_MODE (sizetype),
3a94c984 8952 GEN_INT (MEMORY_USE_RO),
14a774a9 8953 TYPE_MODE (integer_type_node));
ea4da9db 8954 in_check_memory_usage = 0;
14a774a9
RK
8955 }
8956
a2b99161
RK
8957 /* In cases where an aligned union has an unaligned object
8958 as a field, we might be extracting a BLKmode value from
8959 an integer-mode (e.g., SImode) object. Handle this case
8960 by doing the extract into an object as wide as the field
8961 (which we know to be the width of a basic mode), then
8962 storing into memory, and changing the mode to BLKmode.
8963 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8964 EXPAND_INITIALIZER), then we must not copy to a temporary. */
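 /* E.g. (a hypothetical layout), for

 union u { int i; struct { char c[3]; } s; } x;

 the field `s' can be a BLKmode value carried in an SImode
 register; it is then extracted in SImode and spilled to memory
 so that a BLKmode reference can be taken. */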
8965 if (mode1 == VOIDmode
8966 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
e1565e65 8967 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
19caa751 8968 && (TYPE_ALIGN (type) > alignment
a2b99161
RK
8969 || bitpos % TYPE_ALIGN (type) != 0)))
8970 {
8971 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8972
8973 if (ext_mode == BLKmode)
8974 {
8975 /* In this case, BITPOS must start at a byte boundary. */
8976 if (GET_CODE (op0) != MEM
8977 || bitpos % BITS_PER_UNIT != 0)
8978 abort ();
8979
8980 op0 = change_address (op0, VOIDmode,
8981 plus_constant (XEXP (op0, 0),
8982 bitpos / BITS_PER_UNIT));
8983 }
8984 else
8985 {
1da68f56
RK
8986 tree nt = build_qualified_type (type_for_mode (ext_mode, 0),
8987 TYPE_QUAL_CONST);
8988 rtx new = assign_temp (nt, 0, 1, 1);
a2b99161
RK
8989
8990 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8991 unsignedp, NULL_RTX, ext_mode,
8992 ext_mode, alignment,
8993 int_size_in_bytes (TREE_TYPE (tem)));
8994
8995 /* If the result is a record type and BITSIZE is narrower than
8996 the mode of OP0, an integral mode, and this is a big endian
8997 machine, we must put the field into the high-order bits. */
8998 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8999 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
9000 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
9001 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
9002 size_int (GET_MODE_BITSIZE
9003 (GET_MODE (op0))
9004 - bitsize),
9005 op0, 1);
9006
a2b99161
RK
9007 emit_move_insn (new, op0);
9008 op0 = copy_rtx (new);
9009 PUT_MODE (op0, BLKmode);
9010 }
9011 }
9012 else
9013 /* Get a reference to just this component. */
9014 op0 = change_address (op0, mode1,
3a94c984
KH
9015 plus_constant (XEXP (op0, 0),
9016 (bitpos / BITS_PER_UNIT)));
14a774a9
RK
9017
9018 MEM_ALIAS_SET (op0) = get_alias_set (exp);
9019
9020 /* Adjust the alignment in case the bit position is not
9021 a multiple of the alignment of the inner object. */
9022 while (bitpos % alignment != 0)
9023 alignment >>= 1;
9024
9025 if (GET_CODE (XEXP (op0, 0)) == REG)
bdb429a5 9026 mark_reg_pointer (XEXP (op0, 0), alignment);
14a774a9
RK
9027
9028 MEM_IN_STRUCT_P (op0) = 1;
9029 MEM_VOLATILE_P (op0) |= volatilep;
9030
9031 *palign = alignment;
9032 return op0;
9033 }
9034
9035 default:
9036 break;
9037
9038 }
9039
9040 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
9041}
9042\f
fed3cef0
RK
9043/* Return the tree node if ARG corresponds to a string constant or zero
9044 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
9045 in bytes within the string that ARG is accessing. The type of the
9046 offset will be `sizetype'. */
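/* E.g., for the argument `"hello" + 2' this returns the STRING_CST
 for "hello" and sets *PTR_OFFSET to a sizetype 2; for `"hello"'
 itself (an ADDR_EXPR of the STRING_CST after decay) the offset
 is size_zero_node. */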
b93a436e 9047
28f4ec01 9048tree
b93a436e
JL
9049string_constant (arg, ptr_offset)
9050 tree arg;
9051 tree *ptr_offset;
9052{
9053 STRIP_NOPS (arg);
9054
9055 if (TREE_CODE (arg) == ADDR_EXPR
9056 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
9057 {
fed3cef0 9058 *ptr_offset = size_zero_node;
b93a436e
JL
9059 return TREE_OPERAND (arg, 0);
9060 }
9061 else if (TREE_CODE (arg) == PLUS_EXPR)
9062 {
9063 tree arg0 = TREE_OPERAND (arg, 0);
9064 tree arg1 = TREE_OPERAND (arg, 1);
9065
9066 STRIP_NOPS (arg0);
9067 STRIP_NOPS (arg1);
9068
9069 if (TREE_CODE (arg0) == ADDR_EXPR
9070 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 9071 {
fed3cef0 9072 *ptr_offset = convert (sizetype, arg1);
b93a436e 9073 return TREE_OPERAND (arg0, 0);
bbf6f052 9074 }
b93a436e
JL
9075 else if (TREE_CODE (arg1) == ADDR_EXPR
9076 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 9077 {
fed3cef0 9078 *ptr_offset = convert (sizetype, arg0);
b93a436e 9079 return TREE_OPERAND (arg1, 0);
bbf6f052 9080 }
b93a436e 9081 }
ca695ac9 9082
b93a436e
JL
9083 return 0;
9084}
ca695ac9 9085\f
b93a436e
JL
9086/* Expand code for a post- or pre- increment or decrement
9087 and return the RTX for the result.
9088 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
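/* E.g., for `y = x++' POST is 1 and the RTX returned holds the old
 value of `x', while for `y = ++x' POST is 0 and the incremented
 value itself is returned. */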
1499e0a8 9089
b93a436e
JL
9090static rtx
9091expand_increment (exp, post, ignore)
9092 register tree exp;
9093 int post, ignore;
ca695ac9 9094{
b93a436e
JL
9095 register rtx op0, op1;
9096 register rtx temp, value;
9097 register tree incremented = TREE_OPERAND (exp, 0);
9098 optab this_optab = add_optab;
9099 int icode;
9100 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9101 int op0_is_copy = 0;
9102 int single_insn = 0;
9103 /* 1 means we can't store into OP0 directly,
9104 because it is a subreg narrower than a word,
9105 and we don't dare clobber the rest of the word. */
9106 int bad_subreg = 0;
1499e0a8 9107
b93a436e
JL
9108 /* Stabilize any component ref that might need to be
9109 evaluated more than once below. */
9110 if (!post
9111 || TREE_CODE (incremented) == BIT_FIELD_REF
9112 || (TREE_CODE (incremented) == COMPONENT_REF
9113 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
9114 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
9115 incremented = stabilize_reference (incremented);
9116 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
9117 ones into save exprs so that they don't accidentally get evaluated
9118 more than once by the code below. */
9119 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
9120 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
9121 incremented = save_expr (incremented);
e9a25f70 9122
b93a436e
JL
9123 /* Compute the operands as RTX.
9124 Note whether OP0 is the actual lvalue or a copy of it:
9125 I believe it is a copy iff it is a register or subreg
9126 and insns were generated in computing it. */
e9a25f70 9127
b93a436e
JL
9128 temp = get_last_insn ();
9129 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
e9a25f70 9130
b93a436e
JL
9131 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
9132 in place but instead must do sign- or zero-extension during assignment,
9133 so we copy it into a new register and let the code below use it as
9134 a copy.
e9a25f70 9135
b93a436e
JL
9136 Note that we can safely modify this SUBREG since it is known not to be
9137 shared (it was made by the expand_expr call above). */
9138
9139 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
9140 {
9141 if (post)
9142 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
9143 else
9144 bad_subreg = 1;
9145 }
9146 else if (GET_CODE (op0) == SUBREG
9147 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
9148 {
9149 /* We cannot increment this SUBREG in place. If we are
9150 post-incrementing, get a copy of the old value. Otherwise,
9151 just mark that we cannot increment in place. */
9152 if (post)
9153 op0 = copy_to_reg (op0);
9154 else
9155 bad_subreg = 1;
e9a25f70
JL
9156 }
9157
b93a436e
JL
9158 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
9159 && temp != get_last_insn ());
9160 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
9161 EXPAND_MEMORY_USE_BAD);
1499e0a8 9162
b93a436e
JL
9163 /* Decide whether incrementing or decrementing. */
9164 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
9165 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9166 this_optab = sub_optab;
9167
9168 /* Convert decrement by a constant into a negative increment. */
9169 if (this_optab == sub_optab
9170 && GET_CODE (op1) == CONST_INT)
ca695ac9 9171 {
3a94c984 9172 op1 = GEN_INT (-INTVAL (op1));
b93a436e 9173 this_optab = add_optab;
ca695ac9 9174 }
1499e0a8 9175
91ce572a
CC
9176 if (TYPE_TRAP_SIGNED (TREE_TYPE (exp)))
9177 this_optab = this_optab == add_optab ? addv_optab : subv_optab;
9178
b93a436e
JL
9179 /* For a preincrement, see if we can do this with a single instruction. */
9180 if (!post)
9181 {
9182 icode = (int) this_optab->handlers[(int) mode].insn_code;
9183 if (icode != (int) CODE_FOR_nothing
9184 /* Make sure that OP0 is valid for operands 0 and 1
9185 of the insn we want to queue. */
a995e389
RH
9186 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9187 && (*insn_data[icode].operand[1].predicate) (op0, mode)
9188 && (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e
JL
9189 single_insn = 1;
9190 }
bbf6f052 9191
b93a436e
JL
9192 /* If OP0 is not the actual lvalue, but rather a copy in a register,
9193 then we cannot just increment OP0. We must therefore contrive to
9194 increment the original value. Then, for postincrement, we can return
9195 OP0 since it is a copy of the old value. For preincrement, expand here
9196 unless we can do it with a single insn.
bbf6f052 9197
b93a436e
JL
9198 Likewise if storing directly into OP0 would clobber high bits
9199 we need to preserve (bad_subreg). */
9200 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 9201 {
b93a436e
JL
9202 /* This is the easiest way to increment the value wherever it is.
9203 Problems with multiple evaluation of INCREMENTED are prevented
9204 because either (1) it is a component_ref or preincrement,
9205 in which case it was stabilized above, or (2) it is an array_ref
9206 with constant index in an array in a register, which is
9207 safe to reevaluate. */
9208 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
9209 || TREE_CODE (exp) == PREDECREMENT_EXPR)
9210 ? MINUS_EXPR : PLUS_EXPR),
9211 TREE_TYPE (exp),
9212 incremented,
9213 TREE_OPERAND (exp, 1));
a358cee0 9214
b93a436e
JL
9215 while (TREE_CODE (incremented) == NOP_EXPR
9216 || TREE_CODE (incremented) == CONVERT_EXPR)
9217 {
9218 newexp = convert (TREE_TYPE (incremented), newexp);
9219 incremented = TREE_OPERAND (incremented, 0);
9220 }
bbf6f052 9221
b93a436e
JL
9222 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
9223 return post ? op0 : temp;
9224 }
bbf6f052 9225
b93a436e
JL
9226 if (post)
9227 {
9228 /* We have a true reference to the value in OP0.
9229 If there is an insn to add or subtract in this mode, queue it.
9230 Queueing the increment insn avoids the register shuffling
9231 that often results if we must increment now and first save
9232 the old value for subsequent use. */
bbf6f052 9233
b93a436e
JL
9234#if 0 /* Turned off to avoid making extra insn for indexed memref. */
9235 op0 = stabilize (op0);
9236#endif
41dfd40c 9237
b93a436e
JL
9238 icode = (int) this_optab->handlers[(int) mode].insn_code;
9239 if (icode != (int) CODE_FOR_nothing
9240 /* Make sure that OP0 is valid for operands 0 and 1
9241 of the insn we want to queue. */
a995e389
RH
9242 && (*insn_data[icode].operand[0].predicate) (op0, mode)
9243 && (*insn_data[icode].operand[1].predicate) (op0, mode))
b93a436e 9244 {
a995e389 9245 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9246 op1 = force_reg (mode, op1);
bbf6f052 9247
b93a436e
JL
9248 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
9249 }
9250 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
9251 {
9252 rtx addr = (general_operand (XEXP (op0, 0), mode)
9253 ? force_reg (Pmode, XEXP (op0, 0))
9254 : copy_to_reg (XEXP (op0, 0)));
9255 rtx temp, result;
ca695ac9 9256
b93a436e
JL
9257 op0 = change_address (op0, VOIDmode, addr);
9258 temp = force_reg (GET_MODE (op0), op0);
a995e389 9259 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 9260 op1 = force_reg (mode, op1);
ca695ac9 9261
b93a436e
JL
9262 /* The increment queue is LIFO, thus we have to `queue'
9263 the instructions in reverse order. */
9264 enqueue_insn (op0, gen_move_insn (op0, temp));
9265 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9266 return result;
bbf6f052
RK
9267 }
9268 }
ca695ac9 9269
b93a436e
JL
9270 /* Preincrement, or we can't increment with one simple insn. */
9271 if (post)
9272 /* Save a copy of the value before inc or dec, to return it later. */
9273 temp = value = copy_to_reg (op0);
9274 else
9275 /* Arrange to return the incremented value. */
9276 /* Copy the rtx because expand_binop will protect from the queue,
9277 and the results of that would be invalid for us to return
9278 if our caller does emit_queue before using our result. */
9279 temp = copy_rtx (value = op0);
bbf6f052 9280
b93a436e
JL
9281 /* Increment however we can. */
9282 op1 = expand_binop (mode, this_optab, value, op1,
3a94c984 9283 current_function_check_memory_usage ? NULL_RTX : op0,
b93a436e
JL
9284 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9285 /* Make sure the value is stored into OP0. */
9286 if (op1 != op0)
9287 emit_move_insn (op0, op1);
5718612f 9288
b93a436e
JL
9289 return temp;
9290}
9291\f
b93a436e
JL
9292/* At the start of a function, record that we have no previously-pushed
9293 arguments waiting to be popped. */
bbf6f052 9294
b93a436e
JL
9295void
9296init_pending_stack_adjust ()
9297{
9298 pending_stack_adjust = 0;
9299}
bbf6f052 9300
b93a436e 9301/* When exiting from function, if safe, clear out any pending stack adjust
060fbabf
JL
9302 so the adjustment won't get done.
9303
9304 Note, if the current function calls alloca, then it must have a
9305 frame pointer regardless of the value of flag_omit_frame_pointer. */
bbf6f052 9306
b93a436e
JL
9307void
9308clear_pending_stack_adjust ()
9309{
9310#ifdef EXIT_IGNORE_STACK
9311 if (optimize > 0
060fbabf
JL
9312 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9313 && EXIT_IGNORE_STACK
b93a436e
JL
9314 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9315 && ! flag_inline_functions)
1503a7ec
JH
9316 {
9317 stack_pointer_delta -= pending_stack_adjust,
9318 pending_stack_adjust = 0;
9319 }
b93a436e
JL
9320#endif
9321}
bbf6f052 9322
b93a436e
JL
9323/* Pop any previously-pushed arguments that have not been popped yet. */
9324
9325void
9326do_pending_stack_adjust ()
9327{
9328 if (inhibit_defer_pop == 0)
ca695ac9 9329 {
b93a436e
JL
9330 if (pending_stack_adjust != 0)
9331 adjust_stack (GEN_INT (pending_stack_adjust));
9332 pending_stack_adjust = 0;
bbf6f052 9333 }
bbf6f052
RK
9334}
9335\f
b93a436e 9336/* Expand conditional expressions. */
bbf6f052 9337
b93a436e
JL
9338/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9339 LABEL is an rtx of code CODE_LABEL, in this function and all the
9340 functions here. */
bbf6f052 9341
b93a436e
JL
9342void
9343jumpifnot (exp, label)
ca695ac9 9344 tree exp;
b93a436e 9345 rtx label;
bbf6f052 9346{
b93a436e
JL
9347 do_jump (exp, label, NULL_RTX);
9348}
bbf6f052 9349
b93a436e 9350/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
ca695ac9 9351
b93a436e
JL
9352void
9353jumpif (exp, label)
9354 tree exp;
9355 rtx label;
9356{
9357 do_jump (exp, NULL_RTX, label);
9358}
ca695ac9 9359
b93a436e
JL
9360/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9361 the result is zero, or IF_TRUE_LABEL if the result is one.
9362 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9363 meaning fall through in that case.
ca695ac9 9364
b93a436e
JL
9365 do_jump always does any pending stack adjust except when it does not
9366 actually perform a jump. An example where there is no jump
9367 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
ca695ac9 9368
b93a436e
JL
9369 This function is responsible for optimizing cases such as
9370 &&, || and comparison operators in EXP. */
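/* E.g., `if (a && b) f ();' is handled by calling do_jump on the
 TRUTH_ANDIF_EXPR with a label past the call as IF_FALSE_LABEL:
 control jumps there as soon as `a' is found to be zero, so `b'
 is never evaluated in that case. */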

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  register enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  int i;
  tree type;
  enum machine_mode mode;

#ifdef MAX_INTEGER_COMPUTATION_MODE
  check_max_integer_computation_mode (exp);
#endif

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
	emit_jump (temp);
      break;

#if 0
    /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
	emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
	  || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
	goto normal;
      /* Fall through.  */
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
	 narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
	   < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	goto normal;
      /* Fall through.  */
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->non-zero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, recurse through our first
	 operand, and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      placeholder_list = TREE_CHAIN (placeholder_list);
      break;

#if 0
    /* This is never less insns than evaluating the PLUS_EXPR followed by
       a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
		   TREE_OPERAND (exp, 0),
		   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
				 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
      do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
				  TREE_OPERAND (exp, 0),
				  TREE_OPERAND (exp, 1)),
			   NE, NE, if_false_label, if_true_label);
      break;

    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
	 smallest type that fits.  If the machine doesn't have comparisons
	 that small, it will be converted back to the wider comparison.
	 This helps if we are testing the sign bit of a narrower object.
	 combine can't do this for us because it can't know whether a
	 ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
	  && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
	  && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
	  && (type = type_for_mode (mode, 1)) != 0
	  && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	  && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
	      != CODE_FOR_nothing))
	{
	  do_jump (convert (type, exp), if_false_label, if_true_label);
	  break;
	}
      goto normal;
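
      /* Worked example (illustrative): for `x & 0x80' with X a 32-bit
	 int, i == 7, so the test is retried as a QImode (8-bit)
	 comparison; a machine with byte compares can then test the sign
	 bit of the low byte instead of computing the full SImode AND.  */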

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
	if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
	if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
      {
	HOST_WIDE_INT bitsize, bitpos;
	int unsignedp;
	enum machine_mode mode;
	tree type;
	tree offset;
	int volatilep = 0;
	unsigned int alignment;

	/* Get description of this reference.  We don't actually care
	   about the underlying object here.  */
	get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &volatilep, &alignment);

	type = type_for_size (bitsize, unsignedp);
	if (! SLOW_BYTE_ACCESS
	    && type != 0 && bitsize >= 0
	    && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
	    && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
		!= CODE_FOR_nothing))
	  {
	    do_jump (convert (type, exp), if_false_label, if_true_label);
	    break;
	  }
	goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
	  && integer_zerop (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
	       && integer_onep (TREE_OPERAND (exp, 2)))
	do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
	{
	  register rtx label1 = gen_label_rtx ();
	  drop_through_label = gen_label_rtx ();

	  do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

	  start_cleanup_deferral ();
	  /* Now the THEN-expression.  */
	  do_jump (TREE_OPERAND (exp, 1),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  /* In case the do_jump just above never jumps.  */
	  do_pending_stack_adjust ();
	  emit_label (label1);

	  /* Now the ELSE-expression.  */
	  do_jump (TREE_OPERAND (exp, 2),
		   if_false_label ? if_false_label : drop_through_label,
		   if_true_label ? if_true_label : drop_through_label);
	  end_cleanup_deferral ();
	}
      break;

    case EQ_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  {
	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
	    do_jump
	      (fold
	       (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
		       fold (build (EQ_EXPR, TREE_TYPE (exp),
				    fold (build1 (REALPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp0)),
				    fold (build1 (REALPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp1)))),
		       fold (build (EQ_EXPR, TREE_TYPE (exp),
				    fold (build1 (IMAGPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp0)),
				    fold (build1 (IMAGPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp1)))))),
	       if_false_label, if_true_label);
	  }

	else if (integer_zerop (TREE_OPERAND (exp, 1)))
	  do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
	  do_jump_by_parts_equality (exp, if_false_label, if_true_label);
	else
	  do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
	break;
      }
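
      /* Note (editorial): for complex operands the case above rewrites
	 `z == w' as `REALPART (z) == REALPART (w)
	 && IMAGPART (z) == IMAGPART (w)' and re-enters do_jump, letting
	 the TRUTH_ANDIF_EXPR case supply the short-circuit label
	 plumbing; the NE case below does the same with
	 TRUTH_ORIF_EXPR.  */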

    case NE_EXPR:
      {
	tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

	if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
	    || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
	  {
	    tree exp0 = save_expr (TREE_OPERAND (exp, 0));
	    tree exp1 = save_expr (TREE_OPERAND (exp, 1));
	    do_jump
	      (fold
	       (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
		       fold (build (NE_EXPR, TREE_TYPE (exp),
				    fold (build1 (REALPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp0)),
				    fold (build1 (REALPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp1)))),
		       fold (build (NE_EXPR, TREE_TYPE (exp),
				    fold (build1 (IMAGPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp0)),
				    fold (build1 (IMAGPART_EXPR,
						  TREE_TYPE (inner_type),
						  exp1)))))),
	       if_false_label, if_true_label);
	  }

	else if (integer_zerop (TREE_OPERAND (exp, 1)))
	  do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

	else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
		 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
	  do_jump_by_parts_equality (exp, if_true_label, if_false_label);
	else
	  do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
	break;
      }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (LT, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
	do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (LE, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
	do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GT, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
	do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
	do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	enum rtx_code cmp, rcmp;
	int do_rev;

	if (code == UNORDERED_EXPR)
	  cmp = UNORDERED, rcmp = ORDERED;
	else
	  cmp = ORDERED, rcmp = UNORDERED;
	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

	do_rev = 0;
	if (! can_compare_p (cmp, mode, ccp_jump)
	    && (can_compare_p (rcmp, mode, ccp_jump)
		/* If the target doesn't provide either UNORDERED or ORDERED
		   comparisons, canonicalize on UNORDERED for the library.  */
		|| rcmp == UNORDERED))
	  do_rev = 1;

	if (! do_rev)
	  do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
	else
	  do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

      {
	enum rtx_code rcode1;
	enum tree_code tcode2;

      case UNLT_EXPR:
	rcode1 = UNLT;
	tcode2 = LT_EXPR;
	goto unordered_bcc;
      case UNLE_EXPR:
	rcode1 = UNLE;
	tcode2 = LE_EXPR;
	goto unordered_bcc;
      case UNGT_EXPR:
	rcode1 = UNGT;
	tcode2 = GT_EXPR;
	goto unordered_bcc;
      case UNGE_EXPR:
	rcode1 = UNGE;
	tcode2 = GE_EXPR;
	goto unordered_bcc;
      case UNEQ_EXPR:
	rcode1 = UNEQ;
	tcode2 = EQ_EXPR;
	goto unordered_bcc;

      unordered_bcc:
	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
	if (can_compare_p (rcode1, mode, ccp_jump))
	  do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
			       if_true_label);
	else
	  {
	    tree op0 = save_expr (TREE_OPERAND (exp, 0));
	    tree op1 = save_expr (TREE_OPERAND (exp, 1));
	    tree cmp0, cmp1;

	    /* If the target doesn't support combined unordered
	       compares, decompose into UNORDERED + comparison.  */
	    cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
	    cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
	    exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
	    do_jump (exp, if_false_label, if_true_label);
	  }
      }
      break;

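      /* Worked example (illustrative): if the target has no UNLT branch
	 insn, an UNLT_EXPR -- true when the operands are unordered or
	 OP0 < OP1 -- is rebuilt as `UNORDERED (op0, op1) || op0 < op1'
	 and handed back to do_jump's TRUTH_ORIF_EXPR machinery above.  */
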
    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT
	  || (GET_CODE (temp) == CONST_DOUBLE && GET_MODE (temp) == VOIDmode)
	  || GET_CODE (temp) == LABEL_REF)
	{
	  rtx target = temp == const0_rtx ? if_false_label : if_true_label;
	  if (target)
	    emit_jump (target);
	}
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	       && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
	/* Note swapping the labels gives us not-equal.  */
	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
				 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				 GET_MODE (temp), NULL_RTX, 0,
				 if_false_label, if_true_label);
      else
	abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
\f
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
				if_true_label);
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
			      if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
			       (unsignedp || i > 0), word_mode, NULL_RTX, 0,
			       NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
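
/* Illustrative example (not original code): on a 32-bit target, a signed
   DImode test `a > b' expands with nwords == 2 to roughly

       if (high (a) >  high (b)) goto if_true;    -- signed compare
       if (high (a) != high (b)) goto if_false;
       if (low (a)  >  low (b))  goto if_true;    -- unsigned compare
       goto if_false;

   where `high' and `low' stand for the operand_subword_force halves.  */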

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
			     operand_subword_force (op1, i, mode),
			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
			     word_mode, NULL_RTX, 0, if_false_label,
			     NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, 0, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
			     const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
			     if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
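
/* Illustrative example (not original code): testing a DImode value for
   zero on a 32-bit machine becomes

       part = high (op0) | low (op0);
       if (part == 0) goto if_true_label;

   one IOR plus a single word-mode compare instead of two separate
   compare-and-branch pairs.  */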
\f
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
			 if_false_label, if_true_label)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  if (TREE_CODE (TREE_OPERAND (exp, 1)) == ERROR_MARK)
    return;

  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
      && (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST
	  || (GET_MODE_BITSIZE (mode)
	      > GET_MODE_BITSIZE (TYPE_MODE
				  (TREE_TYPE (TREE_OPERAND (exp, 1)))))))
    {
      /* op0 might have been replaced by promoted constant, in which
	 case the type of second argument should be used.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      mode = TYPE_MODE (type);
    }
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   MIN (align0, align1),
			   if_false_label, if_true_label);
}
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);

  /* Don't crash if the comparison was erroneous.  */
  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return const0_rtx;

  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
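  /* For example (illustrative): `(x & 8) != 0' becomes `(x >> 3) & 1',
     and `(x & 8) == 0' becomes `((x >> 3) & 1) ^ 1'; when the tested
     bit is the most significant one, an unsigned (logical) shift makes
     the trailing AND unnecessary.  */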

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (! get_subtarget (subtarget)
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, operand_mode, op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  if (! get_subtarget (target)
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
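  /* For example (illustrative): for `switch (x)' with case labels 5
     through 12, earlier code computes INDEX = x - 5 and RANGE = 7; the
     single unsigned test `INDEX > 7' then catches both x < 5 (which
     wraps around to a huge unsigned value) and x > 12.  */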

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */