/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
   Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
#include "toplev.h"
#include "ggc.h"
#include "tm_p.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
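
/* For instance, on a target where the stack grows downward, the PRE_DEC
   code chosen above means that a push of a word-sized register REG is
   ultimately emitted as RTL roughly of the form

       (set (mem:SI (pre_dec:SI (reg:SI sp))) (reg:SI REG))

   (an illustrative sketch; the exact modes depend on the target).  */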

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int to_readonly;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int from_readonly;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;

static rtx get_push_address	PARAMS ((int));

static rtx enqueue_insn		PARAMS ((rtx, rtx));
static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static void clear_by_pieces	PARAMS ((rtx, int, unsigned int));
static void clear_by_pieces_1	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct clear_by_pieces *));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, int, int, enum machine_mode,
					     tree, tree, unsigned int, int));
static void store_constructor	PARAMS ((tree, rtx, unsigned int, int, int));
static rtx store_field		PARAMS ((rtx, int, int, enum machine_mode,
					 tree, enum machine_mode, int,
					 unsigned int, int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static tree save_noncopied_parts PARAMS ((tree, tree));
static tree init_noncopied_parts PARAMS ((tree, tree));
static int safe_from_p		PARAMS ((rtx, tree, int));
static int fixed_type_p		PARAMS ((tree));
static rtx var_rtx		PARAMS ((tree));
static int readonly_fields_p	PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment	PARAMS ((tree, int, int));
static void preexpand_calls	PARAMS ((tree));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
					(SIZE, ALIGN) < MOVE_RATIO)
#endif
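
/* As a worked example (assuming a hypothetical 32-bit target with
   MOVE_MAX == 4 and the default speed-optimizing MOVE_RATIO of 15):
   copying a 16-byte word-aligned structure takes four SImode moves, so
   move_by_pieces_ninsns returns 4; since 4 < 15, MOVE_BY_PIECES_P is
   true and the copy is expanded inline instead of through a movstr
   pattern or a library call.  */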

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
  obfree (free_point);
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  arg_space_so_far = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

void
free_expr_status (f)
     struct function *f;
{
  free (f->expr);
  f->expr = NULL;
}

/* Small sanity check that the queue is empty at the end of a function.  */
void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0  /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_COPY_ATTRIBUTES (new, x);
	  MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
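
/* For example, an address such as (mem:SI (plus:SI (queued ...)
   (const_int 4))) contains a QUEUED, so queued_subexp_p returns 1 for
   it (an illustrative rtx, not one built by this file).  */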

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
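
/* To sketch how the queue machinery above fits together (an
   illustrative expansion, not code from this file): for something like
   `y = x++', the increment of X is deferred with

       q = enqueue_insn (x, gen_add2_insn (x, const1_rtx));

   and the QUEUED rtx Q stands for the pre-increment value of X.  Any
   use of Q in an insn must first go through protect_from_queue, which
   yields X itself while the increment is still pending, or a temporary
   holding the old value once it has been emitted; emit_queue then
   flushes the pending increments at the end of the statement.  */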
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode,
					     copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
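
/* A worked instance of the CONST_INT widening arithmetic above (for
   illustration, assuming 8-bit QImode and 16-bit HImode): widening
   (const_int -1) known to be in QImode gives WIDTH == 8, so the masking
   step computes val = -1 & 0xff = 255.  With UNSIGNEDP nonzero the
   result is (const_int 255); otherwise bit 7 is set, the high bits are
   refilled by val |= (HOST_WIDE_INT) (-1) << 8, and the result is
   (const_int -1) again.  */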
\f

/* This macro is used to determine the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */
void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);
  data.to_readonly = RTX_UNCHANGING_P (to);
  data.from_readonly = RTX_UNCHANGING_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   (unsigned int) GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     unsigned int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
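
/* For example (again assuming a 32-bit target with MOVE_MAX == 4 and
   QImode, HImode and SImode all movable): with L == 10 and ALIGN == 4,
   the loop counts 10 / 4 = 2 SImode moves leaving 2 bytes, then
   2 / 2 = 1 HImode move leaving 0, then no QImode moves, so the
   function returns 3.  */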

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;
      RTX_UNCHANGING_P (to1) = data->to_readonly;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;
      RTX_UNCHANGING_P (from1) = data->from_readonly;

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, from1));
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     unsigned int align;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];
	  insn_operand_predicate_fn pred;

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		  || (*pred) (x, BLKmode))
	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
		  || (*pred) (y, BLKmode))
	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
		  || (*pred) (opalign, VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

      /* X, Y, or SIZE may have been passed through protect_from_queue.

	 It is unsafe to save the value generated by protect_from_queue
	 and reuse it later.  Consider what happens if emit_queue is
	 called before the return value from protect_from_queue is used.

	 Expansion of the CALL_EXPR below will call emit_queue before
	 we are finished emitting RTL for argument setup.  So if we are
	 not careful we could get the wrong value for an argument.

	 To avoid this problem we go ahead and emit code to copy X, Y &
	 SIZE into new pseudos.  We can then place those new pseudos
	 into an RTL_EXPR and use them later, even after a call to
	 emit_queue.

	 Note this is not strictly needed for library calls since they
	 do not call emit_queue before loading their arguments.  However,
	 we may need to have library calls call emit_queue in the future
	 since failing to do so could cause problems for targets which
	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different
	 conventions for returning pointers, we could end up generating
	 incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  push_obstacks_nochange ();
	  end_temporary_allocation ();
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  ggc_add_tree_root (&fn, 1);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  make_decl_rtl (fn, NULL_PTR, 1);
	  assemble_external (fn);
	  pop_obstacks ();
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
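
/* As a usage sketch (illustrative only; DST and SRC stand for BLKmode
   MEMs already passed through protect_from_queue): a caller copying a
   32-byte block of known 4-byte alignment would write

       emit_block_move (dst, src, GEN_INT (32), 4);

   and the strategy above picks inline expansion when MOVE_BY_PIECES_P
   approves, a movstrM pattern when the target provides one, and a call
   to memcpy or bcopy otherwise.  */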
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
			       GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
		    operand_subword_force (x, i, mode));
}
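
/* Usage sketch, for illustration only: a caller needing a two-word
   value X loaded into two consecutive hard registers starting at a
   hypothetical register 3 would write

	move_block_to_reg (3, x, 2, DImode);

   which tries a load_multiple pattern first, then falls back to one
   word_mode move per register.  */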

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
		      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
	abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
			    gen_rtx_REG (word_mode, regno),
			    build_int_2 ((UNITS_PER_WORD - size)
					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
				GEN_INT (nregs));
      if (pat)
	{
	  emit_insn (pat);
	  return;
	}
      else
	delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
	abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}
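
/* Worked example, for illustration only: with UNITS_PER_WORD == 4 on a
   BYTES_BIG_ENDIAN target, a SIZE == 3 block sits right-justified in
   the register, so the code above shifts it left by

	(UNITS_PER_WORD - size) * BITS_PER_UNIT == (4 - 3) * 8 == 8

   bits before the single word store, leaving the three meaningful
   bytes at the low memory addresses.  */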

/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC in bytes.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */

void
emit_group_load (dst, orig_src, ssize, align)
     rtx dst, orig_src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* If we won't be loading directly from memory, protect the real source
     from strange tricks we might play.  */
  src = orig_src;
  if (GET_CODE (src) != MEM)
    {
      if (GET_MODE (src) == VOIDmode)
	src = gen_reg_rtx (GET_MODE (dst));
      else
	src = gen_reg_rtx (GET_MODE (orig_src));
      emit_move_insn (src, orig_src);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + bytelen > ssize)
	{
	  shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	  bytelen = ssize - bytepos;
	  if (bytelen <= 0)
	    abort ();
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
	  && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	{
	  tmps[i] = gen_reg_rtx (mode);
	  emit_move_insn (tmps[i],
			  change_address (src, mode,
					  plus_constant (XEXP (src, 0),
							 bytepos)));
	}
      else if (GET_CODE (src) == CONCAT)
	{
	  if (bytepos == 0
	      && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
	    tmps[i] = XEXP (src, 0);
	  else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
		   && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
	    tmps[i] = XEXP (src, 1);
	  else
	    abort ();
	}
      else
	{
	  tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
				       bytepos * BITS_PER_UNIT, 1, NULL_RTX,
				       mode, mode, align, ssize);
	}

      if (BYTES_BIG_ENDIAN && shift)
	{
	  expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
			tmps[i], 0, OPTAB_WIDEN);
	}
    }
  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
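
/* Illustrative sketch, not taken from any particular back end: a DST
   PARALLEL describing a value passed in two non-consecutive registers
   might look like

	(parallel [(expr_list (reg:DI 4) (const_int 0))
		   (expr_list (reg:DI 5) (const_int 8))])

   meaning bytes 0-7 of SRC go in register 4 and bytes 8-15 in
   register 5; the loop above extracts one piece per expr_list
   element.  */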

/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST,
   in bytes.  */

void
emit_group_store (orig_dst, src, ssize, align)
     rtx orig_dst, src;
     int ssize;
     unsigned int align;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
	 a return statement.  In that case, the dst and src are the same,
	 so no action is necessary.  */
      if (rtx_equal_p (dst, src))
	return;

      /* It is unclear if we can ever reach here, but we may as well handle
	 it.  Allocate a temporary, and split this into a store/load to/from
	 the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize, align);
      emit_group_load (dst, temp, ssize, align);
      return;
    }
  else if (GET_CODE (dst) != MEM)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }
  else if (! MEM_IN_STRUCT_P (dst))
    {
      /* store_bit_field requires that memory operations have
	 mem_in_struct_p set; we might not.  */

      dst = copy_rtx (orig_dst);
      MEM_SET_IN_STRUCT_P (dst, 1);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + bytelen > ssize)
	{
	  if (BYTES_BIG_ENDIAN)
	    {
	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
			    tmps[i], 0, OPTAB_WIDEN);
	    }
	  bytelen = ssize - bytepos;
	}

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
	  && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
	  && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
	  && bytelen == GET_MODE_SIZE (mode))
	emit_move_insn (change_address (dst, mode,
					plus_constant (XEXP (dst, 0),
						       bytepos)),
			tmps[i]);
      else
	store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
			 mode, tmps[i], align, ssize);
    }

  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}
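
/* Worked example, for illustration only: storing an SSIZE == 6 byte
   struct from one 8-byte register piece at BYTEPOS == 0 overruns by two
   bytes, so on a BYTES_BIG_ENDIAN target the code above first shifts
   the piece right by

	(bytelen - (ssize - bytepos)) * BITS_PER_UNIT == (8 - 6) * 8 == 16

   bits and then stores only the remaining six bytes.  */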

/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  int bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  int bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
  int bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_stack_temp (BLKmode, bytes, 0);
      MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't,
     copy it into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg,
			      TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
    big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
					      * BITS_PER_UNIT));

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines
     which do not use strict alignment, but it doesn't seem
     worth the effort at the current time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
	 word boundary and when xbitpos == big_endian_correction
	 (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
	  || xbitpos == big_endian_correction)
	src = operand_subword_force (srcreg,
				     xbitpos / BITS_PER_WORD,
				     BLKmode);

      /* We need a new destination operand each time bitpos is on
	 a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
	dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
	 bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
		       extract_bit_field (src, bitsize,
					  xbitpos % BITS_PER_WORD, 1,
					  NULL_RTX, word_mode,
					  word_mode,
					  bitsize / BITS_PER_UNIT,
					  BITS_PER_WORD),
		       bitsize / BITS_PER_UNIT, BITS_PER_WORD);
    }
  return tgtblk;
}
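
/* Worked example, for illustration only: on a 32-bit big-endian target
   a 6-byte structure occupies one full word plus two bytes
   right-justified in a second word, so

	big_endian_correction == 32 - (6 % 4) * 8 == 16

   and the loop above reads the second source word starting 16 bits in
   (xbitpos) while writing the destination from bit 0 of its word
   (bitpos).  */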

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
	 registers.  This can also be a MEM for targets that pass values
	 partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
	use_reg (call_fusage, reg);
    }
}
\f
/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is the maximum
   alignment we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     int len;
     unsigned int align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse)
    data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT)
	clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}

/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1;

  while (data->len >= size)
    {
      if (data->reverse)
	data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, const0_rtx));
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
	data->offset += size;

      data->len -= size;
    }
}
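
/* Worked example, for illustration only: clearing LEN == 7 bytes of a
   word-aligned block on a 32-bit target proceeds mode by mode: one
   SImode store of zero (len 7 -> 3), then one HImode store (len 3 -> 1),
   then one QImode store (len 1 -> 0).  That exhaustion is why the final
   data.len != 0 check in clear_by_pieces should never fire.  */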
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
   the maximum alignment we can assume it has, measured in bytes.

   If we call a function that returns the length of the block, return it.  */

rtx
clear_storage (object, size, align)
     rtx object;
     rtx size;
     unsigned int align;
{
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  rtx retval = 0;

  if (GET_MODE (object) == BLKmode)
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
	  && MOVE_BY_PIECES_P (INTVAL (size), align))
	clear_by_pieces (object, INTVAL (size), align);

      else
	{
	  /* Try the most limited insn first, because there's no point
	     including more than one in the machine description unless
	     the more limited one has some advantage.  */

	  rtx opalign = GEN_INT (align);
	  enum machine_mode mode;

	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	       mode = GET_MODE_WIDER_MODE (mode))
	    {
	      enum insn_code code = clrstr_optab[(int) mode];
	      insn_operand_predicate_fn pred;

	      if (code != CODE_FOR_nothing
		  /* We don't need MODE to be narrower than
		     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
		     the mode mask, as it is returned by the macro, it will
		     definitely be less than the actual mode mask.  */
		  && ((GET_CODE (size) == CONST_INT
		       && ((unsigned HOST_WIDE_INT) INTVAL (size)
			   <= (GET_MODE_MASK (mode) >> 1)))
		      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		  && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		      || (*pred) (object, BLKmode))
		  && ((pred = insn_data[(int) code].operand[2].predicate) == 0
		      || (*pred) (opalign, VOIDmode)))
		{
		  rtx op1;
		  rtx last = get_last_insn ();
		  rtx pat;

		  op1 = convert_to_mode (mode, size, 1);
		  pred = insn_data[(int) code].operand[1].predicate;
		  if (pred != 0 && ! (*pred) (op1, mode))
		    op1 = copy_to_mode_reg (mode, op1);

		  pat = GEN_FCN ((int) code) (object, op1, opalign);
		  if (pat)
		    {
		      emit_insn (pat);
		      return 0;
		    }
		  else
		    delete_insns_since (last);
		}
	    }

	  /* OBJECT or SIZE may have been passed through protect_from_queue.

	     It is unsafe to save the value generated by protect_from_queue
	     and reuse it later.  Consider what happens if emit_queue is
	     called before the return value from protect_from_queue is used.

	     Expansion of the CALL_EXPR below will call emit_queue before
	     we are finished emitting RTL for argument setup.  So if we are
	     not careful we could get the wrong value for an argument.

	     To avoid this problem we go ahead and emit code to copy OBJECT
	     and SIZE into new pseudos.  We can then place those new pseudos
	     into an RTL_EXPR and use them later, even after a call to
	     emit_queue.

	     Note this is not strictly needed for library calls since they
	     do not call emit_queue before loading their arguments.  However,
	     we may need to have library calls call emit_queue in the future
	     since failing to do so could cause problems for targets which
	     define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
	  object = copy_to_mode_reg (Pmode, XEXP (object, 0));

#ifdef TARGET_MEM_FUNCTIONS
	  size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
	  size = convert_to_mode (TYPE_MODE (integer_type_node), size,
				  TREE_UNSIGNED (integer_type_node));
	  size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
	  /* It is incorrect to use the libcall calling conventions to call
	     memset in this context.

	     This could be a user call to memset and the user may wish to
	     examine the return value from memset.

	     For targets where libcalls and normal calls have different
	     conventions for returning pointers, we could end up generating
	     incorrect code.

	     So instead of using a libcall sequence we build up a suitable
	     CALL_EXPR and expand the call in the normal fashion.  */
	  if (fn == NULL_TREE)
	    {
	      tree fntype;

	      /* This was copied from except.c; I don't know if all this is
		 necessary in this context or not.  */
	      fn = get_identifier ("memset");
	      push_obstacks_nochange ();
	      end_temporary_allocation ();
	      fntype = build_pointer_type (void_type_node);
	      fntype = build_function_type (fntype, NULL_TREE);
	      fn = build_decl (FUNCTION_DECL, fn, fntype);
	      ggc_add_tree_root (&fn, 1);
	      DECL_EXTERNAL (fn) = 1;
	      TREE_PUBLIC (fn) = 1;
	      DECL_ARTIFICIAL (fn) = 1;
	      make_decl_rtl (fn, NULL_PTR, 1);
	      assemble_external (fn);
	      pop_obstacks ();
	    }

	  /* We need to make an argument list for the function call.

	     memset has three arguments: the first is a void * address, the
	     second an integer with the initialization value, and the last a
	     size_t byte count for the copy.  */
	  arg_list
	    = build_tree_list (NULL_TREE,
			       make_tree (build_pointer_type (void_type_node),
					  object));
	  TREE_CHAIN (arg_list)
	    = build_tree_list (NULL_TREE,
			       make_tree (integer_type_node, const0_rtx));
	  TREE_CHAIN (TREE_CHAIN (arg_list))
	    = build_tree_list (NULL_TREE, make_tree (sizetype, size));
	  TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

	  /* Now we have to build up the CALL_EXPR itself.  */
	  call_expr = build1 (ADDR_EXPR,
			      build_pointer_type (TREE_TYPE (fn)), fn);
	  call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			     call_expr, arg_list, NULL_TREE);
	  TREE_SIDE_EFFECTS (call_expr) = 1;

	  retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
	  emit_library_call (bzero_libfunc, 0,
			     VOIDmode, 2, object, Pmode, size,
			     TYPE_MODE (integer_type_node));
#endif
	}
    }
  else
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));

  return retval;
}
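
/* Illustrative note, not tied to any particular port: the probing loop
   above walks the integer modes from narrowest to widest, so on a
   target whose machine description supplies a "clrstrsi" pattern the
   SImode entry of clrstr_optab is the one tried; its operand predicates
   are checked first, and if expansion of the pattern fails the partial
   RTL is discarded again with delete_insns_since.  */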

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
	   && ! push_operand (x, GET_MODE (x)))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
	  || (flag_force_addr
	      && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mode >= MAX_MACHINE_MODE)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
						    * BITS_PER_UNIT),
						   (class == MODE_COMPLEX_INT
						    ? MODE_INT : MODE_FLOAT),
						   0))
	   && (mov_optab->handlers[(int) submode].insn_code
	       != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

      /* If this is a stack push, push the highpart first, so it
	 will be in the argument order.

	 In that case, change_address is used only to convert
	 the mode, not to change the address.  */
      if (stack)
	{
	  /* Note that the real part always precedes the imag part in memory
	     regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
#else
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
		      gen_realpart (submode, y)));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
		      gen_imagpart (submode, y)));
#endif
	}
      else
	{
	  rtx realpart_x, realpart_y;
	  rtx imagpart_x, imagpart_y;

	  /* If this is a complex value with each part being smaller than a
	     word, the usual calling sequence will likely pack the pieces into
	     a single register.  Unfortunately, SUBREG of hard registers only
	     deals in terms of words, so we have a problem converting input
	     arguments to the CONCAT of two registers that is used elsewhere
	     for complex values.  If this is before reload, we can copy it into
	     memory and reload.  FIXME, we should see about using extract and
	     insert on integer registers, but complex short and complex char
	     variables should be rarely used.  */
	  if (GET_MODE_BITSIZE (mode) < 2 * BITS_PER_WORD
	      && (reload_in_progress | reload_completed) == 0)
	    {
	      int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
	      int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);

	      if (packed_dest_p || packed_src_p)
		{
		  enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
					       ? MODE_FLOAT : MODE_INT);

		  enum machine_mode reg_mode =
		    mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);

		  if (reg_mode != BLKmode)
		    {
		      rtx mem = assign_stack_temp (reg_mode,
						   GET_MODE_SIZE (mode), 0);

		      rtx cmem = change_address (mem, mode, NULL_RTX);

		      cfun->cannot_inline = "function uses short complex types";

		      if (packed_dest_p)
			{
			  rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
			  emit_move_insn_1 (cmem, y);
			  return emit_move_insn_1 (sreg, mem);
			}
		      else
			{
			  rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
			  emit_move_insn_1 (mem, sreg);
			  return emit_move_insn_1 (x, cmem);
			}
		    }
		}
	    }

	  realpart_x = gen_realpart (submode, x);
	  realpart_y = gen_realpart (submode, y);
	  imagpart_x = gen_imagpart (submode, x);
	  imagpart_y = gen_imagpart (submode, y);

	  /* Show the output dies here.  This is necessary for SUBREGs
	     of pseudos since we cannot track their lifetimes correctly;
	     hard regs shouldn't appear here except as return values.
	     We never want to emit such a clobber after reload.  */
	  if (x != y
	      && ! (reload_in_progress || reload_completed)
	      && (GET_CODE (realpart_x) == SUBREG
		  || GET_CODE (imagpart_x) == SUBREG))
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (realpart_x, realpart_y));
	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
		     (imagpart_x, imagpart_y));
	}

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;
      rtx seq;
      int need_clobber;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
	 X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
	{
	  anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
	  x = change_address (x, VOIDmode, stack_pointer_rtx);
	}
#endif

      start_sequence ();

      need_clobber = 0;
      for (i = 0;
	   i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
	   i++)
	{
	  rtx xpart = operand_subword (x, i, 1, mode);
	  rtx ypart = operand_subword (y, i, 1, mode);

	  /* If we can't get a part of Y, put Y into memory if it is a
	     constant.  Otherwise, force it into a register.  If we still
	     can't get a part of Y, abort.  */
	  if (ypart == 0 && CONSTANT_P (y))
	    {
	      y = force_const_mem (mode, y);
	      ypart = operand_subword (y, i, 1, mode);
	    }
	  else if (ypart == 0)
	    ypart = operand_subword_force (y, i, mode);

	  if (xpart == 0 || ypart == 0)
	    abort ();

	  need_clobber |= (GET_CODE (xpart) == SUBREG);

	  last_insn = emit_move_insn (xpart, ypart);
	}

      seq = gen_sequence ();
      end_sequence ();

      /* Show the output dies here.  This is necessary for SUBREGs
	 of pseudos since we cannot track their lifetimes correctly;
	 hard regs shouldn't appear here except as return values.
	 We never want to emit such a clobber after reload.  */
      if (x != y
	  && ! (reload_in_progress || reload_completed)
	  && need_clobber != 0)
	emit_insn (gen_rtx_CLOBBER (VOIDmode, x));

      emit_insn (seq);

      return last_insn;
    }
  else
    abort ();
}
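
/* Worked example, for illustration only: moving a DImode value on a
   32-bit target that defines no movdi pattern takes the multi-word arm
   above, looping (8 + 3) / 4 == 2 times and emitting one word_mode move
   per subword.  The (clobber (reg X)) is emitted first only when some
   subword of X is a SUBREG of a pseudo, so that data flow analysis does
   not see a partial definition of an otherwise-dead register.  */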
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
			     temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#if defined (STACK_GROWS_DOWNWARD) \
    || (defined (ARGS_GROW_DOWNWARD) \
	&& !defined (ACCUMULATE_OUTGOING_ARGS))

  /* Return the lowest stack address when STACK or ARGS grow downward and
     we are not accumulating outgoing arguments (the c4x port uses such
     conventions).  */
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
			  - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			 negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
			 negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}
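
/* Worked example, for illustration only: on a target where neither the
   stack nor the args grow downward, pushing a constant 16-byte block
   with EXTRA == 4 and BELOW == 0 adjusts the stack by 20 bytes and
   returns the address

	virtual_outgoing_args_rtx - 16 - 4

   while BELOW != 0 would put the padding at the low addresses and
   return virtual_outgoing_args_rtx - 16.  */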

rtx
gen_push_operand ()
{
  return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}

/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
   block of SIZE bytes.  */

static rtx
get_push_address (size)
     int size;
{
  register rtx temp;

  if (STACK_PUSH_CODE == POST_DEC)
    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else if (STACK_PUSH_CODE == POST_INC)
    temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else
    temp = stack_pointer_rtx;

  return copy_to_reg (temp);
}

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is the maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
		args_addr, args_so_far, reg_parm_stack_space,
		alignment_pad)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     unsigned int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
     int reg_parm_stack_space;
     rtx alignment_pad;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
	abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
	 because registers will take care of them.  */

      if (partial != 0)
	xinner = change_address (xinner, BLKmode,
				 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
	 and if there is no difficulty with push insns that skip bytes
	 on the stack for alignment purposes.  */
      if (args_addr == 0
	  && GET_CODE (size) == CONST_INT
	  && skip == 0
	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
	  /* Here we avoid the case of a structure whose weak alignment
	     forces many pushes of a small amount of data,
	     and such small pushes do rounding that causes trouble.  */
	  && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
	      || PUSH_ROUNDING (align) == align)
	  && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
	{
	  /* Push padding now if padding above and stack grows down,
	     or if padding below and stack grows up.
	     But if space already allocated, this has already been done.  */
	  if (extra && args_addr == 0
	      && where_pad != none && where_pad != stack_direction)
	    anti_adjust_stack (GEN_INT (extra));

	  move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
			  INTVAL (size) - used, align);

	  if (current_function_check_memory_usage && ! in_check_memory_usage)
	    {
	      rtx temp;

	      in_check_memory_usage = 1;
	      temp = get_push_address (INTVAL (size) - used);
	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
				   temp, Pmode,
				   XEXP (xinner, 0), Pmode,
				   GEN_INT (INTVAL (size) - used),
				   TYPE_MODE (sizetype));
	      else
		emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
				   temp, Pmode,
				   GEN_INT (INTVAL (size) - used),
				   TYPE_MODE (sizetype),
				   GEN_INT (MEMORY_USE_RW),
				   TYPE_MODE (integer_type_node));
	      in_check_memory_usage = 0;
	    }
	}
      else
#endif /* PUSH_ROUNDING */
	{
	  /* Otherwise make space on the stack and copy the data
	     to the address of that space.  */

	  /* Deduct words put into registers from the size we must copy.  */
	  if (partial != 0)
	    {
	      if (GET_CODE (size) == CONST_INT)
		size = GEN_INT (INTVAL (size) - used);
	      else
		size = expand_binop (GET_MODE (size), sub_optab, size,
				     GEN_INT (used), NULL_RTX, 0,
				     OPTAB_LIB_WIDEN);
	    }

	  /* Get the address of the stack space.
	     In this case, we do not deal with EXTRA separately.
	     A single stack adjust will do.  */
	  if (! args_addr)
	    {
	      temp = push_block (size, extra, where_pad == downward);
	      extra = 0;
	    }
	  else if (GET_CODE (args_so_far) == CONST_INT)
	    temp = memory_address (BLKmode,
				   plus_constant (args_addr,
						  skip + INTVAL (args_so_far)));
	  else
	    temp = memory_address (BLKmode,
				   plus_constant (gen_rtx_PLUS (Pmode,
								args_addr,
								args_so_far),
						  skip));
	  if (current_function_check_memory_usage && ! in_check_memory_usage)
	    {
	      rtx target;

	      in_check_memory_usage = 1;
	      target = copy_to_reg (temp);
	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
				   target, Pmode,
				   XEXP (xinner, 0), Pmode,
				   size, TYPE_MODE (sizetype));
	      else
		emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
				   target, Pmode,
				   size, TYPE_MODE (sizetype),
				   GEN_INT (MEMORY_USE_RW),
				   TYPE_MODE (integer_type_node));
	      in_check_memory_usage = 0;
	    }

	  /* TEMP is the address of the block.  Copy the data there.  */
	  if (GET_CODE (size) == CONST_INT
	      && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
	    {
	      move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
			      INTVAL (size), align);
	      goto ret;
	    }
	  else
	    {
	      rtx opalign = GEN_INT (align);
	      enum machine_mode mode;
	      rtx target = gen_rtx_MEM (BLKmode, temp);

	      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
		   mode != VOIDmode;
		   mode = GET_MODE_WIDER_MODE (mode))
		{
		  enum insn_code code = movstr_optab[(int) mode];
		  insn_operand_predicate_fn pred;

		  if (code != CODE_FOR_nothing
		      && ((GET_CODE (size) == CONST_INT
			   && ((unsigned HOST_WIDE_INT) INTVAL (size)
			       <= (GET_MODE_MASK (mode) >> 1)))
			  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
		      && (!(pred = insn_data[(int) code].operand[0].predicate)
			  || ((*pred) (target, BLKmode)))
		      && (!(pred = insn_data[(int) code].operand[1].predicate)
			  || ((*pred) (xinner, BLKmode)))
		      && (!(pred = insn_data[(int) code].operand[3].predicate)
			  || ((*pred) (opalign, VOIDmode))))
		    {
		      rtx op2 = convert_to_mode (mode, size, 1);
		      rtx last = get_last_insn ();
		      rtx pat;

		      pred = insn_data[(int) code].operand[2].predicate;
		      if (pred != 0 && ! (*pred) (op2, mode))
			op2 = copy_to_mode_reg (mode, op2);

		      pat = GEN_FCN ((int) code) (target, xinner,
						  op2, opalign);
		      if (pat)
			{
			  emit_insn (pat);
			  goto ret;
			}
		      else
			delete_insns_since (last);
		    }
		}
	    }

#ifndef ACCUMULATE_OUTGOING_ARGS
	  /* If the source is referenced relative to the stack pointer,
	     copy it to another register to stabilize it.  We do not need
	     to do this if we know that we won't be changing sp.  */

	  if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
	      || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
	    temp = copy_to_reg (temp);
#endif

	  /* Make inhibit_defer_pop nonzero around the library call
	     to force it to pop the bcopy-arguments right away.  */
	  NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
	  emit_library_call (memcpy_libfunc, 0,
			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
			     convert_to_mode (TYPE_MODE (sizetype),
					      size, TREE_UNSIGNED (sizetype)),
			     TYPE_MODE (sizetype));
#else
	  emit_library_call (bcopy_libfunc, 0,
			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
			     convert_to_mode (TYPE_MODE (integer_type_node),
					      size,
					      TREE_UNSIGNED (integer_type_node)),
			     TYPE_MODE (integer_type_node));
#endif
	  OK_DEFER_POP;
	}
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
	 that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
	 the real data.  Otherwise, we can leave OFFSET nonzero
	 and leave the space uninitialized.  */
      if (args_addr == 0)
	offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
	 allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
	 skip the part of stack space corresponding to the registers.
	 Otherwise, start copying to the beginning of the stack space,
	 by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
	x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
	 SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
	   && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
	x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
	 has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
	if (i >= not_stack + offset)
	  emit_push_insn (operand_subword_force (x, i, mode),
			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
			  0, args_addr,
			  GEN_INT (args_offset + ((i - not_stack + skip)
						  * UNITS_PER_WORD)),
			  reg_parm_stack_space, alignment_pad);
    }
  else
    {
      rtx addr;
      rtx target = NULL_RTX;

      /* Push padding now if padding above and stack grows down,
	 or if padding below and stack grows up.
	 But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
	  && where_pad != none && where_pad != stack_direction)
	anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
	addr = gen_push_operand ();
      else
#endif
	{
	  if (GET_CODE (args_so_far) == CONST_INT)
	    addr
	      = memory_address (mode,
				plus_constant (args_addr,
					       INTVAL (args_so_far)));
	  else
	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
							args_so_far));
	  target = addr;
	}

      emit_move_insn (gen_rtx_MEM (mode, addr), x);

      if (current_function_check_memory_usage && ! in_check_memory_usage)
	{
	  in_check_memory_usage = 1;
	  if (target == 0)
	    target = get_push_address (GET_MODE_SIZE (mode));

	  if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
	    emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
			       target, Pmode,
			       XEXP (x, 0), Pmode,
			       GEN_INT (GET_MODE_SIZE (mode)),
			       TYPE_MODE (sizetype));
	  else
	    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
			       target, Pmode,
			       GEN_INT (GET_MODE_SIZE (mode)),
			       TYPE_MODE (sizetype),
			       GEN_INT (MEMORY_USE_RW),
			       TYPE_MODE (integer_type_node));
	  in_check_memory_usage = 0;
	}
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
	 The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
	emit_group_load (reg, x, -1, align);  /* ??? size?  */
      else
	move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));

  if (alignment_pad)
    anti_adjust_stack (alignment_pad);
}
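
/* Worked example, for illustration only: a scalar occupying size == 2
   words pushed with partial == 1 and args_addr == 0 gets offset == 0
   and not_stack == 1, so the word loop above pushes only word 1; word 0
   is loaded into REG by the move_block_to_reg call at `ret', after any
   memory-to-memory copies that could have required function calls.  */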
3286\f
bbf6f052
RK
3287/* Expand an assignment that stores the value of FROM into TO.
3288 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
3289 (This may contain a QUEUED rtx;
3290 if the value is constant, this rtx is a constant.)
3291 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
3292
3293 SUGGEST_REG is no longer actually used.
3294 It used to mean, copy the value through a register
3295 and return that register, if that is possible.
709f5be1 3296 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
3297
3298rtx
3299expand_assignment (to, from, want_value, suggest_reg)
3300 tree to, from;
3301 int want_value;
c5c76735 3302 int suggest_reg ATTRIBUTE_UNUSED;
bbf6f052
RK
3303{
3304 register rtx to_rtx = 0;
3305 rtx result;
3306
3307 /* Don't crash if the lhs of the assignment was erroneous. */
3308
3309 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
3310 {
3311 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3312 return want_value ? result : NULL_RTX;
3313 }
bbf6f052
RK
3314
3315 /* Assignment of a structure component needs special treatment
3316 if the structure component's rtx is not simply a MEM.
6be58303
JW
3317 Assignment of an array element at a constant index, and assignment of
3318 an array element in an unaligned packed structure field, has the same
3319 problem. */
bbf6f052 3320
08293add
RK
3321 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3322 || TREE_CODE (to) == ARRAY_REF)
bbf6f052
RK
3323 {
3324 enum machine_mode mode1;
3325 int bitsize;
3326 int bitpos;
7bb0943f 3327 tree offset;
bbf6f052
RK
3328 int unsignedp;
3329 int volatilep = 0;
0088fcb1 3330 tree tem;
729a2125 3331 unsigned int alignment;
0088fcb1
RK
3332
3333 push_temp_slots ();
839c4796
RK
3334 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3335 &unsignedp, &volatilep, &alignment);
bbf6f052
RK
3336
3337 /* If we are going to use store_bit_field and extract_bit_field,
3338 make sure to_rtx will be safe for multiple use. */
3339
3340 if (mode1 == VOIDmode && want_value)
3341 tem = stabilize_reference (tem);
3342
921b3427 3343 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
7bb0943f
RS
3344 if (offset != 0)
3345 {
906c4e36 3346 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
3347
3348 if (GET_CODE (to_rtx) != MEM)
3349 abort ();
bd070e1a
RH
3350
3351 if (GET_MODE (offset_rtx) != ptr_mode)
3352 {
3353#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 3354 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
3355#else
3356 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3357#endif
3358 }
3359
9a7b9f4f
JL
3360 /* A constant address in TO_RTX can have VOIDmode, we must not try
3361 to call force_reg for that case. Avoid that case. */
89752202
HB
3362 if (GET_CODE (to_rtx) == MEM
3363 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 3364 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3365 && bitsize
3366 && (bitpos % bitsize) == 0
3367 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3368 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3369 {
3370 rtx temp = change_address (to_rtx, mode1,
3371 plus_constant (XEXP (to_rtx, 0),
3372 (bitpos /
3373 BITS_PER_UNIT)));
3374 if (GET_CODE (XEXP (temp, 0)) == REG)
3375 to_rtx = temp;
3376 else
3377 to_rtx = change_address (to_rtx, mode1,
3378 force_reg (GET_MODE (XEXP (temp, 0)),
3379 XEXP (temp, 0)));
3380 bitpos = 0;
3381 }
3382
7bb0943f 3383 to_rtx = change_address (to_rtx, VOIDmode,
38a448ca 3384 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3385 force_reg (ptr_mode,
3386 offset_rtx)));
7bb0943f 3387 }
c5c76735 3388
3389 if (volatilep)
3390 {
3391 if (GET_CODE (to_rtx) == MEM)
3392 {
3393 /* When the offset is zero, to_rtx is the address of the
3394 structure we are storing into, and hence may be shared.
3395 We must make a new MEM before setting the volatile bit. */
3396 if (offset == 0)
3397 to_rtx = copy_rtx (to_rtx);
3398
3399 MEM_VOLATILE_P (to_rtx) = 1;
3400 }
3401#if 0 /* This was turned off because, when a field is volatile
3402 in an object which is not volatile, the object may be in a register,
3403 and then we would abort over here. */
3404 else
3405 abort ();
3406#endif
3407 }
3408
3409 if (TREE_CODE (to) == COMPONENT_REF
3410 && TREE_READONLY (TREE_OPERAND (to, 1)))
3411 {
8bd6ecc2 3412 if (offset == 0)
3413 to_rtx = copy_rtx (to_rtx);
3414
3415 RTX_UNCHANGING_P (to_rtx) = 1;
3416 }
3417
921b3427 3418 /* Check the access. */
7d384cc0 3419 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3420 {
3421 rtx to_addr;
3422 int size;
3423 int best_mode_size;
3424 enum machine_mode best_mode;
3425
3426 best_mode = get_best_mode (bitsize, bitpos,
3427 TYPE_ALIGN (TREE_TYPE (tem)),
3428 mode1, volatilep);
3429 if (best_mode == VOIDmode)
3430 best_mode = QImode;
3431
3432 best_mode_size = GET_MODE_BITSIZE (best_mode);
3433 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3434 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3435 size *= GET_MODE_SIZE (best_mode);
3436
3437 /* Check the access rights of the pointer. */
3438 if (size)
3439 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 3440 to_addr, Pmode,
e9a25f70 3441 GEN_INT (size), TYPE_MODE (sizetype),
3442 GEN_INT (MEMORY_USE_WO),
3443 TYPE_MODE (integer_type_node));
3444 }
3445
3446 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3447 (want_value
3448 /* Spurious cast makes HPUX compiler happy. */
3449 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3450 : VOIDmode),
3451 unsignedp,
3452 /* Required alignment of containing datum. */
d78d243c 3453 alignment,
3454 int_size_in_bytes (TREE_TYPE (tem)),
3455 get_alias_set (to));
3456 preserve_temp_slots (result);
3457 free_temp_slots ();
0088fcb1 3458 pop_temp_slots ();
bbf6f052 3459
3460 /* If the value is meaningful, convert RESULT to the proper mode.
3461 Otherwise, return nothing. */
3462 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3463 TYPE_MODE (TREE_TYPE (from)),
3464 result,
3465 TREE_UNSIGNED (TREE_TYPE (to)))
709f5be1 3466 : NULL_RTX);
3467 }
3468
3469 /* If the rhs is a function call and its value is not an aggregate,
3470 call the function before we start to compute the lhs.
3471 This is needed for correct code for cases such as
3472 val = setjmp (buf) on machines where reference to val
3473 requires loading up part of an address in a separate insn.
3474
3475 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3476 a promoted variable where the zero- or sign- extension needs to be done.
3477 Handling this in the normal way is safe because no computation is done
3478 before the call. */
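/* Editor's note (illustrative, not from the original source): the
   classic case guarded here is

       jmp_buf buf;
       int val;
       val = setjmp (buf);

   where a partially computed address for VAL must not be live across
   the call.  */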
3479 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 3480 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1ad87b63 3481 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3482 {
3483 rtx value;
3484
3485 push_temp_slots ();
3486 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3487 if (to_rtx == 0)
921b3427 3488 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
aaf87c45 3489
3490 /* Handle calls that return values in multiple non-contiguous locations.
3491 The Irix 6 ABI has examples of this. */
3492 if (GET_CODE (to_rtx) == PARALLEL)
3493 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3494 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
fffa9c1d 3495 else if (GET_MODE (to_rtx) == BLKmode)
db3ec607 3496 emit_block_move (to_rtx, value, expr_size (from),
ff9b5bd8 3497 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
aaf87c45 3498 else
3499 {
3500#ifdef POINTERS_EXTEND_UNSIGNED
3501 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3502 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
3503 value = convert_memory_address (GET_MODE (to_rtx), value);
3504#endif
3505 emit_move_insn (to_rtx, value);
3506 }
3507 preserve_temp_slots (to_rtx);
3508 free_temp_slots ();
0088fcb1 3509 pop_temp_slots ();
709f5be1 3510 return want_value ? to_rtx : NULL_RTX;
3511 }
3512
3513 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3514 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3515
3516 if (to_rtx == 0)
3517 {
3518 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3519 if (GET_CODE (to_rtx) == MEM)
3520 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3521 }
bbf6f052 3522
86d38d25 3523 /* Don't move directly into a return register. */
3524 if (TREE_CODE (to) == RESULT_DECL
3525 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
86d38d25 3526 {
3527 rtx temp;
3528
3529 push_temp_slots ();
3530 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
3531
3532 if (GET_CODE (to_rtx) == PARALLEL)
3533 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3534 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3535 else
3536 emit_move_insn (to_rtx, temp);
3537
3538 preserve_temp_slots (to_rtx);
3539 free_temp_slots ();
0088fcb1 3540 pop_temp_slots ();
709f5be1 3541 return want_value ? to_rtx : NULL_RTX;
3542 }
3543
3544 /* In case we are returning the contents of an object which overlaps
3545 the place the value is being stored, use a safe function when copying
3546 a value through a pointer into a structure value return block. */
3547 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3548 && current_function_returns_struct
3549 && !current_function_returns_pcc_struct)
3550 {
3551 rtx from_rtx, size;
3552
3553 push_temp_slots ();
33a20d10 3554 size = expr_size (from);
3555 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3556 EXPAND_MEMORY_USE_DONT);
3557
3558 /* Copy the access rights of the bitmap. */
7d384cc0 3559 if (current_function_check_memory_usage)
921b3427 3560 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3561 XEXP (to_rtx, 0), Pmode,
3562 XEXP (from_rtx, 0), Pmode,
3563 convert_to_mode (TYPE_MODE (sizetype),
3564 size, TREE_UNSIGNED (sizetype)),
3565 TYPE_MODE (sizetype));
3566
3567#ifdef TARGET_MEM_FUNCTIONS
d562e42e 3568 emit_library_call (memcpy_libfunc, 0,
3569 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3570 XEXP (from_rtx, 0), Pmode,
3571 convert_to_mode (TYPE_MODE (sizetype),
3572 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3573 TYPE_MODE (sizetype));
bbf6f052 3574#else
d562e42e 3575 emit_library_call (bcopy_libfunc, 0,
3576 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3577 XEXP (to_rtx, 0), Pmode,
3578 convert_to_mode (TYPE_MODE (integer_type_node),
3579 size, TREE_UNSIGNED (integer_type_node)),
3580 TYPE_MODE (integer_type_node));
3581#endif
3582
3583 preserve_temp_slots (to_rtx);
3584 free_temp_slots ();
0088fcb1 3585 pop_temp_slots ();
709f5be1 3586 return want_value ? to_rtx : NULL_RTX;
3587 }
3588
3589 /* Compute FROM and store the value in the rtx we got. */
3590
0088fcb1 3591 push_temp_slots ();
3592 result = store_expr (from, to_rtx, want_value);
3593 preserve_temp_slots (result);
3594 free_temp_slots ();
0088fcb1 3595 pop_temp_slots ();
709f5be1 3596 return want_value ? result : NULL_RTX;
3597}
3598
3599/* Generate code for computing expression EXP,
3600 and storing the value into TARGET.
3601 TARGET may contain a QUEUED rtx.
3602
3603 If WANT_VALUE is nonzero, return a copy of the value
3604 not in TARGET, so that we can be sure to use the proper
3605 value in a containing expression even if TARGET has something
3606 else stored in it. If possible, we copy the value through a pseudo
3607 and return that pseudo. Or, if the value is constant, we try to
3608 return the constant. In some cases, we return a pseudo
3609 copied *from* TARGET.
3610
3611 If the mode is BLKmode then we may return TARGET itself.
3612 It turns out that in BLKmode this doesn't cause a problem,
3613 because C has no operators that could combine two different
3614 assignments into the same BLKmode object with different values
3615 and no intervening sequence point. Will other languages need this
3616 to be more thorough?
3617
3618 If WANT_VALUE is 0, we return NULL, to make sure
3619 to catch quickly any cases where the caller uses the value
3620 and fails to set WANT_VALUE. */
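/* Editor's note (hypothetical usage sketch, not from the original
   source): a caller that needs the stored value writes

       rtx val = store_expr (rhs, dest_rtx, 1);

   while a caller that only wants the side effect writes

       store_expr (rhs, dest_rtx, 0);

   and must ignore the NULL_RTX result.  */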
3621
3622rtx
709f5be1 3623store_expr (exp, target, want_value)
3624 register tree exp;
3625 register rtx target;
709f5be1 3626 int want_value;
3627{
3628 register rtx temp;
3629 int dont_return_target = 0;
3630
3631 if (TREE_CODE (exp) == COMPOUND_EXPR)
3632 {
3633 /* Perform first part of compound expression, then assign from second
3634 part. */
3635 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3636 emit_queue ();
709f5be1 3637 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
3638 }
3639 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3640 {
3641 /* For a conditional expression, get a safe form of the target. Then
3642 test the condition, doing the appropriate assignment on either
3643 side. This avoids the creation of unnecessary temporaries.
3644 For non-BLKmode, it is more efficient not to do this. */
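/* Editor's note (illustrative, not from the original source): for
   structure-valued operands, e.g.

       struct S dest, a, b;
       dest = cond ? a : b;

   this emits a test and a direct store into DEST on each arm, instead
   of building the chosen operand in a temporary and copying it.  */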
3645
3646 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3647
3648 emit_queue ();
3649 target = protect_from_queue (target, 1);
3650
dabf8373 3651 do_pending_stack_adjust ();
3652 NO_DEFER_POP;
3653 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 3654 start_cleanup_deferral ();
709f5be1 3655 store_expr (TREE_OPERAND (exp, 1), target, 0);
956d6950 3656 end_cleanup_deferral ();
3657 emit_queue ();
3658 emit_jump_insn (gen_jump (lab2));
3659 emit_barrier ();
3660 emit_label (lab1);
956d6950 3661 start_cleanup_deferral ();
709f5be1 3662 store_expr (TREE_OPERAND (exp, 2), target, 0);
956d6950 3663 end_cleanup_deferral ();
3664 emit_queue ();
3665 emit_label (lab2);
3666 OK_DEFER_POP;
a3a58acc 3667
709f5be1 3668 return want_value ? target : NULL_RTX;
bbf6f052 3669 }
bbf6f052 3670 else if (queued_subexp_p (target))
3671 /* If target contains a postincrement, let's not risk
3672 using it as the place to generate the rhs. */
3673 {
3674 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3675 {
3676 /* Expand EXP into a new pseudo. */
3677 temp = gen_reg_rtx (GET_MODE (target));
3678 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3679 }
3680 else
906c4e36 3681 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
3682
3683 /* If target is volatile, ANSI requires accessing the value
3684 *from* the target, if it is accessed. So make that happen.
3685 In no case return the target itself. */
3686 if (! MEM_VOLATILE_P (target) && want_value)
3687 dont_return_target = 1;
bbf6f052 3688 }
3689 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3690 && GET_MODE (target) != BLKmode)
3691 /* If target is in memory and caller wants value in a register instead,
3692 arrange that. Pass TARGET as target for expand_expr so that,
3693 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3694 We know expand_expr will not use the target in that case.
3695 Don't do this if TARGET is volatile because we are supposed
3696 to write it and then read it. */
3697 {
1da93fe0 3698 temp = expand_expr (exp, target, GET_MODE (target), 0);
3699 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3700 temp = copy_to_reg (temp);
3701 dont_return_target = 1;
3702 }
3703 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3704 /* If this is a scalar in a register that is stored in a wider mode
3705 than the declared mode, compute the result into its declared mode
3706 and then convert to the wider mode. Our value is the computed
3707 expression. */
3708 {
5a32d038 3709 /* If we don't want a value, we can do the conversion inside EXP,
3710 which will often result in some optimizations. Do the conversion
3711 in two steps: first change the signedness, if needed, then
3712 do the extension. But don't do this if the type of EXP is a subtype
3713 of something else since then the conversion might involve
3714 more than just converting modes. */
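/* Editor's note (illustrative assumption): on a target whose
   PROMOTE_MODE widens sub-word variables, a 'short' local may live as
   (subreg:HI (reg:SI ...)) with SUBREG_PROMOTED_VAR_P set; the store
   is then computed in HImode and sign- or zero-extended into the
   SImode register below.  */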
3715 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3716 && TREE_TYPE (TREE_TYPE (exp)) == 0)
3717 {
3718 if (TREE_UNSIGNED (TREE_TYPE (exp))
3719 != SUBREG_PROMOTED_UNSIGNED_P (target))
3720 exp
3721 = convert
3722 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3723 TREE_TYPE (exp)),
3724 exp);
3725
3726 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3727 SUBREG_PROMOTED_UNSIGNED_P (target)),
3728 exp);
3729 }
5a32d038 3730
1499e0a8 3731 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
b258707c 3732
766f36c7 3733 /* If TEMP is a volatile MEM and we want a result value, make
3734 the access now so it gets done only once. Likewise if
3735 it contains TARGET. */
3736 if (GET_CODE (temp) == MEM && want_value
3737 && (MEM_VOLATILE_P (temp)
3738 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
3739 temp = copy_to_reg (temp);
3740
3741 /* If TEMP is a VOIDmode constant, use convert_modes to make
3742 sure that we properly convert it. */
3743 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3744 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3745 TYPE_MODE (TREE_TYPE (exp)), temp,
3746 SUBREG_PROMOTED_UNSIGNED_P (target));
3747
3748 convert_move (SUBREG_REG (target), temp,
3749 SUBREG_PROMOTED_UNSIGNED_P (target));
3750
3751 /* If we promoted a constant, change the mode back down to match
3752 target. Otherwise, the caller might get confused by a result whose
3753 mode is larger than expected. */
3754
3755 if (want_value && GET_MODE (temp) != GET_MODE (target)
3756 && GET_MODE (temp) != VOIDmode)
3757 {
3758 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3759 SUBREG_PROMOTED_VAR_P (temp) = 1;
3760 SUBREG_PROMOTED_UNSIGNED_P (temp)
3761 = SUBREG_PROMOTED_UNSIGNED_P (target);
3762 }
3763
709f5be1 3764 return want_value ? temp : NULL_RTX;
1499e0a8 3765 }
3766 else
3767 {
3768 temp = expand_expr (exp, target, GET_MODE (target), 0);
766f36c7 3769 /* Return TARGET if it's a specified hardware register.
3770 If TARGET is a volatile mem ref, either return TARGET
3771 or return a reg copied *from* TARGET; ANSI requires this.
3772
3773 Otherwise, if TEMP is not TARGET, return TEMP
3774 if it is constant (for efficiency),
3775 or if we really want the correct value. */
3776 if (!(target && GET_CODE (target) == REG
3777 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 3778 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 3779 && ! rtx_equal_p (temp, target)
709f5be1 3780 && (CONSTANT_P (temp) || want_value))
3781 dont_return_target = 1;
3782 }
3783
3784 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3785 the same as that of TARGET, adjust the constant. This is needed, for
3786 example, in case it is a CONST_DOUBLE and we want only a word-sized
3787 value. */
3788 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 3789 && TREE_CODE (exp) != ERROR_MARK
3790 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3791 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3792 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3793
7d384cc0 3794 if (current_function_check_memory_usage
3795 && GET_CODE (target) == MEM
3796 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3797 {
3798 if (GET_CODE (temp) == MEM)
3799 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3800 XEXP (target, 0), Pmode,
3801 XEXP (temp, 0), Pmode,
3802 expr_size (exp), TYPE_MODE (sizetype));
3803 else
3804 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 3805 XEXP (target, 0), Pmode,
921b3427 3806 expr_size (exp), TYPE_MODE (sizetype),
3807 GEN_INT (MEMORY_USE_WO),
3808 TYPE_MODE (integer_type_node));
3809 }
3810
3811 /* If value was not generated in the target, store it there.
3812 Convert the value to TARGET's type first if necessary. */
3813 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3814 one or both of them are volatile memory refs, we have to distinguish
3815 two cases:
3816 - expand_expr has used TARGET. In this case, we must not generate
3817 another copy. This can be detected because TEMP and TARGET are
3818 then equal according to ==.
3819 - expand_expr has not used TARGET - that means that the source just
3820 happens to have the same RTX form. Since TEMP will have been created
3821 by expand_expr, it will compare unequal according to ==.
3822 We must generate a copy in this case, to preserve the correct number
3823 of volatile memory references. */
bbf6f052 3824
6036acbb 3825 if ((! rtx_equal_p (temp, target)
3826 || (temp != target && (side_effects_p (temp)
3827 || side_effects_p (target))))
6036acbb 3828 && TREE_CODE (exp) != ERROR_MARK)
3829 {
3830 target = protect_from_queue (target, 1);
3831 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 3832 && GET_MODE (temp) != VOIDmode)
3833 {
3834 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3835 if (dont_return_target)
3836 {
3837 /* In this case, we will return TEMP,
3838 so make sure it has the proper mode.
3839 But don't forget to store the value into TARGET. */
3840 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3841 emit_move_insn (target, temp);
3842 }
3843 else
3844 convert_move (target, temp, unsignedp);
3845 }
3846
3847 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3848 {
3849 /* Handle copying a string constant into an array.
3850 The string constant may be shorter than the array.
3851 So copy just the string's actual length, and clear the rest. */
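/* Editor's note (illustrative, not from the original source):

       char buf[8] = "abc";

   copies the 4 bytes of the string constant (including the
   terminating NUL) and must clear the remaining 4 bytes of BUF.  */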
3852 rtx size;
22619c3f 3853 rtx addr;
bbf6f052 3854
3855 /* Get the size of the data type of the string,
3856 which is actually the size of the target. */
3857 size = expr_size (exp);
3858 if (GET_CODE (size) == CONST_INT
3859 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3860 emit_block_move (target, temp, size,
3861 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3862 else
bbf6f052 3863 {
3864 /* Compute the size of the data to copy from the string. */
3865 tree copy_size
c03b7665 3866 = size_binop (MIN_EXPR,
b50d17a1 3867 make_tree (sizetype, size),
fed3cef0 3868 size_int (TREE_STRING_LENGTH (exp)));
3869 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3870 VOIDmode, 0);
3871 rtx label = 0;
3872
3873 /* Copy that much. */
3874 emit_block_move (target, temp, copy_size_rtx,
3875 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3876
3877 /* Figure out how much is left in TARGET that we have to clear.
3878 Do all calculations in ptr_mode. */
3879
3880 addr = XEXP (target, 0);
3881 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3882
3883 if (GET_CODE (copy_size_rtx) == CONST_INT)
3884 {
88f63c77 3885 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
22619c3f 3886 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
3887 }
3888 else
3889 {
3890 addr = force_reg (ptr_mode, addr);
3891 addr = expand_binop (ptr_mode, add_optab, addr,
3892 copy_size_rtx, NULL_RTX, 0,
3893 OPTAB_LIB_WIDEN);
e87b4f3f 3894
88f63c77 3895 size = expand_binop (ptr_mode, sub_optab, size,
3896 copy_size_rtx, NULL_RTX, 0,
3897 OPTAB_LIB_WIDEN);
e87b4f3f 3898
e87b4f3f 3899 label = gen_label_rtx ();
3900 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3901 GET_MODE (size), 0, 0, label);
3902 }
3903
3904 if (size != const0_rtx)
3905 {
921b3427 3906 /* Be sure we can write on ADDR. */
7d384cc0 3907 if (current_function_check_memory_usage)
921b3427 3908 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 3909 addr, Pmode,
921b3427 3910 size, TYPE_MODE (sizetype),
3911 GEN_INT (MEMORY_USE_WO),
3912 TYPE_MODE (integer_type_node));
bbf6f052 3913#ifdef TARGET_MEM_FUNCTIONS
3b6f75e2 3914 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
86242483 3915 addr, ptr_mode,
3916 const0_rtx, TYPE_MODE (integer_type_node),
3917 convert_to_mode (TYPE_MODE (sizetype),
3918 size,
3919 TREE_UNSIGNED (sizetype)),
3920 TYPE_MODE (sizetype));
bbf6f052 3921#else
d562e42e 3922 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
86242483 3923 addr, ptr_mode,
3924 convert_to_mode (TYPE_MODE (integer_type_node),
3925 size,
3926 TREE_UNSIGNED (integer_type_node)),
3927 TYPE_MODE (integer_type_node));
bbf6f052 3928#endif
e87b4f3f 3929 }
22619c3f 3930
3931 if (label)
3932 emit_label (label);
3933 }
3934 }
3935 /* Handle calls that return values in multiple non-contiguous locations.
3936 The Irix 6 ABI has examples of this. */
3937 else if (GET_CODE (target) == PARALLEL)
3938 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3939 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3940 else if (GET_MODE (temp) == BLKmode)
3941 emit_block_move (target, temp, expr_size (exp),
3942 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3943 else
3944 emit_move_insn (target, temp);
3945 }
709f5be1 3946
3947 /* If we don't want a value, return NULL_RTX. */
3948 if (! want_value)
3949 return NULL_RTX;
3950
3951 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3952 ??? The latter test doesn't seem to make sense. */
3953 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 3954 return temp;
3955
3956 /* Return a copy of TARGET in a pseudo, unless it is BLKmode or a hard register, in which case return TARGET itself. */
3957 else if (want_value && GET_MODE (target) != BLKmode
3958 && ! (GET_CODE (target) == REG
3959 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 3960 return copy_to_reg (target);
3961
3962 else
709f5be1 3963 return target;
3964}
3965\f
3966/* Return 1 if EXP just contains zeros. */
3967
3968static int
3969is_zeros_p (exp)
3970 tree exp;
3971{
3972 tree elt;
3973
3974 switch (TREE_CODE (exp))
3975 {
3976 case CONVERT_EXPR:
3977 case NOP_EXPR:
3978 case NON_LVALUE_EXPR:
3979 return is_zeros_p (TREE_OPERAND (exp, 0));
3980
3981 case INTEGER_CST:
05bccae2 3982 return integer_zerop (exp);
3983
3984 case COMPLEX_CST:
3985 return
3986 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3987
3988 case REAL_CST:
41c9120b 3989 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
3990
3991 case CONSTRUCTOR:
3992 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3993 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
3994 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3995 if (! is_zeros_p (TREE_VALUE (elt)))
3996 return 0;
3997
3998 return 1;
3999
4000 default:
4001 return 0;
9de08200 4002 }
4003}
4004
4005/* Return 1 if EXP contains mostly (3/4) zeros. */
4006
4007static int
4008mostly_zeros_p (exp)
4009 tree exp;
4010{
4011 if (TREE_CODE (exp) == CONSTRUCTOR)
4012 {
4013 int elts = 0, zeros = 0;
4014 tree elt = CONSTRUCTOR_ELTS (exp);
4015 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4016 {
4017 /* If there are no ranges of true bits, it is all zero. */
4018 return elt == NULL_TREE;
4019 }
4020 for (; elt; elt = TREE_CHAIN (elt))
4021 {
4022 /* We do not handle the case where the index is a RANGE_EXPR,
4023 so the statistic will be somewhat inaccurate.
4024 We do make a more accurate count in store_constructor itself,
4025 so, since this function is only used for nested array elements,
0f41302f 4026 this should be close enough. */
4027 if (mostly_zeros_p (TREE_VALUE (elt)))
4028 zeros++;
4029 elts++;
4030 }
4031
4032 return 4 * zeros >= 3 * elts;
4033 }
4034
4035 return is_zeros_p (exp);
4036}
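/* Editor's note (worked example, not from the original source): for a
   constructor with 8 elements of which 6 are zero, the test above is
   4 * 6 >= 3 * 8, i.e. 24 >= 24, so the constructor counts as mostly
   zeros and the whole target is cleared before the nonzero stores.  */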
4037\f
4038/* Helper function for store_constructor.
4039 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4040 TYPE is the type of the CONSTRUCTOR, not the element type.
c5c76735 4041 ALIGN and CLEARED are as for store_constructor.
4042
4043 This provides a recursive shortcut back to store_constructor when it isn't
4044 necessary to go through store_field. This is so that we can pass through
4045 the cleared field to let store_constructor know that we may not have to
4046 clear a substructure if the outer structure has already been cleared. */
4047
4048static void
4049store_constructor_field (target, bitsize, bitpos,
c5c76735 4050 mode, exp, type, align, cleared)
4051 rtx target;
4052 int bitsize, bitpos;
4053 enum machine_mode mode;
4054 tree exp, type;
729a2125 4055 unsigned int align;
4056 int cleared;
4057{
4058 if (TREE_CODE (exp) == CONSTRUCTOR
4059 && bitpos % BITS_PER_UNIT == 0
4060 /* If we have a non-zero bitpos for a register target, then we just
4061 let store_field do the bitfield handling. This is unlikely to
4062 generate unnecessary clear instructions anyway. */
4063 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 4064 {
126e5b0d 4065 if (bitpos != 0)
4066 target
4067 = change_address (target,
4068 GET_MODE (target) == BLKmode
4069 || 0 != (bitpos
4070 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4071 ? BLKmode : VOIDmode,
4072 plus_constant (XEXP (target, 0),
4073 bitpos / BITS_PER_UNIT));
b7010412 4074 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
4075 }
4076 else
4077 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4078 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
0db5adc6 4079 int_size_in_bytes (type), 0);
4080}
4081
bbf6f052 4082/* Store the value of constructor EXP into the rtx TARGET.
e1a43f73 4083 TARGET is either a REG or a MEM.
c5c76735 4084 ALIGN is the maximum known alignment for TARGET, in bits.
4085 CLEARED is true if TARGET is known to have been zeroed.
4086 SIZE is the number of bytes of TARGET we are allowed to modify: this
4087 may not be the same as the size of EXP if we are assigning to a field
4088 which has been packed to exclude padding bits. */
4089
4090static void
b7010412 4091store_constructor (exp, target, align, cleared, size)
4092 tree exp;
4093 rtx target;
729a2125 4094 unsigned int align;
e1a43f73 4095 int cleared;
b7010412 4096 int size;
bbf6f052 4097{
4af3895e 4098 tree type = TREE_TYPE (exp);
a5efcd63 4099#ifdef WORD_REGISTER_OPERATIONS
34c73909 4100 rtx exp_size = expr_size (exp);
a5efcd63 4101#endif
4af3895e 4102
4103 /* We know our target cannot conflict, since safe_from_p has been called. */
4104#if 0
4105 /* Don't try copying piece by piece into a hard register
4106 since that is vulnerable to being clobbered by EXP.
4107 Instead, construct in a pseudo register and then copy it all. */
4108 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4109 {
4110 rtx temp = gen_reg_rtx (GET_MODE (target));
7205485e 4111 store_constructor (exp, temp, align, cleared, size);
4112 emit_move_insn (target, temp);
4113 return;
4114 }
4115#endif
4116
4117 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4118 || TREE_CODE (type) == QUAL_UNION_TYPE)
4119 {
4120 register tree elt;
4121
4af3895e 4122 /* Inform later passes that the whole union value is dead. */
4123 if ((TREE_CODE (type) == UNION_TYPE
4124 || TREE_CODE (type) == QUAL_UNION_TYPE)
4125 && ! cleared)
4126 {
4127 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4128
4129 /* If the constructor is empty, clear the union. */
4130 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4131 clear_storage (target, expr_size (exp),
4132 TYPE_ALIGN (type) / BITS_PER_UNIT);
4133 }
4134
4135 /* If we are building a static constructor into a register,
4136 set the initial value as zero so we can fold the value into
4137 a constant. But if more than one register is involved,
4138 this probably loses. */
4139 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4140 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
4141 {
4142 if (! cleared)
e9a25f70 4143 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4af3895e 4144
4145 cleared = 1;
4146 }
4147
4148 /* If the constructor has fewer fields than the structure
4149 or if we are initializing the structure to mostly zeros,
bbf6f052 4150 clear the whole structure first. */
4151 else if (size > 0
4152 && ((list_length (CONSTRUCTOR_ELTS (exp))
4153 != list_length (TYPE_FIELDS (type)))
4154 || mostly_zeros_p (exp)))
4155 {
4156 if (! cleared)
b7010412 4157 clear_storage (target, GEN_INT (size),
c5c76735 4158 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4159
4160 cleared = 1;
4161 }
dd1db5ec 4162 else if (! cleared)
bbf6f052 4163 /* Inform later passes that the old value is dead. */
38a448ca 4164 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4165
4166 /* Store each element of the constructor into
4167 the corresponding field of TARGET. */
4168
4169 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4170 {
4171 register tree field = TREE_PURPOSE (elt);
c5c76735 4172#ifdef WORD_REGISTER_OPERATIONS
34c73909 4173 tree value = TREE_VALUE (elt);
c5c76735 4174#endif
4175 register enum machine_mode mode;
4176 int bitsize;
b50d17a1 4177 int bitpos = 0;
bbf6f052 4178 int unsignedp;
4179 tree pos, constant = 0, offset = 0;
4180 rtx to_rtx = target;
bbf6f052 4181
4182 /* Just ignore missing fields.
4183 We cleared the whole structure, above,
4184 if any fields are missing. */
4185 if (field == 0)
4186 continue;
4187
4188 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4189 continue;
9de08200 4190
4191 if (TREE_CODE (DECL_SIZE (field)) == INTEGER_CST)
4192 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4193 else
4194 bitsize = -1;
4195
4196 unsignedp = TREE_UNSIGNED (field);
4197 mode = DECL_MODE (field);
4198 if (DECL_BIT_FIELD (field))
4199 mode = VOIDmode;
4200
4201 pos = DECL_FIELD_BITPOS (field);
4202 if (TREE_CODE (pos) == INTEGER_CST)
4203 constant = pos;
4204 else if (TREE_CODE (pos) == PLUS_EXPR
4205 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4206 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4207 else
4208 offset = pos;
4209
4210 if (constant)
cd11b87e 4211 bitpos = TREE_INT_CST_LOW (constant);
4212
4213 if (offset)
4214 {
4215 rtx offset_rtx;
4216
4217 if (contains_placeholder_p (offset))
fed3cef0 4218 offset = build (WITH_RECORD_EXPR, bitsizetype,
956d6950 4219 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 4220
9f887d05 4221 offset = size_binop (EXACT_DIV_EXPR, offset,
4222 bitsize_int (BITS_PER_UNIT));
4223 offset = convert (sizetype, offset);
bbf6f052 4224
4225 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4226 if (GET_CODE (to_rtx) != MEM)
4227 abort ();
4228
4229 if (GET_MODE (offset_rtx) != ptr_mode)
4230 {
4231#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 4232 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
4233#else
4234 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4235#endif
4236 }
4237
4238 to_rtx
4239 = change_address (to_rtx, VOIDmode,
38a448ca 4240 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
4241 force_reg (ptr_mode,
4242 offset_rtx)));
b50d17a1 4243 }
c5c76735 4244
4245 if (TREE_READONLY (field))
4246 {
9151b3bf 4247 if (GET_CODE (to_rtx) == MEM)
4248 to_rtx = copy_rtx (to_rtx);
4249
4250 RTX_UNCHANGING_P (to_rtx) = 1;
4251 }
4252
4253#ifdef WORD_REGISTER_OPERATIONS
4254 /* If this initializes a field that is smaller than a word, at the
4255 start of a word, try to widen it to a full word.
4256 This special case allows us to output C++ member function
4257 initializations in a form that the optimizers can understand. */
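/* Editor's note (illustrative assumption): initializing the first
   'short' member of a struct held in a word-sized register turns the
   16-bit constant store into a full-word store; on a big-endian
   target the constant is first shifted left by
   BITS_PER_WORD - bitsize so the bits land in the high half.  */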
4258 if (constant
4259 && GET_CODE (target) == REG
4260 && bitsize < BITS_PER_WORD
4261 && bitpos % BITS_PER_WORD == 0
4262 && GET_MODE_CLASS (mode) == MODE_INT
4263 && TREE_CODE (value) == INTEGER_CST
4264 && GET_CODE (exp_size) == CONST_INT
4265 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4266 {
4267 tree type = TREE_TYPE (value);
4268 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4269 {
4270 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4271 value = convert (type, value);
4272 }
4273 if (BYTES_BIG_ENDIAN)
4274 value
4275 = fold (build (LSHIFT_EXPR, type, value,
4276 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4277 bitsize = BITS_PER_WORD;
4278 mode = word_mode;
4279 }
4280#endif
4281 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4282 TREE_VALUE (elt), type,
4283 MIN (align,
4284 DECL_ALIGN (TREE_PURPOSE (elt))),
4285 cleared);
4286 }
4287 }
4af3895e 4288 else if (TREE_CODE (type) == ARRAY_TYPE)
4289 {
4290 register tree elt;
4291 register int i;
e1a43f73 4292 int need_to_clear;
4af3895e 4293 tree domain = TYPE_DOMAIN (type);
4294 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4295 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 4296 tree elttype = TREE_TYPE (type);
bbf6f052 4297
e1a43f73 4298 /* If the constructor has fewer elements than the array,
4299 clear the whole array first. Similarly if this is a
4300 static constructor of a non-BLKmode object. */
4301 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4302 need_to_clear = 1;
4303 else
4304 {
4305 HOST_WIDE_INT count = 0, zero_count = 0;
4306 need_to_clear = 0;
4307 /* This loop is a more accurate version of the loop in
4308 mostly_zeros_p (it handles RANGE_EXPR in an index).
4309 It is also needed to check for missing elements. */
4310 for (elt = CONSTRUCTOR_ELTS (exp);
4311 elt != NULL_TREE;
df0faff1 4312 elt = TREE_CHAIN (elt))
4313 {
4314 tree index = TREE_PURPOSE (elt);
4315 HOST_WIDE_INT this_node_count;
4316 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4317 {
4318 tree lo_index = TREE_OPERAND (index, 0);
4319 tree hi_index = TREE_OPERAND (index, 1);
4320
4321 if (TREE_CODE (lo_index) != INTEGER_CST
4322 || TREE_CODE (hi_index) != INTEGER_CST)
4323 {
4324 need_to_clear = 1;
4325 break;
4326 }
4327 this_node_count = (TREE_INT_CST_LOW (hi_index)
4328 - TREE_INT_CST_LOW (lo_index) + 1);
4329 }
4330 else
4331 this_node_count = 1;
4332 count += this_node_count;
4333 if (mostly_zeros_p (TREE_VALUE (elt)))
4334 zero_count += this_node_count;
4335 }
8e958f70 4336 /* Clear the entire array first if there are any missing elements,
0f41302f 4337 or if the incidence of zero elements is >= 75%. */
4338 if (count < maxelt - minelt + 1
4339 || 4 * zero_count >= 3 * count)
4340 need_to_clear = 1;
4341 }
9376fcd6 4342 if (need_to_clear && size > 0)
4343 {
4344 if (! cleared)
b7010412 4345 clear_storage (target, GEN_INT (size),
c5c76735 4346 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
4347 cleared = 1;
4348 }
4349 else
4350 /* Inform later passes that the old value is dead. */
38a448ca 4351 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4352
4353 /* Store each element of the constructor into
4354 the corresponding element of TARGET, determined
4355 by counting the elements. */
4356 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4357 elt;
4358 elt = TREE_CHAIN (elt), i++)
4359 {
4360 register enum machine_mode mode;
4361 int bitsize;
4362 int bitpos;
4363 int unsignedp;
e1a43f73 4364 tree value = TREE_VALUE (elt);
729a2125 4365 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
4366 tree index = TREE_PURPOSE (elt);
4367 rtx xtarget = target;
4368
4369 if (cleared && is_zeros_p (value))
4370 continue;
9de08200 4371
bbf6f052 4372 unsignedp = TREE_UNSIGNED (elttype);
4373 mode = TYPE_MODE (elttype);
4374 if (mode == BLKmode)
4375 {
4376 if (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4377 && TREE_INT_CST_HIGH (TYPE_SIZE (elttype)) == 0)
4378 bitsize = TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4379 else
4380 bitsize = -1;
4381 }
4382 else
4383 bitsize = GET_MODE_BITSIZE (mode);
4384
4385 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4386 {
4387 tree lo_index = TREE_OPERAND (index, 0);
4388 tree hi_index = TREE_OPERAND (index, 1);
4389 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4390 struct nesting *loop;
4391 HOST_WIDE_INT lo, hi, count;
4392 tree position;
e1a43f73 4393
0f41302f 4394 /* If the range is constant and "small", unroll the loop. */
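/* Editor's note (illustrative, not from the original source): a GNU C
   designated range initializer such as

       int a[16] = { [2 ... 5] = 7 };

   yields a RANGE_EXPR index; with a small constant range like this
   the four stores are emitted directly instead of through a runtime
   loop.  */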
e1a43f73 4395 if (TREE_CODE (lo_index) == INTEGER_CST
4396 && TREE_CODE (hi_index) == INTEGER_CST
4397 && (lo = TREE_INT_CST_LOW (lo_index),
4398 hi = TREE_INT_CST_LOW (hi_index),
4399 count = hi - lo + 1,
4400 (GET_CODE (target) != MEM
4401 || count <= 2
4402 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4403 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4404 <= 40 * 8))))
e1a43f73 4405 {
4406 lo -= minelt; hi -= minelt;
4407 for (; lo <= hi; lo++)
e1a43f73 4408 {
05c0b405 4409 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4410 store_constructor_field (target, bitsize, bitpos, mode,
4411 value, type, align, cleared);
4412 }
4413 }
4414 else
4415 {
4416 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4417 loop_top = gen_label_rtx ();
4418 loop_end = gen_label_rtx ();
4419
4420 unsignedp = TREE_UNSIGNED (domain);
4421
4422 index = build_decl (VAR_DECL, NULL_TREE, domain);
4423
4424 DECL_RTL (index) = index_r
4425 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4426 &unsignedp, 0));
4427
4428 if (TREE_CODE (value) == SAVE_EXPR
4429 && SAVE_EXPR_RTL (value) == 0)
4430 {
4431 /* Make sure value gets expanded once before the
4432 loop. */
4433 expand_expr (value, const0_rtx, VOIDmode, 0);
4434 emit_queue ();
4435 }
4436 store_expr (lo_index, index_r, 0);
4437 loop = expand_start_loop (0);
4438
0f41302f 4439 /* Assign value to element index. */
4440 position
4441 = convert (ssizetype,
4442 fold (build (MINUS_EXPR, TREE_TYPE (index),
4443 index, TYPE_MIN_VALUE (domain))));
4444 position = size_binop (MULT_EXPR, position,
4445 convert (ssizetype,
4446 TYPE_SIZE_UNIT (elttype)));
4447
e1a43f73 4448 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4449 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
4450 xtarget = change_address (target, mode, addr);
4451 if (TREE_CODE (value) == CONSTRUCTOR)
4452 store_constructor (value, xtarget, align, cleared,
4453 bitsize / BITS_PER_UNIT);
4454 else
4455 store_expr (value, xtarget, 0);
4456
4457 expand_exit_loop_if_false (loop,
4458 build (LT_EXPR, integer_type_node,
4459 index, hi_index));
4460
4461 expand_increment (build (PREINCREMENT_EXPR,
4462 TREE_TYPE (index),
7b8b9722 4463 index, integer_one_node), 0, 0);
4464 expand_end_loop ();
4465 emit_label (loop_end);
4466 }
4467 }
4468 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
5b6c44ff 4469 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
03dc44a6 4470 {
e1a43f73 4471 rtx pos_rtx, addr;
4472 tree position;
4473
5b6c44ff 4474 if (index == 0)
fed3cef0 4475 index = ssize_int (1);
5b6c44ff 4476
e1a43f73 4477 if (minelt)
4478 index = convert (ssizetype,
4479 fold (build (MINUS_EXPR, index,
4480 TYPE_MIN_VALUE (domain))));
4481 position = size_binop (MULT_EXPR, index,
4482 convert (ssizetype,
4483 TYPE_SIZE_UNIT (elttype)));
03dc44a6 4484 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4485 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
03dc44a6 4486 xtarget = change_address (target, mode, addr);
e1a43f73 4487 store_expr (value, xtarget, 0);
4488 }
4489 else
4490 {
4491 if (index != 0)
7c314719 4492 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
4493 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4494 else
4495 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4496 store_constructor_field (target, bitsize, bitpos, mode, value,
4497 type, align, cleared);
03dc44a6 4498 }
4499 }
4500 }
4501 /* Set constructor assignments. */
4502 else if (TREE_CODE (type) == SET_TYPE)
4503 {
e1a43f73 4504 tree elt = CONSTRUCTOR_ELTS (exp);
e1a43f73 4505 int nbytes = int_size_in_bytes (type), nbits;
4506 tree domain = TYPE_DOMAIN (type);
4507 tree domain_min, domain_max, bitlength;
4508
9faa82d8 4509 /* The default implementation strategy is to extract the constant
4510 parts of the constructor, use that to initialize the target,
4511 and then "or" in whatever non-constant ranges we need in addition.
4512
4513 If a large set is all zero or all ones, it is
4514 probably better to set it using memset (if available) or bzero.
4515 Also, if a large set has just a single range, it may also be
4516 better to first clear the whole set (using
4517 bzero/memset) and then set the bits we want. */
071a6595 4518
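/* Editor's note (illustrative assumption; SET_TYPE constructors come
   from Pascal-style front ends, not from C): a set like [2..4, 7]
   over a 16-bit domain packs, with little-endian bit numbering, into
   the word 0x9c (bits 2, 3, 4 and 7), which is stored with a single
   move.  */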
0f41302f 4519 /* Check for all zeros. */
9376fcd6 4520 if (elt == NULL_TREE && size > 0)
071a6595 4521 {
e1a43f73 4522 if (!cleared)
b7010412 4523 clear_storage (target, GEN_INT (size),
e1a43f73 4524 TYPE_ALIGN (type) / BITS_PER_UNIT);
4525 return;
4526 }
4527
4528 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4529 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4530 bitlength = size_binop (PLUS_EXPR,
4531 size_diffop (domain_max, domain_min),
4532 ssize_int (1));
071a6595 4533
4534 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4535 abort ();
4536 nbits = TREE_INT_CST_LOW (bitlength);
4537
4538 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4539 are "complicated" (more than one range), initialize (the
4540 constant parts) by copying from a constant. */
4541 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4542 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 4543 {
4544 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4545 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 4546 char *bit_buffer = (char *) alloca (nbits);
4547 HOST_WIDE_INT word = 0;
4548 int bit_pos = 0;
4549 int ibit = 0;
0f41302f 4550 int offset = 0; /* In bytes from beginning of set. */
e1a43f73 4551 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 4552 for (;;)
071a6595 4553 {
4554 if (bit_buffer[ibit])
4555 {
b09f3348 4556 if (BYTES_BIG_ENDIAN)
4557 word |= (1 << (set_word_size - 1 - bit_pos));
4558 else
4559 word |= 1 << bit_pos;
4560 }
4561 bit_pos++; ibit++;
4562 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 4563 {
4564 if (word != 0 || ! cleared)
4565 {
4566 rtx datum = GEN_INT (word);
4567 rtx to_rtx;
4568 /* The assumption here is that it is safe to use
4569 XEXP if the set is multi-word, but not if
4570 it's single-word. */
4571 if (GET_CODE (target) == MEM)
4572 {
4573 to_rtx = plus_constant (XEXP (target, 0), offset);
4574 to_rtx = change_address (target, mode, to_rtx);
4575 }
4576 else if (offset == 0)
4577 to_rtx = target;
4578 else
4579 abort ();
4580 emit_move_insn (to_rtx, datum);
4581 }
4582 if (ibit == nbits)
4583 break;
4584 word = 0;
4585 bit_pos = 0;
4586 offset += set_word_size / BITS_PER_UNIT;
4587 }
4588 }
071a6595 4589 }
4590 else if (!cleared)
4591 {
0f41302f 4592 /* Don't bother clearing storage if the set is all ones. */
4593 if (TREE_CHAIN (elt) != NULL_TREE
4594 || (TREE_PURPOSE (elt) == NULL_TREE
4595 ? nbits != 1
4596 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4597 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4598 || ((HOST_WIDE_INT) TREE_INT_CST_LOW (TREE_VALUE (elt))
4599 - (HOST_WIDE_INT) TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4600 != nbits))))
4601 clear_storage (target, expr_size (exp),
4602 TYPE_ALIGN (type) / BITS_PER_UNIT);
4603 }
4604
4605 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
4606 {
4607 /* Start of range of element, or NULL. */
4608 tree startbit = TREE_PURPOSE (elt);
4609 /* End of range of element, or the element value. */
4610 tree endbit = TREE_VALUE (elt);
381127e8 4611#ifdef TARGET_MEM_FUNCTIONS
071a6595 4612 HOST_WIDE_INT startb, endb;
381127e8 4613#endif
4614 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4615
4616 bitlength_rtx = expand_expr (bitlength,
4617 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4618
4619 /* Handle a non-range tuple element like [ expr ]. */
4620 if (startbit == NULL_TREE)
4621 {
4622 startbit = save_expr (endbit);
4623 endbit = startbit;
4624 }
4625 startbit = convert (sizetype, startbit);
4626 endbit = convert (sizetype, endbit);
4627 if (! integer_zerop (domain_min))
4628 {
4629 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4630 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4631 }
4632 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4633 EXPAND_CONST_ADDRESS);
4634 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4635 EXPAND_CONST_ADDRESS);
4636
4637 if (REG_P (target))
4638 {
4639 targetx = assign_stack_temp (GET_MODE (target),
4640 GET_MODE_SIZE (GET_MODE (target)),
4641 0);
4642 emit_move_insn (targetx, target);
4643 }
4644 else if (GET_CODE (target) == MEM)
4645 targetx = target;
4646 else
4647 abort ();
4648
4649#ifdef TARGET_MEM_FUNCTIONS
4650 /* Optimization: If startbit and endbit are
9faa82d8 4651 constants divisible by BITS_PER_UNIT,
0f41302f 4652 call memset instead. */
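/* Editor's note (worked example, not from the original source): a
   range [8..23] gives startb == 8 and endb == 24, both divisible by
   BITS_PER_UNIT, so the two bytes at offset 1 are set with the
   equivalent of memset (addr + 1, -1, 2).  */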
4653 if (TREE_CODE (startbit) == INTEGER_CST
4654 && TREE_CODE (endbit) == INTEGER_CST
4655 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 4656 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 4657 {
4658 emit_library_call (memset_libfunc, 0,
4659 VOIDmode, 3,
4660 plus_constant (XEXP (targetx, 0),
4661 startb / BITS_PER_UNIT),
071a6595 4662 Pmode,
3b6f75e2 4663 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 4664 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 4665 TYPE_MODE (sizetype));
4666 }
4667 else
4668#endif
4669 {
38a448ca 4670 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4671 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4672 bitlength_rtx, TYPE_MODE (sizetype),
4673 startbit_rtx, TYPE_MODE (sizetype),
4674 endbit_rtx, TYPE_MODE (sizetype));
4675 }
4676 if (REG_P (target))
4677 emit_move_insn (target, targetx);
4678 }
4679 }
4680
4681 else
4682 abort ();
4683}
4684
4685/* Store the value of EXP (an expression tree)
4686 into a subfield of TARGET which has mode MODE and occupies
4687 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4688 If MODE is VOIDmode, it means that we are storing into a bit-field.
4689
4690 If VALUE_MODE is VOIDmode, return nothing in particular.
4691 UNSIGNEDP is not used in this case.
4692
4693 Otherwise, return an rtx for the value stored. This rtx
4694 has mode VALUE_MODE if that is convenient to do.
4695 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4696
4697 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4698 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4699
4700 ALIAS_SET is the alias set for the destination. This value will
4701 (in general) be different from that for TARGET, since TARGET is a
4702 reference to the containing structure. */
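/* Editor's note (illustrative, not from the original source): storing
   into the second field of

       struct S { unsigned a : 3; unsigned b : 5; } s;
       s.b = v;

   reaches store_field with BITSIZE == 5, MODE == VOIDmode and, on a
   typical little-endian layout, BITPOS == 3, forcing the bit-field
   path below.  */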
4703
4704static rtx
4705store_field (target, bitsize, bitpos, mode, exp, value_mode,
ece32014 4706 unsignedp, align, total_size, alias_set)
4707 rtx target;
4708 int bitsize, bitpos;
4709 enum machine_mode mode;
4710 tree exp;
4711 enum machine_mode value_mode;
4712 int unsignedp;
729a2125 4713 unsigned int align;
bbf6f052 4714 int total_size;
ece32014 4715 int alias_set;
bbf6f052 4716{
906c4e36 4717 HOST_WIDE_INT width_mask = 0;
bbf6f052 4718
4719 if (TREE_CODE (exp) == ERROR_MARK)
4720 return const0_rtx;
4721
4722 if (bitsize < HOST_BITS_PER_WIDE_INT)
4723 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
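/* Editor's note (worked example): for a 5-bit field the mask is
   ((HOST_WIDE_INT) 1 << 5) - 1 == 0x1f; it is used further down to
   mask the just-stored bits instead of refetching them from the
   bit-field.  */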
4724
4725 /* If we are storing into an unaligned field of an aligned union that is
4726 in a register, we may have the mode of TARGET being an integer mode but
4727 MODE == BLKmode. In that case, get an aligned object whose size and
4728 alignment are the same as TARGET and store TARGET into it (we can avoid
4729 the store if the field being stored is the entire width of TARGET). Then
4730 call ourselves recursively to store the field into a BLKmode version of
4731 that object. Finally, load from the object into TARGET. This is not
4732 very efficient in general, but should only be slightly more expensive
4733 than the otherwise-required unaligned accesses. Perhaps this can be
4734 cleaned up later. */
4735
4736 if (mode == BLKmode
4737 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4738 {
4739 rtx object = assign_stack_temp (GET_MODE (target),
4740 GET_MODE_SIZE (GET_MODE (target)), 0);
4741 rtx blk_object = copy_rtx (object);
4742
4743 MEM_SET_IN_STRUCT_P (object, 1);
4744 MEM_SET_IN_STRUCT_P (blk_object, 1);
4745 PUT_MODE (blk_object, BLKmode);
4746
4747 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4748 emit_move_insn (object, target);
4749
4750 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
ece32014 4751 align, total_size, alias_set);
bbf6f052 4752
4753 /* Even though we aren't returning target, we need to
4754 give it the updated value. */
4755 emit_move_insn (target, object);
4756
46093b97 4757 return blk_object;
4758 }
4759
4760 /* If the structure is in a register or if the component
4761 is a bit field, we cannot use addressing to access it.
4762 Use bit-field techniques or SUBREG to store in it. */
4763
4fa52007 4764 if (mode == VOIDmode
4765 || (mode != BLKmode && ! direct_store[(int) mode]
4766 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4767 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4fa52007 4768 || GET_CODE (target) == REG
c980ac49 4769 || GET_CODE (target) == SUBREG
4770 /* If the field isn't aligned enough to store as an ordinary memref,
4771 store it as a bit field. */
e1565e65 4772 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4773 && (align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
4774 || bitpos % GET_MODE_ALIGNMENT (mode)))
e1565e65 4775 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
4776 && (TYPE_ALIGN (TREE_TYPE (exp)) > align * BITS_PER_UNIT
4777 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4778 /* If the RHS and field are a constant size and the size of the
4779 RHS isn't the same size as the bitfield, we must use bitfield
4780 operations. */
4781 || (bitsize >= 0
4782 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4783 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 4784 {
906c4e36 4785 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 4786
4787 /* If BITSIZE is narrower than the size of the type of EXP
4788 we will be narrowing TEMP. Normally, what's wanted are the
4789 low-order bits. However, if EXP's type is a record and this is a
4790 big-endian machine, we want the upper BITSIZE bits. */
4791 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4792 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4793 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4794 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4795 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4796 - bitsize),
4797 temp, 1);
4798
4799 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4800 MODE. */
4801 if (mode != VOIDmode && mode != BLKmode
4802 && mode != TYPE_MODE (TREE_TYPE (exp)))
4803 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4804
4805 /* If the modes of TARGET and TEMP are both BLKmode, both
4806 must be in memory and BITPOS must be aligned on a byte
4807 boundary. If so, we simply do a block copy. */
4808 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4809 {
4810 unsigned int exp_align = expr_align (exp) / BITS_PER_UNIT;
4811
4812 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4813 || bitpos % BITS_PER_UNIT != 0)
4814 abort ();
4815
4816 target = change_address (target, VOIDmode,
4817 plus_constant (XEXP (target, 0),
4818 bitpos / BITS_PER_UNIT));
4819
4820 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4821 align = MIN (exp_align, align);
c297a34e 4822
4823 /* Find an alignment that is consistent with the bit position. */
4824 while ((bitpos % (align * BITS_PER_UNIT)) != 0)
4825 align >>= 1;
4826
4827 emit_block_move (target, temp,
4828 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4829 / BITS_PER_UNIT),
14a774a9 4830 align);
4831
4832 return value_mode == VOIDmode ? const0_rtx : target;
4833 }
4834
4835 /* Store the value in the bitfield. */
4836 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4837 if (value_mode != VOIDmode)
4838 {
4839 /* The caller wants an rtx for the value. */
4840 /* If possible, avoid refetching from the bitfield itself. */
4841 if (width_mask != 0
4842 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 4843 {
9074de27 4844 tree count;
5c4d7cfb 4845 enum machine_mode tmode;
86a2c12a 4846
4847 if (unsignedp)
4848 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4849 tmode = GET_MODE (temp);
4850 if (tmode == VOIDmode)
4851 tmode = value_mode;
4852 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4853 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4854 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4855 }
bbf6f052 4856 return extract_bit_field (target, bitsize, bitpos, unsignedp,
4857 NULL_RTX, value_mode, 0, align,
4858 total_size);
4859 }
4860 return const0_rtx;
4861 }
4862 else
4863 {
4864 rtx addr = XEXP (target, 0);
4865 rtx to_rtx;
4866
4867 /* If a value is wanted, it must be the lhs;
4868 so make the address stable for multiple use. */
4869
4870 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4871 && ! CONSTANT_ADDRESS_P (addr)
4872 /* A frame-pointer reference is already stable. */
4873 && ! (GET_CODE (addr) == PLUS
4874 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4875 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4876 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4877 addr = copy_to_reg (addr);
4878
4879 /* Now build a reference to just the desired component. */
4880
effbcc6a
RK
4881 to_rtx = copy_rtx (change_address (target, mode,
4882 plus_constant (addr,
4883 (bitpos
4884 / BITS_PER_UNIT))));
c6df88cb 4885 MEM_SET_IN_STRUCT_P (to_rtx, 1);
ece32014 4886 MEM_ALIAS_SET (to_rtx) = alias_set;
bbf6f052
RK
4887
4888 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4889 }
4890}
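
/* Illustrative sketch, not part of expr.c: the refetch-avoidance code
   above uses the two standard bit-field idioms.  An unsigned field is
   masked down to BITSIZE bits; a signed field is shifted to the top of
   the word and back down so that the right shift sign-extends.  A
   host-side analogue, assuming a 32-bit int and 0 < WIDTH < 32 (the
   names below are hypothetical):  */

static unsigned int
example_zero_extend (value, width)
     unsigned int value;
     int width;
{
  /* Mirrors expand_and with GEN_INT (width_mask).  */
  return value & (((unsigned int) 1 << width) - 1);
}

static int
example_sign_extend (value, width)
     int value;
     int width;
{
  /* Mirrors the LSHIFT_EXPR/RSHIFT_EXPR pair with count
     = GET_MODE_BITSIZE (tmode) - bitsize.  (Shifting signed ints this
     way is implementation-defined in C; the rtl shifts are exact.)  */
  int count = 32 - width;
  return (value << count) >> count;
}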
4891\f
4892/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4893 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 4894 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
4895
4896 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4897 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
4898 If the position of the field is variable, we store a tree
4899 giving the variable offset (in units) in *POFFSET.
4900 This offset is in addition to the bit position.
4901 If the position is not variable, we store 0 in *POFFSET.
839c4796
RK
4902 We set *PALIGNMENT to the alignment in bytes of the address that will be
4903 computed. This is the alignment of the thing we return if *POFFSET
4904 is zero, but can be less strictly aligned if *POFFSET is nonzero.
bbf6f052
RK
4905
4906 If any of the extraction expressions is volatile,
4907 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4908
4909 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4910 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
4911 is redundant.
4912
4913 If the field describes a variable-sized object, *PMODE is set to
4914 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
839c4796 4915 this case, but the address of the object can be found. */
bbf6f052
RK
4916
4917tree
4969d05d 4918get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
839c4796 4919 punsignedp, pvolatilep, palignment)
bbf6f052
RK
4920 tree exp;
4921 int *pbitsize;
4922 int *pbitpos;
7bb0943f 4923 tree *poffset;
bbf6f052
RK
4924 enum machine_mode *pmode;
4925 int *punsignedp;
4926 int *pvolatilep;
729a2125 4927 unsigned int *palignment;
bbf6f052 4928{
b50d17a1 4929 tree orig_exp = exp;
bbf6f052
RK
4930 tree size_tree = 0;
4931 enum machine_mode mode = VOIDmode;
fed3cef0 4932 tree offset = size_zero_node;
c84e2712 4933 unsigned int alignment = BIGGEST_ALIGNMENT;
bbf6f052
RK
4934
4935 if (TREE_CODE (exp) == COMPONENT_REF)
4936 {
4937 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4938 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4939 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4940 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4941 }
4942 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4943 {
4944 size_tree = TREE_OPERAND (exp, 1);
4945 *punsignedp = TREE_UNSIGNED (exp);
4946 }
4947 else
4948 {
4949 mode = TYPE_MODE (TREE_TYPE (exp));
ab87f8c8
JL
4950 if (mode == BLKmode)
4951 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4952
bbf6f052
RK
4953 *pbitsize = GET_MODE_BITSIZE (mode);
4954 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4955 }
4956
4957 if (size_tree)
4958 {
4959 if (TREE_CODE (size_tree) != INTEGER_CST)
e7c33f54
RK
4960 mode = BLKmode, *pbitsize = -1;
4961 else
4962 *pbitsize = TREE_INT_CST_LOW (size_tree);
bbf6f052
RK
4963 }
4964
4965 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4966 and find the ultimate containing object. */
4967
4968 *pbitpos = 0;
4969
4970 while (1)
4971 {
7bb0943f 4972 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 4973 {
7bb0943f
RS
4974 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4975 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4976 : TREE_OPERAND (exp, 2));
fed3cef0 4977 tree constant = bitsize_int (0), var = pos;
bbf6f052 4978
e7f3c83f
RK
4979 /* If this field hasn't been filled in yet, don't go
4980 past it. This should only happen when folding expressions
4981 made during type construction. */
4982 if (pos == 0)
4983 break;
4984
e6d8c385
RK
4985 /* Assume here that the offset is a multiple of a unit.
4986 If not, there should be an explicitly added constant. */
4987 if (TREE_CODE (pos) == PLUS_EXPR
4988 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4989 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
7bb0943f 4990 else if (TREE_CODE (pos) == INTEGER_CST)
fed3cef0 4991 constant = pos, var = bitsize_int (0);
e6d8c385
RK
4992
4993 *pbitpos += TREE_INT_CST_LOW (constant);
fed3cef0
RK
4994 offset
4995 = size_binop (PLUS_EXPR, offset,
4996 convert (sizetype,
4997 size_binop (EXACT_DIV_EXPR, var,
4998 bitsize_int (BITS_PER_UNIT))));
bbf6f052 4999 }
bbf6f052 5000
742920c7 5001 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 5002 {
742920c7
RK
5003 /* This code is based on the code in case ARRAY_REF in expand_expr
5004 below. We assume here that the size of an array element is
5005 always an integral multiple of BITS_PER_UNIT. */
5006
5007 tree index = TREE_OPERAND (exp, 1);
5008 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
5009 tree low_bound
5010 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
5011 tree index_type = TREE_TYPE (index);
ead17059 5012 tree xindex;
742920c7 5013
4c08eef0 5014 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
742920c7 5015 {
4c08eef0
RK
5016 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
5017 index);
742920c7
RK
5018 index_type = TREE_TYPE (index);
5019 }
5020
74a4fbfc
DB
5021 /* Optimize the special-case of a zero lower bound.
5022
5023 We convert the low_bound to sizetype to avoid some problems
5024 with constant folding. (E.g. suppose the lower bound is 1,
5025 and its mode is QI. Without the conversion, (ARRAY
5026 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
5027 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
5028
5029 But sizetype isn't quite right either (especially if
5030 the low bound is negative). FIXME */
5031
ca0f2220 5032 if (! integer_zerop (low_bound))
74a4fbfc
DB
5033 index = fold (build (MINUS_EXPR, index_type, index,
5034 convert (sizetype, low_bound)));
ca0f2220 5035
f8dac6eb
R
5036 if (TREE_CODE (index) == INTEGER_CST)
5037 {
5038 index = convert (sbitsizetype, index);
5039 index_type = TREE_TYPE (index);
5040 }
5041
ead17059
RH
5042 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
5043 convert (sbitsizetype,
5044 TYPE_SIZE (TREE_TYPE (exp)))));
742920c7 5045
ead17059
RH
5046 if (TREE_CODE (xindex) == INTEGER_CST
5047 && TREE_INT_CST_HIGH (xindex) == 0)
5048 *pbitpos += TREE_INT_CST_LOW (xindex);
742920c7 5049 else
956d6950 5050 {
ead17059
RH
5051 /* Either the bit offset calculated above is not constant, or
5052 it overflowed. In either case, redo the multiplication
5053 against the size in units. This is especially important
5054 in the non-constant case to avoid a division at runtime. */
fed3cef0
RK
5055 xindex
5056 = fold (build (MULT_EXPR, ssizetype, index,
5057 convert (ssizetype,
5058 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
ead17059
RH
5059
5060 if (contains_placeholder_p (xindex))
fed3cef0 5061 xindex = build (WITH_RECORD_EXPR, ssizetype, xindex, exp);
ead17059 5062
fed3cef0
RK
5063 offset
5064 = size_binop (PLUS_EXPR, offset, convert (sizetype, xindex));
956d6950 5065 }
bbf6f052
RK
5066 }
5067 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5068 && ! ((TREE_CODE (exp) == NOP_EXPR
5069 || TREE_CODE (exp) == CONVERT_EXPR)
5070 && (TYPE_MODE (TREE_TYPE (exp))
5071 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5072 break;
7bb0943f
RS
5073
5074 /* If any reference in the chain is volatile, the effect is volatile. */
5075 if (TREE_THIS_VOLATILE (exp))
5076 *pvolatilep = 1;
839c4796
RK
5077
5078 /* If the offset is non-constant already, then we can't assume any
5079 alignment more than the alignment here. */
5080 if (! integer_zerop (offset))
5081 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5082
bbf6f052
RK
5083 exp = TREE_OPERAND (exp, 0);
5084 }
5085
839c4796
RK
5086 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
5087 alignment = MIN (alignment, DECL_ALIGN (exp));
9293498f 5088 else if (TREE_TYPE (exp) != 0)
839c4796
RK
5089 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5090
742920c7
RK
5091 if (integer_zerop (offset))
5092 offset = 0;
5093
b50d17a1
RK
5094 if (offset != 0 && contains_placeholder_p (offset))
5095 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
5096
bbf6f052 5097 *pmode = mode;
7bb0943f 5098 *poffset = offset;
839c4796 5099 *palignment = alignment / BITS_PER_UNIT;
bbf6f052
RK
5100 return exp;
5101}
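
/* Illustrative sketch, not part of expr.c: for a purely constant access
   such as `o.in.f' below, the walk above simply accumulates the field
   bit positions, much as this host-side analogue does with offsetof.
   The struct names are hypothetical, and 8 stands in for BITS_PER_UNIT.  */

#include <stddef.h>

struct example_inner { int f; };
struct example_outer { char pad; struct example_inner in; };

static unsigned long
example_bitpos ()
{
  return (offsetof (struct example_outer, in)
	  + offsetof (struct example_inner, f)) * 8;
}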
921b3427
RK
5102
5103/* Subroutine of expand_expr: compute memory_usage from modifier. */
5104static enum memory_use_mode
5105get_memory_usage_from_modifier (modifier)
5106 enum expand_modifier modifier;
5107{
5108 switch (modifier)
5109 {
5110 case EXPAND_NORMAL:
e5e809f4 5111 case EXPAND_SUM:
921b3427
RK
5112 return MEMORY_USE_RO;
5113 break;
5114 case EXPAND_MEMORY_USE_WO:
5115 return MEMORY_USE_WO;
5116 break;
5117 case EXPAND_MEMORY_USE_RW:
5118 return MEMORY_USE_RW;
5119 break;
921b3427 5120 case EXPAND_MEMORY_USE_DONT:
e5e809f4
JL
5121 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5122 MEMORY_USE_DONT, because they are modifiers to a call of
5123 expand_expr in the ADDR_EXPR case of expand_expr. */
921b3427 5124 case EXPAND_CONST_ADDRESS:
e5e809f4 5125 case EXPAND_INITIALIZER:
921b3427
RK
5126 return MEMORY_USE_DONT;
5127 case EXPAND_MEMORY_USE_BAD:
5128 default:
5129 abort ();
5130 }
5131}
bbf6f052
RK
5132\f
5133/* Given an rtx VALUE that may contain additions and multiplications,
5134 return an equivalent value that just refers to a register or memory.
5135 This is done by generating instructions to perform the arithmetic
c45a13a6
RK
5136 and returning a pseudo-register containing the value.
5137
5138 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
5139
5140rtx
5141force_operand (value, target)
5142 rtx value, target;
5143{
5144 register optab binoptab = 0;
5145 /* Use a temporary to force order of execution of calls to
5146 `force_operand'. */
5147 rtx tmp;
5148 register rtx op2;
5149 /* Use subtarget as the target for operand 0 of a binary operation. */
5150 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5151
8b015896
RH
5152 /* Check for a PIC address load. */
5153 if (flag_pic
5154 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5155 && XEXP (value, 0) == pic_offset_table_rtx
5156 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5157 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5158 || GET_CODE (XEXP (value, 1)) == CONST))
5159 {
5160 if (!subtarget)
5161 subtarget = gen_reg_rtx (GET_MODE (value));
5162 emit_move_insn (subtarget, value);
5163 return subtarget;
5164 }
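      /* E.g. (plus pic_offset_table_rtx (symbol_ref "x")) is copied into
	 a register in one move here, rather than being split into an
	 addition by the optab code below.  */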
5165
bbf6f052
RK
5166 if (GET_CODE (value) == PLUS)
5167 binoptab = add_optab;
5168 else if (GET_CODE (value) == MINUS)
5169 binoptab = sub_optab;
5170 else if (GET_CODE (value) == MULT)
5171 {
5172 op2 = XEXP (value, 1);
5173 if (!CONSTANT_P (op2)
5174 && !(GET_CODE (op2) == REG && op2 != subtarget))
5175 subtarget = 0;
5176 tmp = force_operand (XEXP (value, 0), subtarget);
5177 return expand_mult (GET_MODE (value), tmp,
906c4e36 5178 force_operand (op2, NULL_RTX),
bbf6f052
RK
5179 target, 0);
5180 }
5181
5182 if (binoptab)
5183 {
5184 op2 = XEXP (value, 1);
5185 if (!CONSTANT_P (op2)
5186 && !(GET_CODE (op2) == REG && op2 != subtarget))
5187 subtarget = 0;
5188 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5189 {
5190 binoptab = add_optab;
5191 op2 = negate_rtx (GET_MODE (value), op2);
5192 }
5193
5194 /* Check for an addition with OP2 a constant integer and our first
5195 operand a PLUS of a virtual register and something else. In that
5196 case, we want to emit the sum of the virtual register and the
5197 constant first and then add the other value. This allows virtual
5198 register instantiation to simply modify the constant rather than
5199 creating another one around this addition. */
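	  /* E.g. for (plus (plus (reg virtual-stack-vars) (reg 70))
	     (const_int 4)), we emit (reg virtual-stack-vars) + 4 first,
	     so that instantiation can fold the 4 into the frame offset,
	     and only then add (reg 70).  */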
5200 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5201 && GET_CODE (XEXP (value, 0)) == PLUS
5202 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5203 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5204 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5205 {
5206 rtx temp = expand_binop (GET_MODE (value), binoptab,
5207 XEXP (XEXP (value, 0), 0), op2,
5208 subtarget, 0, OPTAB_LIB_WIDEN);
5209 return expand_binop (GET_MODE (value), binoptab, temp,
5210 force_operand (XEXP (XEXP (value, 0), 1), 0),
5211 target, 0, OPTAB_LIB_WIDEN);
5212 }
5213
5214 tmp = force_operand (XEXP (value, 0), subtarget);
5215 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 5216 force_operand (op2, NULL_RTX),
bbf6f052 5217 target, 0, OPTAB_LIB_WIDEN);
8008b228 5218 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
5219 because the only operations we are expanding here are signed ones. */
5220 }
5221 return value;
5222}
5223\f
5224/* Subroutine of expand_expr:
5225 save the non-copied parts (LIST) of an expr (LHS), and return a list
5226 which can restore these values to their previous values,
5227 should something modify their storage. */
5228
5229static tree
5230save_noncopied_parts (lhs, list)
5231 tree lhs;
5232 tree list;
5233{
5234 tree tail;
5235 tree parts = 0;
5236
5237 for (tail = list; tail; tail = TREE_CHAIN (tail))
5238 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5239 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5240 else
5241 {
5242 tree part = TREE_VALUE (tail);
5243 tree part_type = TREE_TYPE (part);
906c4e36 5244 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
06089a8b 5245 rtx target = assign_temp (part_type, 0, 1, 1);
bbf6f052 5246 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 5247 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 5248 parts = tree_cons (to_be_saved,
906c4e36
RK
5249 build (RTL_EXPR, part_type, NULL_TREE,
5250 (tree) target),
bbf6f052
RK
5251 parts);
5252 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5253 }
5254 return parts;
5255}
5256
5257/* Subroutine of expand_expr:
5258 record the non-copied parts (LIST) of an expr (LHS), and return a list
5259 which specifies the initial values of these parts. */
5260
5261static tree
5262init_noncopied_parts (lhs, list)
5263 tree lhs;
5264 tree list;
5265{
5266 tree tail;
5267 tree parts = 0;
5268
5269 for (tail = list; tail; tail = TREE_CHAIN (tail))
5270 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5271 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
c15398de 5272 else if (TREE_PURPOSE (tail))
bbf6f052
RK
5273 {
5274 tree part = TREE_VALUE (tail);
5275 tree part_type = TREE_TYPE (part);
906c4e36 5276 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
5277 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5278 }
5279 return parts;
5280}
5281
5282/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
5283 EXP can reference X, which is being modified. TOP_P is nonzero if this
5284 call is going to be used to determine whether we need a temporary
ff439b5f
CB
5285 for EXP, as opposed to a recursive call to this function.
5286
5287 It is always safe for this routine to return zero since it merely
5288 searches for optimization opportunities. */
bbf6f052
RK
5289
5290static int
e5e809f4 5291safe_from_p (x, exp, top_p)
bbf6f052
RK
5292 rtx x;
5293 tree exp;
e5e809f4 5294 int top_p;
bbf6f052
RK
5295{
5296 rtx exp_rtl = 0;
5297 int i, nops;
ff439b5f
CB
5298 static int save_expr_count;
5299 static int save_expr_size = 0;
5300 static tree *save_expr_rewritten;
5301 static tree save_expr_trees[256];
bbf6f052 5302
6676e72f
RK
5303 if (x == 0
5304 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
5305 have no way of allocating temporaries of variable size
5306 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5307 So we assume here that something at a higher level has prevented a
f4510f37 5308 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4
JL
5309 do this when X is BLKmode and when we are at the top level. */
5310 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
f4510f37 5311 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
5312 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5313 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5314 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5315 != INTEGER_CST)
f4510f37 5316 && GET_MODE (x) == BLKmode))
bbf6f052
RK
5317 return 1;
5318
ff439b5f
CB
5319 if (top_p && save_expr_size == 0)
5320 {
5321 int rtn;
5322
5323 save_expr_count = 0;
5324 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5325 save_expr_rewritten = &save_expr_trees[0];
5326
5327 rtn = safe_from_p (x, exp, 1);
5328
5329 for (i = 0; i < save_expr_count; ++i)
5330 {
5331 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5332 abort ();
5333 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5334 }
5335
5336 save_expr_size = 0;
5337
5338 return rtn;
5339 }
5340
bbf6f052
RK
5341 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5342 find the underlying pseudo. */
5343 if (GET_CODE (x) == SUBREG)
5344 {
5345 x = SUBREG_REG (x);
5346 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5347 return 0;
5348 }
5349
5350 /* If X is a location in the outgoing argument area, it is always safe. */
5351 if (GET_CODE (x) == MEM
5352 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5353 || (GET_CODE (XEXP (x, 0)) == PLUS
5354 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5355 return 1;
5356
5357 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5358 {
5359 case 'd':
5360 exp_rtl = DECL_RTL (exp);
5361 break;
5362
5363 case 'c':
5364 return 1;
5365
5366 case 'x':
5367 if (TREE_CODE (exp) == TREE_LIST)
f32fd778 5368 return ((TREE_VALUE (exp) == 0
e5e809f4 5369 || safe_from_p (x, TREE_VALUE (exp), 0))
bbf6f052 5370 && (TREE_CHAIN (exp) == 0
e5e809f4 5371 || safe_from_p (x, TREE_CHAIN (exp), 0)));
ff439b5f
CB
5372 else if (TREE_CODE (exp) == ERROR_MARK)
5373 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
5374 else
5375 return 0;
5376
5377 case '1':
e5e809f4 5378 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
5379
5380 case '2':
5381 case '<':
e5e809f4
JL
5382 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5383 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
bbf6f052
RK
5384
5385 case 'e':
5386 case 'r':
5387 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5388 the expression. If it is set, we conflict iff we are that rtx or
5389 both are in memory. Otherwise, we check all operands of the
5390 expression recursively. */
5391
5392 switch (TREE_CODE (exp))
5393 {
5394 case ADDR_EXPR:
e44842fe 5395 return (staticp (TREE_OPERAND (exp, 0))
e5e809f4
JL
5396 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5397 || TREE_STATIC (exp));
bbf6f052
RK
5398
5399 case INDIRECT_REF:
5400 if (GET_CODE (x) == MEM)
5401 return 0;
5402 break;
5403
5404 case CALL_EXPR:
5405 exp_rtl = CALL_EXPR_RTL (exp);
5406 if (exp_rtl == 0)
5407 {
5408 /* Assume that the call will clobber all hard registers and
5409 all of memory. */
5410 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5411 || GET_CODE (x) == MEM)
5412 return 0;
5413 }
5414
5415 break;
5416
5417 case RTL_EXPR:
3bb5826a
RK
5418 /* If a sequence exists, we would have to scan every instruction
5419 in the sequence to see if it was safe. This is probably not
5420 worthwhile. */
5421 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
5422 return 0;
5423
3bb5826a 5424 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
5425 break;
5426
5427 case WITH_CLEANUP_EXPR:
5428 exp_rtl = RTL_EXPR_RTL (exp);
5429 break;
5430
5dab5552 5431 case CLEANUP_POINT_EXPR:
e5e809f4 5432 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 5433
bbf6f052
RK
5434 case SAVE_EXPR:
5435 exp_rtl = SAVE_EXPR_RTL (exp);
ff439b5f
CB
5436 if (exp_rtl)
5437 break;
5438
5439 /* This SAVE_EXPR might appear many times in the top-level
5440 safe_from_p() expression, and if it has a complex
5441 subexpression, examining it multiple times could result
5442 in a combinatorial explosion. E.g. on an Alpha
5443 running at least 200MHz, a Fortran test case compiled with
5444 optimization took about 28 minutes to compile -- even though
5445 it was only a few lines long, and the complicated line causing
5446 so much time to be spent in the earlier version of safe_from_p()
5447 had only 293 or so unique nodes.
5448
5449 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5450 where it is so we can turn it back in the top-level safe_from_p()
5451 when we're done. */
5452
5453 /* For now, don't bother re-sizing the array. */
5454 if (save_expr_count >= save_expr_size)
5455 return 0;
5456 save_expr_rewritten[save_expr_count++] = exp;
ff439b5f
CB
5457
5458 nops = tree_code_length[(int) SAVE_EXPR];
5459 for (i = 0; i < nops; i++)
ff59bfe6
JM
5460 {
5461 tree operand = TREE_OPERAND (exp, i);
5462 if (operand == NULL_TREE)
5463 continue;
5464 TREE_SET_CODE (exp, ERROR_MARK);
5465 if (!safe_from_p (x, operand, 0))
5466 return 0;
5467 TREE_SET_CODE (exp, SAVE_EXPR);
5468 }
5469 TREE_SET_CODE (exp, ERROR_MARK);
ff439b5f 5470 return 1;
bbf6f052 5471
8129842c
RS
5472 case BIND_EXPR:
5473 /* The only operand we look at is operand 1. The rest aren't
5474 part of the expression. */
e5e809f4 5475 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 5476
bbf6f052 5477 case METHOD_CALL_EXPR:
0f41302f 5478 /* This takes an rtx argument, but shouldn't appear here. */
bbf6f052 5479 abort ();
e9a25f70
JL
5480
5481 default:
5482 break;
bbf6f052
RK
5483 }
5484
5485 /* If we have an rtx, we do not need to scan our operands. */
5486 if (exp_rtl)
5487 break;
5488
5489 nops = tree_code_length[(int) TREE_CODE (exp)];
5490 for (i = 0; i < nops; i++)
5491 if (TREE_OPERAND (exp, i) != 0
e5e809f4 5492 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052
RK
5493 return 0;
5494 }
5495
5496 /* If we have an rtl, find any enclosed object. Then see if we conflict
5497 with it. */
5498 if (exp_rtl)
5499 {
5500 if (GET_CODE (exp_rtl) == SUBREG)
5501 {
5502 exp_rtl = SUBREG_REG (exp_rtl);
5503 if (GET_CODE (exp_rtl) == REG
5504 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5505 return 0;
5506 }
5507
5508 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5509 are memory and EXP is not readonly. */
5510 return ! (rtx_equal_p (x, exp_rtl)
5511 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5512 && ! TREE_READONLY (exp)));
5513 }
5514
5515 /* If we reach here, it is safe. */
5516 return 1;
5517}
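
/* Illustrative sketch, not part of expr.c: the SAVE_EXPR handling above
   is a mark-and-restore walk over a DAG.  Shared nodes are overwritten
   with a sentinel code so that a second encounter costs constant time,
   and the top-level call restores the real codes afterwards.  All of
   the names below are hypothetical.  */

enum ex_code { EX_LEAF, EX_PAIR, EX_SHARED, EX_MARK };

struct ex_node
{
  enum ex_code code;
  struct ex_node *kid[2];
};

static struct ex_node *ex_marked[256];
static int ex_nmarked;

static int
ex_walk_1 (n)
     struct ex_node *n;
{
  if (n == 0 || n->code == EX_MARK)	/* Revisited shared node: cheap.  */
    return 1;

  if (n->code == EX_SHARED)
    {
      if (ex_nmarked >= 256)		/* Table full: punt, as expr.c does.  */
	return 0;
      ex_marked[ex_nmarked++] = n;
      n->code = EX_MARK;
    }

  if (n->kid[0] != 0 && ! ex_walk_1 (n->kid[0]))
    return 0;
  return n->kid[1] == 0 || ex_walk_1 (n->kid[1]);
}

static int
ex_walk (n)
     struct ex_node *n;
{
  int i, ok;

  ex_nmarked = 0;
  ok = ex_walk_1 (n);
  for (i = 0; i < ex_nmarked; i++)	/* Turn the marks back, as the
					   top-level safe_from_p does.  */
    ex_marked[i]->code = EX_SHARED;
  return ok;
}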
5518
5519/* Subroutine of expand_expr: return nonzero iff EXP is an
5520 expression whose type is statically determinable. */
5521
5522static int
5523fixed_type_p (exp)
5524 tree exp;
5525{
5526 if (TREE_CODE (exp) == PARM_DECL
5527 || TREE_CODE (exp) == VAR_DECL
5528 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5529 || TREE_CODE (exp) == COMPONENT_REF
5530 || TREE_CODE (exp) == ARRAY_REF)
5531 return 1;
5532 return 0;
5533}
01c8a7c8
RK
5534
5535/* Subroutine of expand_expr: return rtx if EXP is a
5536 variable or parameter; else return 0. */
5537
5538static rtx
5539var_rtx (exp)
5540 tree exp;
5541{
5542 STRIP_NOPS (exp);
5543 switch (TREE_CODE (exp))
5544 {
5545 case PARM_DECL:
5546 case VAR_DECL:
5547 return DECL_RTL (exp);
5548 default:
5549 return 0;
5550 }
5551}
dbecbbe4
JL
5552
5553#ifdef MAX_INTEGER_COMPUTATION_MODE
5554void
5555check_max_integer_computation_mode (exp)
5556 tree exp;
5557{
5f652c07 5558 enum tree_code code;
dbecbbe4
JL
5559 enum machine_mode mode;
5560
5f652c07
JM
5561 /* Strip any NOPs that don't change the mode. */
5562 STRIP_NOPS (exp);
5563 code = TREE_CODE (exp);
5564
71bca506
JL
5565 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5566 if (code == NOP_EXPR
5567 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5568 return;
5569
dbecbbe4
JL
5570 /* First check the type of the overall operation. We need only look at
5571 unary, binary and relational operations. */
5572 if (TREE_CODE_CLASS (code) == '1'
5573 || TREE_CODE_CLASS (code) == '2'
5574 || TREE_CODE_CLASS (code) == '<')
5575 {
5576 mode = TYPE_MODE (TREE_TYPE (exp));
5577 if (GET_MODE_CLASS (mode) == MODE_INT
5578 && mode > MAX_INTEGER_COMPUTATION_MODE)
5579 fatal ("unsupported wide integer operation");
5580 }
5581
5582 /* Check operand of a unary op. */
5583 if (TREE_CODE_CLASS (code) == '1')
5584 {
5585 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5586 if (GET_MODE_CLASS (mode) == MODE_INT
5587 && mode > MAX_INTEGER_COMPUTATION_MODE)
5588 fatal ("unsupported wide integer operation");
5589 }
5590
5591 /* Check operands of a binary/comparison op. */
5592 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5593 {
5594 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5595 if (GET_MODE_CLASS (mode) == MODE_INT
5596 && mode > MAX_INTEGER_COMPUTATION_MODE)
5597 fatal ("unsupported wide integer operation");
5598
5599 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5600 if (GET_MODE_CLASS (mode) == MODE_INT
5601 && mode > MAX_INTEGER_COMPUTATION_MODE)
5602 fatal ("unsupported wide integer operation");
5603 }
5604}
5605#endif
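
/* For instance, with MAX_INTEGER_COMPUTATION_MODE defined as SImode, a
   DImode addition fails the `mode > MAX_INTEGER_COMPUTATION_MODE' tests
   above and is rejected with "unsupported wide integer operation".  */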
5606
14a774a9
RK
5607\f
5608/* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5609 has any readonly fields. If any of the fields have types that
5610 contain readonly fields, return true as well. */
5611
5612static int
5613readonly_fields_p (type)
5614 tree type;
5615{
5616 tree field;
5617
5618 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
77fd6d10
MM
5619 if (TREE_CODE (field) == FIELD_DECL
5620 && (TREE_READONLY (field)
5621 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5622 && readonly_fields_p (TREE_TYPE (field)))))
14a774a9
RK
5623 return 1;
5624
5625 return 0;
5626}
bbf6f052
RK
5627\f
5628/* expand_expr: generate code for computing expression EXP.
5629 An rtx for the computed value is returned. The value is never null.
5630 In the case of a void EXP, const0_rtx is returned.
5631
5632 The value may be stored in TARGET if TARGET is nonzero.
5633 TARGET is just a suggestion; callers must assume that
5634 the rtx returned may not be the same as TARGET.
5635
5636 If TARGET is CONST0_RTX, it means that the value will be ignored.
5637
5638 If TMODE is not VOIDmode, it suggests generating the
5639 result in mode TMODE. But this is done only when convenient.
5640 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5641 TMODE is just a suggestion; callers must assume that
5642 the rtx returned may not have mode TMODE.
5643
d6a5ac33
RK
5644 Note that TARGET may have neither TMODE nor MODE. In that case, it
5645 probably will not be used.
bbf6f052
RK
5646
5647 If MODIFIER is EXPAND_SUM then when EXP is an addition
5648 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5649 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5650 products as above, or REG or MEM, or constant.
5651 Ordinarily in such cases we would output mul or add instructions
5652 and then return a pseudo reg containing the sum.
5653
5654 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5655 it also marks a label as absolutely required (it can't be dead).
26fcb35a 5656 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
5657 This is used for outputting expressions used in initializers.
5658
5659 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5660 with a constant address even if that address is not normally legitimate.
5661 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
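
/* For example, the most common call shape in this file is

	expand_expr (exp, NULL_RTX, VOIDmode, 0);

   which lets EXP choose both its target and its natural mode; the
   trailing 0 is EXPAND_NORMAL.  */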
bbf6f052
RK
5662
5663rtx
5664expand_expr (exp, target, tmode, modifier)
5665 register tree exp;
5666 rtx target;
5667 enum machine_mode tmode;
5668 enum expand_modifier modifier;
5669{
5670 register rtx op0, op1, temp;
5671 tree type = TREE_TYPE (exp);
5672 int unsignedp = TREE_UNSIGNED (type);
68557e14 5673 register enum machine_mode mode;
bbf6f052
RK
5674 register enum tree_code code = TREE_CODE (exp);
5675 optab this_optab;
68557e14
ML
5676 rtx subtarget, original_target;
5677 int ignore;
bbf6f052 5678 tree context;
921b3427
RK
5679 /* Used by check-memory-usage to make modifier read only. */
5680 enum expand_modifier ro_modifier;
bbf6f052 5681
68557e14
ML
5682 /* Handle ERROR_MARK before anybody tries to access its type. */
5683 if (TREE_CODE (exp) == ERROR_MARK)
5684 {
5685 op0 = CONST0_RTX (tmode);
5686 if (op0 != 0)
5687 return op0;
5688 return const0_rtx;
5689 }
5690
5691 mode = TYPE_MODE (type);
5692 /* Use subtarget as the target for operand 0 of a binary operation. */
5693 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5694 original_target = target;
5695 ignore = (target == const0_rtx
5696 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5697 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5698 || code == COND_EXPR)
5699 && TREE_CODE (type) == VOID_TYPE));
5700
921b3427
RK
5701 /* Make a read-only version of the modifier. */
5702 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5703 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5704 ro_modifier = modifier;
5705 else
5706 ro_modifier = EXPAND_NORMAL;
ca695ac9 5707
bbf6f052
RK
5708 /* Don't use hard regs as subtargets, because the combiner
5709 can only handle pseudo regs. */
5710 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5711 subtarget = 0;
5712 /* Avoid subtargets inside loops,
5713 since they hide some invariant expressions. */
5714 if (preserve_subexpressions_p ())
5715 subtarget = 0;
5716
dd27116b
RK
5717 /* If we are going to ignore this result, we need only do something
5718 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
5719 is, short-circuit the most common cases here. Note that we must
5720 not call expand_expr with anything but const0_rtx in case this
5721 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 5722
dd27116b
RK
5723 if (ignore)
5724 {
5725 if (! TREE_SIDE_EFFECTS (exp))
5726 return const0_rtx;
5727
14a774a9
RK
5728 /* Ensure we reference a volatile object even if value is ignored, but
5729 don't do this if all we are doing is taking its address. */
dd27116b
RK
5730 if (TREE_THIS_VOLATILE (exp)
5731 && TREE_CODE (exp) != FUNCTION_DECL
14a774a9
RK
5732 && mode != VOIDmode && mode != BLKmode
5733 && modifier != EXPAND_CONST_ADDRESS)
dd27116b 5734 {
921b3427 5735 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
dd27116b
RK
5736 if (GET_CODE (temp) == MEM)
5737 temp = copy_to_reg (temp);
5738 return const0_rtx;
5739 }
5740
14a774a9
RK
5741 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5742 || code == INDIRECT_REF || code == BUFFER_REF)
dd27116b 5743 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 5744 VOIDmode, ro_modifier);
14a774a9
RK
5745 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5746 || code == ARRAY_REF)
dd27116b 5747 {
921b3427
RK
5748 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5749 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
dd27116b
RK
5750 return const0_rtx;
5751 }
5752 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5753 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5754 /* If the second operand has no side effects, just evaluate
0f41302f 5755 the first. */
dd27116b 5756 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 5757 VOIDmode, ro_modifier);
14a774a9
RK
5758 else if (code == BIT_FIELD_REF)
5759 {
5760 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5761 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5762 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5763 return const0_rtx;
5764 }
5765
90764a87 5766 target = 0;
dd27116b 5767 }
bbf6f052 5768
dbecbbe4 5769#ifdef MAX_INTEGER_COMPUTATION_MODE
5f652c07
JM
5770 /* Only check stuff here if the mode we want is different from the mode
5771 of the expression; if it's the same, check_max_integer_computation_mode
5772 will handle it. Do we really need to check this stuff at all? */
5773
ce3c0b53 5774 if (target
5f652c07 5775 && GET_MODE (target) != mode
ce3c0b53
JL
5776 && TREE_CODE (exp) != INTEGER_CST
5777 && TREE_CODE (exp) != PARM_DECL
ee06cc21
JL
5778 && TREE_CODE (exp) != ARRAY_REF
5779 && TREE_CODE (exp) != COMPONENT_REF
5780 && TREE_CODE (exp) != BIT_FIELD_REF
5781 && TREE_CODE (exp) != INDIRECT_REF
6bcd94ae 5782 && TREE_CODE (exp) != CALL_EXPR
6ab46dff
GRK
5783 && TREE_CODE (exp) != VAR_DECL
5784 && TREE_CODE (exp) != RTL_EXPR)
dbecbbe4
JL
5785 {
5786 enum machine_mode mode = GET_MODE (target);
5787
5788 if (GET_MODE_CLASS (mode) == MODE_INT
5789 && mode > MAX_INTEGER_COMPUTATION_MODE)
5790 fatal ("unsupported wide integer operation");
5791 }
5792
5f652c07
JM
5793 if (tmode != mode
5794 && TREE_CODE (exp) != INTEGER_CST
ce3c0b53 5795 && TREE_CODE (exp) != PARM_DECL
ee06cc21
JL
5796 && TREE_CODE (exp) != ARRAY_REF
5797 && TREE_CODE (exp) != COMPONENT_REF
5798 && TREE_CODE (exp) != BIT_FIELD_REF
5799 && TREE_CODE (exp) != INDIRECT_REF
ce3c0b53 5800 && TREE_CODE (exp) != VAR_DECL
6bcd94ae 5801 && TREE_CODE (exp) != CALL_EXPR
6ab46dff 5802 && TREE_CODE (exp) != RTL_EXPR
71bca506 5803 && GET_MODE_CLASS (tmode) == MODE_INT
dbecbbe4
JL
5804 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5805 fatal ("unsupported wide integer operation");
5806
5807 check_max_integer_computation_mode (exp);
5808#endif
5809
e44842fe
RK
5810 /* If will do cse, generate all results into pseudo registers
5811 since 1) that allows cse to find more things
5812 and 2) otherwise cse could produce an insn the machine
5813 cannot support. */
5814
bbf6f052
RK
5815 if (! cse_not_expected && mode != BLKmode && target
5816 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5817 target = subtarget;
5818
bbf6f052
RK
5819 switch (code)
5820 {
5821 case LABEL_DECL:
b552441b
RS
5822 {
5823 tree function = decl_function_context (exp);
5824 /* Handle using a label in a containing function. */
d0977240
RK
5825 if (function != current_function_decl
5826 && function != inline_function_decl && function != 0)
b552441b
RS
5827 {
5828 struct function *p = find_function_data (function);
5829 /* Allocate in the memory associated with the function
5830 that the label is in. */
5831 push_obstacks (p->function_obstack,
5832 p->function_maybepermanent_obstack);
5833
49ad7cfa
BS
5834 p->expr->x_forced_labels
5835 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5836 p->expr->x_forced_labels);
b552441b
RS
5837 pop_obstacks ();
5838 }
ab87f8c8
JL
5839 else
5840 {
ab87f8c8
JL
5841 if (modifier == EXPAND_INITIALIZER)
5842 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5843 label_rtx (exp),
5844 forced_labels);
5845 }
c5c76735 5846
38a448ca
RH
5847 temp = gen_rtx_MEM (FUNCTION_MODE,
5848 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
d0977240
RK
5849 if (function != current_function_decl
5850 && function != inline_function_decl && function != 0)
26fcb35a
RS
5851 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5852 return temp;
b552441b 5853 }
bbf6f052
RK
5854
5855 case PARM_DECL:
5856 if (DECL_RTL (exp) == 0)
5857 {
5858 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 5859 return CONST0_RTX (mode);
bbf6f052
RK
5860 }
5861
0f41302f 5862 /* ... fall through ... */
d6a5ac33 5863
bbf6f052 5864 case VAR_DECL:
2dca20cd
RS
5865 /* If a static var's type was incomplete when the decl was written,
5866 but the type is complete now, lay out the decl now. */
5867 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5868 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5869 {
5870 push_obstacks_nochange ();
5871 end_temporary_allocation ();
5872 layout_decl (exp, 0);
5873 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5874 pop_obstacks ();
5875 }
d6a5ac33 5876
7d384cc0
KR
5877 /* Although static-storage variables start off initialized, according to
5878 ANSI C, a memcpy could overwrite them with uninitialized values. So
5879 we check them too. This also lets us check for read-only variables
5880 accessed via a non-const declaration, in case it won't be detected
5881 any other way (e.g., in an embedded system or OS kernel without
5882 memory protection).
5883
5884 Aggregates are not checked here; they're handled elsewhere. */
01d939e8 5885 if (cfun && current_function_check_memory_usage
49ad7cfa 5886 && code == VAR_DECL
921b3427 5887 && GET_CODE (DECL_RTL (exp)) == MEM
921b3427
RK
5888 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5889 {
5890 enum memory_use_mode memory_usage;
5891 memory_usage = get_memory_usage_from_modifier (modifier);
5892
5893 if (memory_usage != MEMORY_USE_DONT)
5894 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 5895 XEXP (DECL_RTL (exp), 0), Pmode,
921b3427
RK
5896 GEN_INT (int_size_in_bytes (type)),
5897 TYPE_MODE (sizetype),
956d6950
JL
5898 GEN_INT (memory_usage),
5899 TYPE_MODE (integer_type_node));
921b3427
RK
5900 }
5901
0f41302f 5902 /* ... fall through ... */
d6a5ac33 5903
2dca20cd 5904 case FUNCTION_DECL:
bbf6f052
RK
5905 case RESULT_DECL:
5906 if (DECL_RTL (exp) == 0)
5907 abort ();
d6a5ac33 5908
e44842fe
RK
5909 /* Ensure variable marked as used even if it doesn't go through
5910 a parser. If it hasn't been used yet, write out an external
5911 definition. */
5912 if (! TREE_USED (exp))
5913 {
5914 assemble_external (exp);
5915 TREE_USED (exp) = 1;
5916 }
5917
dc6d66b3
RK
5918 /* Show we haven't gotten RTL for this yet. */
5919 temp = 0;
5920
bbf6f052
RK
5921 /* Handle variables inherited from containing functions. */
5922 context = decl_function_context (exp);
5923
5924 /* We treat inline_function_decl as an alias for the current function
5925 because that is the inline function whose vars, types, etc.
5926 are being merged into the current function.
5927 See expand_inline_function. */
d6a5ac33 5928
bbf6f052
RK
5929 if (context != 0 && context != current_function_decl
5930 && context != inline_function_decl
5931 /* If var is static, we don't need a static chain to access it. */
5932 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5933 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5934 {
5935 rtx addr;
5936
5937 /* Mark as non-local and addressable. */
81feeecb 5938 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
5939 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5940 abort ();
bbf6f052
RK
5941 mark_addressable (exp);
5942 if (GET_CODE (DECL_RTL (exp)) != MEM)
5943 abort ();
5944 addr = XEXP (DECL_RTL (exp), 0);
5945 if (GET_CODE (addr) == MEM)
38a448ca
RH
5946 addr = gen_rtx_MEM (Pmode,
5947 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
5948 else
5949 addr = fix_lexical_addr (addr, exp);
dc6d66b3 5950 temp = change_address (DECL_RTL (exp), mode, addr);
bbf6f052 5951 }
4af3895e 5952
bbf6f052
RK
5953 /* This is the case of an array whose size is to be determined
5954 from its initializer, while the initializer is still being parsed.
5955 See expand_decl. */
d6a5ac33 5956
dc6d66b3
RK
5957 else if (GET_CODE (DECL_RTL (exp)) == MEM
5958 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5959 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
bbf6f052 5960 XEXP (DECL_RTL (exp), 0));
d6a5ac33
RK
5961
5962 /* If DECL_RTL is memory, we are in the normal case and either
5963 the address is not valid or it is not a register and -fforce-addr
5964 is specified, get the address into a register. */
5965
dc6d66b3
RK
5966 else if (GET_CODE (DECL_RTL (exp)) == MEM
5967 && modifier != EXPAND_CONST_ADDRESS
5968 && modifier != EXPAND_SUM
5969 && modifier != EXPAND_INITIALIZER
5970 && (! memory_address_p (DECL_MODE (exp),
5971 XEXP (DECL_RTL (exp), 0))
5972 || (flag_force_addr
5973 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5974 temp = change_address (DECL_RTL (exp), VOIDmode,
d6a5ac33 5975 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 5976
dc6d66b3
RK
5977 /* If we got something, return it. But first, set the alignment if
5978 the address is a register. */
5979 if (temp != 0)
5980 {
5981 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5982 mark_reg_pointer (XEXP (temp, 0),
5983 DECL_ALIGN (exp) / BITS_PER_UNIT);
5984
5985 return temp;
5986 }
5987
1499e0a8
RK
5988 /* If the mode of DECL_RTL does not match that of the decl, it
5989 must be a promoted value. We return a SUBREG of the wanted mode,
5990 but mark it so that we know that it was already extended. */
5991
5992 if (GET_CODE (DECL_RTL (exp)) == REG
5993 && GET_MODE (DECL_RTL (exp)) != mode)
5994 {
1499e0a8
RK
5995 /* Get the signedness used for this variable. Ensure we get the
5996 same mode we got when the variable was declared. */
78911e8b
RK
5997 if (GET_MODE (DECL_RTL (exp))
5998 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
1499e0a8
RK
5999 abort ();
6000
38a448ca 6001 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
1499e0a8
RK
6002 SUBREG_PROMOTED_VAR_P (temp) = 1;
6003 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6004 return temp;
6005 }
6006
bbf6f052
RK
6007 return DECL_RTL (exp);
6008
6009 case INTEGER_CST:
6010 return immed_double_const (TREE_INT_CST_LOW (exp),
05bccae2 6011 TREE_INT_CST_HIGH (exp), mode);
bbf6f052
RK
6012
6013 case CONST_DECL:
921b3427
RK
6014 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6015 EXPAND_MEMORY_USE_BAD);
bbf6f052
RK
6016
6017 case REAL_CST:
6018 /* If optimized, generate immediate CONST_DOUBLE
6019 which will be turned into memory by reload if necessary.
6020
6021 We used to force a register so that loop.c could see it. But
6022 this does not allow gen_* patterns to perform optimizations with
6023 the constants. It also produces two insns in cases like "x = 1.0;".
6024 On most machines, floating-point constants are not permitted in
6025 many insns, so we'd end up copying it to a register in any case.
6026
6027 Now, we do the copying in expand_binop, if appropriate. */
6028 return immed_real_const (exp);
6029
6030 case COMPLEX_CST:
6031 case STRING_CST:
6032 if (! TREE_CST_RTL (exp))
6033 output_constant_def (exp);
6034
6035 /* TREE_CST_RTL probably contains a constant address.
6036 On RISC machines where a constant address isn't valid,
6037 make some insns to get that address into a register. */
6038 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6039 && modifier != EXPAND_CONST_ADDRESS
6040 && modifier != EXPAND_INITIALIZER
6041 && modifier != EXPAND_SUM
d6a5ac33
RK
6042 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6043 || (flag_force_addr
6044 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
bbf6f052
RK
6045 return change_address (TREE_CST_RTL (exp), VOIDmode,
6046 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6047 return TREE_CST_RTL (exp);
6048
bf1e5319 6049 case EXPR_WITH_FILE_LOCATION:
b24f65cd
APB
6050 {
6051 rtx to_return;
6052 char *saved_input_filename = input_filename;
6053 int saved_lineno = lineno;
6054 input_filename = EXPR_WFL_FILENAME (exp);
6055 lineno = EXPR_WFL_LINENO (exp);
6056 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6057 emit_line_note (input_filename, lineno);
6058 /* Possibly avoid switching back and forth here. */
6059 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6060 input_filename = saved_input_filename;
6061 lineno = saved_lineno;
6062 return to_return;
6063 }
bf1e5319 6064
bbf6f052
RK
6065 case SAVE_EXPR:
6066 context = decl_function_context (exp);
d6a5ac33 6067
d0977240
RK
6068 /* If this SAVE_EXPR was at global context, assume we are an
6069 initialization function and move it into our context. */
6070 if (context == 0)
6071 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6072
bbf6f052
RK
6073 /* We treat inline_function_decl as an alias for the current function
6074 because that is the inline function whose vars, types, etc.
6075 are being merged into the current function.
6076 See expand_inline_function. */
6077 if (context == current_function_decl || context == inline_function_decl)
6078 context = 0;
6079
6080 /* If this is non-local, handle it. */
6081 if (context)
6082 {
d0977240
RK
6083 /* The following call just exists to abort if the context is
6084 not of a containing function. */
6085 find_function_data (context);
6086
bbf6f052
RK
6087 temp = SAVE_EXPR_RTL (exp);
6088 if (temp && GET_CODE (temp) == REG)
6089 {
6090 put_var_into_stack (exp);
6091 temp = SAVE_EXPR_RTL (exp);
6092 }
6093 if (temp == 0 || GET_CODE (temp) != MEM)
6094 abort ();
6095 return change_address (temp, mode,
6096 fix_lexical_addr (XEXP (temp, 0), exp));
6097 }
6098 if (SAVE_EXPR_RTL (exp) == 0)
6099 {
06089a8b
RK
6100 if (mode == VOIDmode)
6101 temp = const0_rtx;
6102 else
e5e809f4 6103 temp = assign_temp (type, 3, 0, 0);
1499e0a8 6104
bbf6f052 6105 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 6106 if (!optimize && GET_CODE (temp) == REG)
38a448ca
RH
6107 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6108 save_expr_regs);
ff78f773
RK
6109
6110 /* If the mode of TEMP does not match that of the expression, it
6111 must be a promoted value. We pass store_expr a SUBREG of the
6112 wanted mode but mark it so that we know that it was already
6113 extended. Note that `unsignedp' was modified above in
6114 this case. */
6115
6116 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6117 {
38a448ca 6118 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
ff78f773
RK
6119 SUBREG_PROMOTED_VAR_P (temp) = 1;
6120 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6121 }
6122
4c7a0be9 6123 if (temp == const0_rtx)
921b3427
RK
6124 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6125 EXPAND_MEMORY_USE_BAD);
4c7a0be9
JW
6126 else
6127 store_expr (TREE_OPERAND (exp, 0), temp, 0);
e5e809f4
JL
6128
6129 TREE_USED (exp) = 1;
bbf6f052 6130 }
1499e0a8
RK
6131
6132 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6133 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 6134 but mark it so that we know that it was already extended. */
1499e0a8
RK
6135
6136 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6137 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6138 {
e70d22c8
RK
6139 /* Compute the signedness and make the proper SUBREG. */
6140 promote_mode (type, mode, &unsignedp, 0);
38a448ca 6141 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
1499e0a8
RK
6142 SUBREG_PROMOTED_VAR_P (temp) = 1;
6143 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6144 return temp;
6145 }
6146
bbf6f052
RK
6147 return SAVE_EXPR_RTL (exp);
6148
679163cf
MS
6149 case UNSAVE_EXPR:
6150 {
6151 rtx temp;
6152 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6153 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6154 return temp;
6155 }
6156
b50d17a1 6157 case PLACEHOLDER_EXPR:
e9a25f70
JL
6158 {
6159 tree placeholder_expr;
6160
6161 /* If there is an object on the head of the placeholder list,
e5e809f4 6162 see if some object in it of type TYPE or a pointer to it. For
e9a25f70
JL
6163 further information, see tree.def. */
6164 for (placeholder_expr = placeholder_list;
6165 placeholder_expr != 0;
6166 placeholder_expr = TREE_CHAIN (placeholder_expr))
6167 {
6168 tree need_type = TYPE_MAIN_VARIANT (type);
6169 tree object = 0;
6170 tree old_list = placeholder_list;
6171 tree elt;
6172
e5e809f4
JL
6173 /* Find the outermost reference that is of the type we want.
6174 If none, see if any object has a type that is a pointer to
6175 the type we want. */
6176 for (elt = TREE_PURPOSE (placeholder_expr);
6177 elt != 0 && object == 0;
6178 elt
6179 = ((TREE_CODE (elt) == COMPOUND_EXPR
6180 || TREE_CODE (elt) == COND_EXPR)
6181 ? TREE_OPERAND (elt, 1)
6182 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6183 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6184 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6185 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6186 ? TREE_OPERAND (elt, 0) : 0))
6187 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6188 object = elt;
e9a25f70 6189
e9a25f70 6190 for (elt = TREE_PURPOSE (placeholder_expr);
e5e809f4
JL
6191 elt != 0 && object == 0;
6192 elt
6193 = ((TREE_CODE (elt) == COMPOUND_EXPR
6194 || TREE_CODE (elt) == COND_EXPR)
6195 ? TREE_OPERAND (elt, 1)
6196 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6197 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6198 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6199 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6200 ? TREE_OPERAND (elt, 0) : 0))
6201 if (POINTER_TYPE_P (TREE_TYPE (elt))
6202 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
e9a25f70 6203 == need_type))
e5e809f4 6204 object = build1 (INDIRECT_REF, need_type, elt);
dc500fa1 6205
e9a25f70 6206 if (object != 0)
2cde2255 6207 {
e9a25f70
JL
6208 /* Expand this object skipping the list entries before
6209 it was found in case it is also a PLACEHOLDER_EXPR.
6210 In that case, we want to translate it using subsequent
6211 entries. */
6212 placeholder_list = TREE_CHAIN (placeholder_expr);
6213 temp = expand_expr (object, original_target, tmode,
6214 ro_modifier);
6215 placeholder_list = old_list;
6216 return temp;
2cde2255 6217 }
e9a25f70
JL
6218 }
6219 }
b50d17a1
RK
6220
6221 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6222 abort ();
6223
6224 case WITH_RECORD_EXPR:
6225 /* Put the object on the placeholder list, expand our first operand,
6226 and pop the list. */
6227 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6228 placeholder_list);
6229 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
921b3427 6230 tmode, ro_modifier);
b50d17a1
RK
6231 placeholder_list = TREE_CHAIN (placeholder_list);
6232 return target;
6233
70e6ca43
APB
6234 case GOTO_EXPR:
6235 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6236 expand_goto (TREE_OPERAND (exp, 0));
6237 else
6238 expand_computed_goto (TREE_OPERAND (exp, 0));
6239 return const0_rtx;
6240
bbf6f052 6241 case EXIT_EXPR:
e44842fe
RK
6242 expand_exit_loop_if_false (NULL_PTR,
6243 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
6244 return const0_rtx;
6245
f42e28dd
APB
6246 case LABELED_BLOCK_EXPR:
6247 if (LABELED_BLOCK_BODY (exp))
6248 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6249 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6250 return const0_rtx;
6251
6252 case EXIT_BLOCK_EXPR:
6253 if (EXIT_BLOCK_RETURN (exp))
ab87f8c8 6254 sorry ("returned value in block_exit_expr");
f42e28dd
APB
6255 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6256 return const0_rtx;
6257
bbf6f052 6258 case LOOP_EXPR:
0088fcb1 6259 push_temp_slots ();
bbf6f052
RK
6260 expand_start_loop (1);
6261 expand_expr_stmt (TREE_OPERAND (exp, 0));
6262 expand_end_loop ();
0088fcb1 6263 pop_temp_slots ();
bbf6f052
RK
6264
6265 return const0_rtx;
6266
6267 case BIND_EXPR:
6268 {
6269 tree vars = TREE_OPERAND (exp, 0);
6270 int vars_need_expansion = 0;
6271
6272 /* Need to open a binding contour here because
e976b8b2 6273 if there are any cleanups they must be contained here. */
8e91754e 6274 expand_start_bindings (2);
bbf6f052 6275
2df53c0b
RS
6276 /* Mark the corresponding BLOCK for output in its proper place. */
6277 if (TREE_OPERAND (exp, 2) != 0
6278 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6279 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
6280
6281 /* If VARS have not yet been expanded, expand them now. */
6282 while (vars)
6283 {
6284 if (DECL_RTL (vars) == 0)
6285 {
6286 vars_need_expansion = 1;
6287 expand_decl (vars);
6288 }
6289 expand_decl_init (vars);
6290 vars = TREE_CHAIN (vars);
6291 }
6292
921b3427 6293 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
bbf6f052
RK
6294
6295 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6296
6297 return temp;
6298 }
6299
6300 case RTL_EXPR:
83b853c9
JM
6301 if (RTL_EXPR_SEQUENCE (exp))
6302 {
6303 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6304 abort ();
6305 emit_insns (RTL_EXPR_SEQUENCE (exp));
6306 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6307 }
64dc53f3
MM
6308 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6309 free_temps_for_rtl_expr (exp);
bbf6f052
RK
6310 return RTL_EXPR_RTL (exp);
6311
6312 case CONSTRUCTOR:
dd27116b
RK
6313 /* If we don't need the result, just ensure we evaluate any
6314 subexpressions. */
6315 if (ignore)
6316 {
6317 tree elt;
6318 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
921b3427
RK
6319 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6320 EXPAND_MEMORY_USE_BAD);
dd27116b
RK
6321 return const0_rtx;
6322 }
3207b172 6323
4af3895e
JVA
6324 /* All elts simple constants => refer to a constant in memory. But
6325 if this is a non-BLKmode mode, let it store a field at a time
6326 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 6327 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
6328 store directly into the target unless the type is large enough
6329 that memcpy will be used. If we are making an initializer and
3207b172 6330 all operands are constant, put it in memory as well. */
dd27116b 6331 else if ((TREE_STATIC (exp)
3207b172 6332 && ((mode == BLKmode
e5e809f4 6333 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 6334 || TREE_ADDRESSABLE (exp)
05bccae2
RK
6335 || (TREE_CODE (TYPE_SIZE_UNIT (type)) == INTEGER_CST
6336 && TREE_INT_CST_HIGH (TYPE_SIZE_UNIT (type)) == 0
6337 && (! MOVE_BY_PIECES_P
6338 (TREE_INT_CST_LOW (TYPE_SIZE_UNIT (type)),
fbe1758d 6339 TYPE_ALIGN (type) / BITS_PER_UNIT))
9de08200 6340 && ! mostly_zeros_p (exp))))
dd27116b 6341 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052
RK
6342 {
6343 rtx constructor = output_constant_def (exp);
b552441b
RS
6344 if (modifier != EXPAND_CONST_ADDRESS
6345 && modifier != EXPAND_INITIALIZER
6346 && modifier != EXPAND_SUM
d6a5ac33
RK
6347 && (! memory_address_p (GET_MODE (constructor),
6348 XEXP (constructor, 0))
6349 || (flag_force_addr
6350 && GET_CODE (XEXP (constructor, 0)) != REG)))
bbf6f052
RK
6351 constructor = change_address (constructor, VOIDmode,
6352 XEXP (constructor, 0));
6353 return constructor;
6354 }
6355
bbf6f052
RK
6356 else
6357 {
e9ac02a6
JW
6358 /* Handle calls that pass values in multiple non-contiguous
6359 locations. The Irix 6 ABI has examples of this. */
e5e809f4 6360 if (target == 0 || ! safe_from_p (target, exp, 1)
e9ac02a6 6361 || GET_CODE (target) == PARALLEL)
06089a8b
RK
6362 {
6363 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6364 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6365 else
6366 target = assign_temp (type, 0, 1, 1);
6367 }
07604beb
RK
6368
6369 if (TREE_READONLY (exp))
6370 {
9151b3bf 6371 if (GET_CODE (target) == MEM)
effbcc6a
RK
6372 target = copy_rtx (target);
6373
07604beb
RK
6374 RTX_UNCHANGING_P (target) = 1;
6375 }
6376
b7010412
RK
6377 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6378 int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052
RK
6379 return target;
6380 }
6381
6382 case INDIRECT_REF:
6383 {
6384 tree exp1 = TREE_OPERAND (exp, 0);
6385 tree exp2;
7581a30f
JW
6386 tree index;
6387 tree string = string_constant (exp1, &index);
7581a30f 6388
06eaa86f 6389 /* Try to optimize reads from const strings. */
7581a30f
JW
6390 if (string
6391 && TREE_CODE (string) == STRING_CST
6392 && TREE_CODE (index) == INTEGER_CST
05bccae2 6393 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7581a30f 6394 && GET_MODE_CLASS (mode) == MODE_INT
06eaa86f
JW
6395 && GET_MODE_SIZE (mode) == 1
6396 && modifier != EXPAND_MEMORY_USE_WO)
05bccae2
RK
6397 return
6398 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
bbf6f052 6399
405f0da6
JW
6400 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6401 op0 = memory_address (mode, op0);
8c8a8e34 6402
01d939e8 6403 if (cfun && current_function_check_memory_usage
49ad7cfa 6404 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
921b3427
RK
6405 {
6406 enum memory_use_mode memory_usage;
6407 memory_usage = get_memory_usage_from_modifier (modifier);
6408
6409 if (memory_usage != MEMORY_USE_DONT)
c85f7c16
JL
6410 {
6411 in_check_memory_usage = 1;
6412 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 6413 op0, Pmode,
c85f7c16
JL
6414 GEN_INT (int_size_in_bytes (type)),
6415 TYPE_MODE (sizetype),
6416 GEN_INT (memory_usage),
6417 TYPE_MODE (integer_type_node));
6418 in_check_memory_usage = 0;
6419 }
921b3427
RK
6420 }
6421
38a448ca 6422 temp = gen_rtx_MEM (mode, op0);
8c8a8e34
JW
6423 /* If address was computed by addition,
6424 mark this as an element of an aggregate. */
9ec36da5
JL
6425 if (TREE_CODE (exp1) == PLUS_EXPR
6426 || (TREE_CODE (exp1) == SAVE_EXPR
6427 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
05e3bdb9 6428 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
8c8a8e34
JW
6429 || (TREE_CODE (exp1) == ADDR_EXPR
6430 && (exp2 = TREE_OPERAND (exp1, 0))
b5f88157 6431 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
c6df88cb 6432 MEM_SET_IN_STRUCT_P (temp, 1);
b5f88157 6433
2c4c436a 6434 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
41472af8 6435 MEM_ALIAS_SET (temp) = get_alias_set (exp);
1125706f
RK
6436
6437 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6438 here, because, in C and C++, the fact that a location is accessed
6439 through a pointer to const does not mean that the value there can
6440 never change. Languages where it can never change should
6441 also set TREE_STATIC. */
5cb7a25a 6442 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
14a774a9
RK
6443
6444 /* If we are writing to this object and its type is a record with
6445 readonly fields, we must mark it as readonly so it will
6446 conflict with readonly references to those fields. */
6447 if (modifier == EXPAND_MEMORY_USE_WO
6448 && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
6449 RTX_UNCHANGING_P (temp) = 1;
6450
8c8a8e34
JW
6451 return temp;
6452 }
bbf6f052
RK
6453
6454 case ARRAY_REF:
742920c7
RK
6455 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6456 abort ();
bbf6f052 6457
bbf6f052 6458 {
742920c7
RK
6459 tree array = TREE_OPERAND (exp, 0);
6460 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6461 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
fed3cef0 6462 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
08293add 6463 HOST_WIDE_INT i;
b50d17a1 6464
d4c89139
PB
6465 /* Optimize the special-case of a zero lower bound.
6466
6467 We convert the low_bound to sizetype to avoid some problems
6468 with constant folding. (E.g. suppose the lower bound is 1,
6469 and its mode is QI. Without the conversion, (ARRAY
6470 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fed3cef0 6471 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
d4c89139 6472
742920c7 6473 if (! integer_zerop (low_bound))
fed3cef0 6474 index = size_diffop (index, convert (sizetype, low_bound));
742920c7 6475
742920c7 6476 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
6477 This is not done in fold so it won't happen inside &.
6478 Don't fold if this is for wide characters since it's too
6479 difficult to do correctly and this is a very rare case. */
742920c7
RK
6480
6481 if (TREE_CODE (array) == STRING_CST
6482 && TREE_CODE (index) == INTEGER_CST
05bccae2 6483 && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
ad2e7dd0
RK
6484 && GET_MODE_CLASS (mode) == MODE_INT
6485 && GET_MODE_SIZE (mode) == 1)
05bccae2
RK
6486 return
6487 GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);
bbf6f052 6488
742920c7
RK
6489 /* If this is a constant index into a constant array,
6490 just get the value from the array. Handle both the cases when
6491 we have an explicit constructor and when our operand is a variable
6492 that was declared const. */
4af3895e 6493
05bccae2
RK
6494 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
6495 && TREE_CODE (index) == INTEGER_CST
6496 && 0 > compare_tree_int (index,
6497 list_length (CONSTRUCTOR_ELTS
6498 (TREE_OPERAND (exp, 0)))))
742920c7 6499 {
05bccae2
RK
6500 tree elem;
6501
6502 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
6503 i = TREE_INT_CST_LOW (index);
6504 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
6505 ;
6506
6507 if (elem)
6508 return expand_expr (fold (TREE_VALUE (elem)), target,
6509 tmode, ro_modifier);
742920c7 6510 }
4af3895e 6511
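#if 0
/* A plain-C sketch of the constant fold above (illustrative only;
   the array and function names are hypothetical): a constant
   in-range index into a CONSTRUCTOR without side effects is folded
   directly to the matching initializer element.  */
static const int month_len[2] = { 31, 28 };

static int
fold_const_array_read (long index)
{
  if (index < 0 || index >= 2)
    return -1;			/* out of range; expand normally */
  return month_len[index];	/* the folded TREE_VALUE */
}
#endif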
742920c7
RK
6512 else if (optimize >= 1
6513 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6514 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6515 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6516 {
08293add 6517 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
6518 {
6519 tree init = DECL_INITIAL (array);
6520
742920c7
RK
6521 if (TREE_CODE (init) == CONSTRUCTOR)
6522 {
 6523		      tree elem;
6524
05bccae2
RK
6525 for (elem = CONSTRUCTOR_ELTS (init);
6526 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
6527 elem = TREE_CHAIN (elem))
6528 ;
6529
742920c7
RK
6530 if (elem)
6531 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 6532 tmode, ro_modifier);
742920c7
RK
6533 }
6534 else if (TREE_CODE (init) == STRING_CST
05bccae2
RK
6535 && 0 > compare_tree_int (index,
6536 TREE_STRING_LENGTH (init)))
08293add
RK
6537 return (GEN_INT
6538 (TREE_STRING_POINTER
6539 (init)[TREE_INT_CST_LOW (index)]));
742920c7
RK
6540 }
6541 }
6542 }
8c8a8e34 6543
08293add 6544 /* ... fall through ... */
bbf6f052
RK
6545
6546 case COMPONENT_REF:
6547 case BIT_FIELD_REF:
4af3895e 6548 /* If the operand is a CONSTRUCTOR, we can just extract the
7a0b7b9a
RK
6549 appropriate field if it is present. Don't do this if we have
6550 already written the data since we want to refer to that copy
6551 and varasm.c assumes that's what we'll do. */
4af3895e 6552 if (code != ARRAY_REF
7a0b7b9a
RK
6553 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6554 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4af3895e
JVA
6555 {
6556 tree elt;
6557
6558 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6559 elt = TREE_CHAIN (elt))
86b5812c
RK
6560 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6561 /* We can normally use the value of the field in the
6562 CONSTRUCTOR. However, if this is a bitfield in
6563 an integral mode that we can fit in a HOST_WIDE_INT,
6564 we must mask only the number of bits in the bitfield,
6565 since this is done implicitly by the constructor. If
6566 the bitfield does not meet either of those conditions,
6567 we can't do this optimization. */
6568 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6569 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6570 == MODE_INT)
6571 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6572 <= HOST_BITS_PER_WIDE_INT))))
6573 {
6574 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6575 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6576 {
9df2c88c
RK
6577 HOST_WIDE_INT bitsize
6578 = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));
86b5812c
RK
6579
6580 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6581 {
6582 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6583 op0 = expand_and (op0, op1, target);
6584 }
6585 else
6586 {
e5e809f4
JL
6587 enum machine_mode imode
6588 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c 6589 tree count
e5e809f4
JL
6590 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6591 0);
86b5812c
RK
6592
6593 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6594 target, 0);
6595 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6596 target, 0);
6597 }
6598 }
6599
6600 return op0;
6601 }
4af3895e
JVA
6602 }
6603
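#if 0
/* A minimal C sketch of the masking above (illustrative only;
   assumes bitsize is smaller than the width of long, and an
   arithmetic right shift for signed values, which the expand_shift
   calls above provide on the RTL side): a value taken from a
   CONSTRUCTOR for a BITSIZE-bit bitfield keeps only BITSIZE bits,
   zero-extended if unsigned, sign-extended if signed.  */
#include <limits.h>

static long
mask_bitfield_value (long val, int bitsize, int unsignedp)
{
  int count = (int) sizeof (long) * CHAR_BIT - bitsize;

  if (unsignedp)
    return val & (((long) 1 << bitsize) - 1);	/* the expand_and path */
  return (val << count) >> count;	/* shift up, sign-extend back down */
}
#endif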
bbf6f052
RK
6604 {
6605 enum machine_mode mode1;
6606 int bitsize;
6607 int bitpos;
7bb0943f 6608 tree offset;
bbf6f052 6609 int volatilep = 0;
729a2125 6610 unsigned int alignment;
839c4796
RK
6611 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6612 &mode1, &unsignedp, &volatilep,
6613 &alignment);
bbf6f052 6614
e7f3c83f
RK
6615 /* If we got back the original object, something is wrong. Perhaps
6616 we are evaluating an expression too early. In any event, don't
6617 infinitely recurse. */
6618 if (tem == exp)
6619 abort ();
6620
3d27140a 6621 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
6622 computation, since it will need a temporary and TARGET is known
 6623	     to suffice.  This occurs in unchecked conversion in Ada.  */
6624
6625 op0 = expand_expr (tem,
6626 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6627 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6628 != INTEGER_CST)
6629 ? target : NULL_RTX),
4ed67205 6630 VOIDmode,
14a774a9
RK
6631 (modifier == EXPAND_INITIALIZER
6632 || modifier == EXPAND_CONST_ADDRESS)
e5e809f4 6633 ? modifier : EXPAND_NORMAL);
bbf6f052 6634
8c8a8e34 6635 /* If this is a constant, put it into a register if it is a
14a774a9 6636	   legitimate constant and OFFSET is 0, and into memory if it isn't.  */
8c8a8e34
JW
6637 if (CONSTANT_P (op0))
6638 {
6639 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
14a774a9
RK
6640 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6641 && offset == 0)
8c8a8e34
JW
6642 op0 = force_reg (mode, op0);
6643 else
6644 op0 = validize_mem (force_const_mem (mode, op0));
6645 }
6646
7bb0943f
RS
6647 if (offset != 0)
6648 {
906c4e36 6649 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f 6650
14a774a9
RK
6651 /* If this object is in memory, put it into a register.
6652 This case can't occur in C, but can in Ada if we have
6653 unchecked conversion of an expression from a scalar type to
6654 an array or record type. */
6655 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6656 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6657 {
6658 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6659
6660 mark_temp_addr_taken (memloc);
6661 emit_move_insn (memloc, op0);
6662 op0 = memloc;
6663 }
6664
7bb0943f
RS
6665 if (GET_CODE (op0) != MEM)
6666 abort ();
2d48c13d
JL
6667
6668 if (GET_MODE (offset_rtx) != ptr_mode)
bd070e1a 6669 {
2d48c13d 6670#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 6671 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
2d48c13d 6672#else
bd070e1a 6673 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d 6674#endif
bd070e1a 6675 }
2d48c13d 6676
14a774a9 6677	    /* A constant address in OP0 can have VOIDmode; we must not try
efd07ca7 6678	       to call force_reg in that case, so avoid it.  */
89752202
HB
6679 if (GET_CODE (op0) == MEM
6680 && GET_MODE (op0) == BLKmode
efd07ca7 6681 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 6682 && bitsize != 0
89752202
HB
6683 && (bitpos % bitsize) == 0
6684 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6685 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6686 {
6687 rtx temp = change_address (op0, mode1,
6688 plus_constant (XEXP (op0, 0),
6689 (bitpos /
6690 BITS_PER_UNIT)));
6691 if (GET_CODE (XEXP (temp, 0)) == REG)
6692 op0 = temp;
6693 else
6694 op0 = change_address (op0, mode1,
6695 force_reg (GET_MODE (XEXP (temp, 0)),
6696 XEXP (temp, 0)));
6697 bitpos = 0;
6698 }
6699
6700
7bb0943f 6701 op0 = change_address (op0, VOIDmode,
38a448ca 6702 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
c5c76735
JL
6703 force_reg (ptr_mode,
6704 offset_rtx)));
7bb0943f
RS
6705 }
6706
bbf6f052
RK
6707 /* Don't forget about volatility even if this is a bitfield. */
6708 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6709 {
6710 op0 = copy_rtx (op0);
6711 MEM_VOLATILE_P (op0) = 1;
6712 }
6713
921b3427 6714 /* Check the access. */
c5c76735 6715 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
921b3427
RK
6716 {
6717 enum memory_use_mode memory_usage;
6718 memory_usage = get_memory_usage_from_modifier (modifier);
6719
6720 if (memory_usage != MEMORY_USE_DONT)
6721 {
6722 rtx to;
6723 int size;
6724
6725 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6726 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6727
 6728		    /* Check the access rights of the pointer.  */
e9a25f70
JL
6729 if (size > BITS_PER_UNIT)
6730 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 6731 to, Pmode,
e9a25f70
JL
6732 GEN_INT (size / BITS_PER_UNIT),
6733 TYPE_MODE (sizetype),
956d6950
JL
6734 GEN_INT (memory_usage),
6735 TYPE_MODE (integer_type_node));
921b3427
RK
6736 }
6737 }
6738
ccc98036
RS
6739 /* In cases where an aligned union has an unaligned object
6740 as a field, we might be extracting a BLKmode value from
6741 an integer-mode (e.g., SImode) object. Handle this case
6742 by doing the extract into an object as wide as the field
6743 (which we know to be the width of a basic mode), then
f2420d0b
JW
6744 storing into memory, and changing the mode to BLKmode.
6745 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6746 EXPAND_INITIALIZER), then we must not copy to a temporary. */
bbf6f052 6747 if (mode1 == VOIDmode
ccc98036 6748 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
f9409c3a 6749 || (modifier != EXPAND_CONST_ADDRESS
f9409c3a 6750 && modifier != EXPAND_INITIALIZER
c2722ef6
RK
6751 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6752 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6753 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
f9409c3a
JW
6754 /* If the field isn't aligned enough to fetch as a memref,
6755 fetch it as a bit field. */
e1565e65
DE
6756 || (mode1 != BLKmode
6757 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
14a774a9
RK
6758 && ((TYPE_ALIGN (TREE_TYPE (tem))
6759 < (unsigned int) GET_MODE_ALIGNMENT (mode))
dd841181
RK
6760 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
6761 /* If the type and the field are a constant size and the
6762 size of the type isn't the same size as the bitfield,
6763 we must use bitfield operations. */
6764 || ((bitsize >= 0
6765 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
6766 == INTEGER_CST)
05bccae2
RK
6767 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
6768 bitsize)))))
14a774a9
RK
6769 || (modifier != EXPAND_CONST_ADDRESS
6770 && modifier != EXPAND_INITIALIZER
6771 && mode == BLKmode
e1565e65 6772 && SLOW_UNALIGNED_ACCESS (mode, alignment)
14a774a9
RK
6773 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
6774 || bitpos % TYPE_ALIGN (type) != 0)))
bbf6f052 6775 {
bbf6f052
RK
6776 enum machine_mode ext_mode = mode;
6777
14a774a9
RK
6778 if (ext_mode == BLKmode
6779 && ! (target != 0 && GET_CODE (op0) == MEM
6780 && GET_CODE (target) == MEM
6781 && bitpos % BITS_PER_UNIT == 0))
bbf6f052
RK
6782 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6783
6784 if (ext_mode == BLKmode)
a281e72d
RK
6785 {
6786 /* In this case, BITPOS must start at a byte boundary and
6787 TARGET, if specified, must be a MEM. */
6788 if (GET_CODE (op0) != MEM
6789 || (target != 0 && GET_CODE (target) != MEM)
6790 || bitpos % BITS_PER_UNIT != 0)
6791 abort ();
6792
6793 op0 = change_address (op0, VOIDmode,
6794 plus_constant (XEXP (op0, 0),
6795 bitpos / BITS_PER_UNIT));
6796 if (target == 0)
6797 target = assign_temp (type, 0, 1, 1);
6798
6799 emit_block_move (target, op0,
6800 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6801 / BITS_PER_UNIT),
6802 1);
6803
6804 return target;
6805 }
bbf6f052 6806
dc6d66b3
RK
6807 op0 = validize_mem (op0);
6808
6809 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6810 mark_reg_pointer (XEXP (op0, 0), alignment);
6811
6812 op0 = extract_bit_field (op0, bitsize, bitpos,
bbf6f052 6813 unsignedp, target, ext_mode, ext_mode,
034f9101 6814 alignment,
bbf6f052 6815 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
6816
6817 /* If the result is a record type and BITSIZE is narrower than
6818 the mode of OP0, an integral mode, and this is a big endian
6819 machine, we must put the field into the high-order bits. */
6820 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6821 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6822 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6823 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6824 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6825 - bitsize),
6826 op0, 1);
6827
bbf6f052
RK
6828 if (mode == BLKmode)
6829 {
6830 rtx new = assign_stack_temp (ext_mode,
6831 bitsize / BITS_PER_UNIT, 0);
6832
6833 emit_move_insn (new, op0);
6834 op0 = copy_rtx (new);
6835 PUT_MODE (op0, BLKmode);
c6df88cb 6836 MEM_SET_IN_STRUCT_P (op0, 1);
bbf6f052
RK
6837 }
6838
6839 return op0;
6840 }
6841
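#if 0
/* A sketch of the big-endian adjustment above (illustrative only;
   assumes the extracted field lands in an unsigned long register):
   a field narrower than the register must end up in the high-order
   bits on big-endian targets, so the value is shifted left by the
   difference.  */
#include <limits.h>

static unsigned long
place_field_high (unsigned long field, int bitsize)
{
  int regbits = (int) sizeof (unsigned long) * CHAR_BIT;
  return field << (regbits - bitsize);
}
#endif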
05019f83
RK
6842 /* If the result is BLKmode, use that to access the object
6843 now as well. */
6844 if (mode == BLKmode)
6845 mode1 = BLKmode;
6846
bbf6f052
RK
6847 /* Get a reference to just this component. */
6848 if (modifier == EXPAND_CONST_ADDRESS
6849 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
38a448ca
RH
6850 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6851 (bitpos / BITS_PER_UNIT)));
bbf6f052
RK
6852 else
6853 op0 = change_address (op0, mode1,
6854 plus_constant (XEXP (op0, 0),
6855 (bitpos / BITS_PER_UNIT)));
41472af8
MM
6856
6857 if (GET_CODE (op0) == MEM)
6858 MEM_ALIAS_SET (op0) = get_alias_set (exp);
14a774a9 6859
dc6d66b3
RK
6860 if (GET_CODE (XEXP (op0, 0)) == REG)
6861 mark_reg_pointer (XEXP (op0, 0), alignment);
6862
c6df88cb 6863 MEM_SET_IN_STRUCT_P (op0, 1);
bbf6f052 6864 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 6865 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 6866 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 6867 || modifier == EXPAND_INITIALIZER)
bbf6f052 6868 return op0;
0d15e60c 6869 else if (target == 0)
bbf6f052 6870 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 6871
bbf6f052
RK
6872 convert_move (target, op0, unsignedp);
6873 return target;
6874 }
6875
bbf6f052
RK
6876 /* Intended for a reference to a buffer of a file-object in Pascal.
6877 But it's not certain that a special tree code will really be
6878 necessary for these. INDIRECT_REF might work for them. */
6879 case BUFFER_REF:
6880 abort ();
6881
7308a047 6882 case IN_EXPR:
7308a047 6883 {
d6a5ac33
RK
6884 /* Pascal set IN expression.
6885
6886 Algorithm:
6887 rlo = set_low - (set_low%bits_per_word);
6888 the_word = set [ (index - rlo)/bits_per_word ];
6889 bit_index = index % bits_per_word;
6890 bitmask = 1 << bit_index;
6891 return !!(the_word & bitmask); */
6892
7308a047
RS
6893 tree set = TREE_OPERAND (exp, 0);
6894 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 6895 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 6896 tree set_type = TREE_TYPE (set);
7308a047
RS
6897 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6898 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
6899 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6900 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6901 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6902 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6903 rtx setaddr = XEXP (setval, 0);
6904 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
6905 rtx rlow;
6906 rtx diff, quo, rem, addr, bit, result;
7308a047 6907
d6a5ac33
RK
6908 preexpand_calls (exp);
6909
6910 /* If domain is empty, answer is no. Likewise if index is constant
6911 and out of bounds. */
51723711 6912 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 6913 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 6914 && tree_int_cst_lt (set_high_bound, set_low_bound))
d6a5ac33
RK
6915 || (TREE_CODE (index) == INTEGER_CST
6916 && TREE_CODE (set_low_bound) == INTEGER_CST
6917 && tree_int_cst_lt (index, set_low_bound))
6918 || (TREE_CODE (set_high_bound) == INTEGER_CST
6919 && TREE_CODE (index) == INTEGER_CST
6920 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
6921 return const0_rtx;
6922
d6a5ac33
RK
6923 if (target == 0)
6924 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
6925
6926 /* If we get here, we have to generate the code for both cases
6927 (in range and out of range). */
6928
6929 op0 = gen_label_rtx ();
6930 op1 = gen_label_rtx ();
6931
6932 if (! (GET_CODE (index_val) == CONST_INT
6933 && GET_CODE (lo_r) == CONST_INT))
6934 {
c5d5d461
JL
6935 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6936 GET_MODE (index_val), iunsignedp, 0, op1);
7308a047
RS
6937 }
6938
6939 if (! (GET_CODE (index_val) == CONST_INT
6940 && GET_CODE (hi_r) == CONST_INT))
6941 {
c5d5d461
JL
6942 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6943 GET_MODE (index_val), iunsignedp, 0, op1);
7308a047
RS
6944 }
6945
6946 /* Calculate the element number of bit zero in the first word
6947 of the set. */
6948 if (GET_CODE (lo_r) == CONST_INT)
17938e57
RK
6949 rlow = GEN_INT (INTVAL (lo_r)
 6950			    & ~ ((HOST_WIDE_INT) BITS_PER_UNIT - 1));
7308a047 6951 else
17938e57
RK
6952 rlow = expand_binop (index_mode, and_optab, lo_r,
 6953			       GEN_INT (~((HOST_WIDE_INT) BITS_PER_UNIT - 1)),
d6a5ac33 6954 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 6955
d6a5ac33
RK
6956 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6957 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
6958
6959 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 6960 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 6961 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
6962 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6963
7308a047 6964 addr = memory_address (byte_mode,
d6a5ac33
RK
6965 expand_binop (index_mode, add_optab, diff,
6966 setaddr, NULL_RTX, iunsignedp,
17938e57 6967 OPTAB_LIB_WIDEN));
d6a5ac33 6968
7308a047
RS
 6969	    /* Extract the bit we want to examine.  */
6970 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 6971 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
6972 make_tree (TREE_TYPE (index), rem),
6973 NULL_RTX, 1);
6974 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6975 GET_MODE (target) == byte_mode ? target : 0,
7308a047 6976 1, OPTAB_LIB_WIDEN);
17938e57
RK
6977
6978 if (result != target)
6979 convert_move (target, result, 1);
7308a047
RS
6980
6981 /* Output the code to handle the out-of-range case. */
6982 emit_jump (op0);
6983 emit_label (op1);
6984 emit_move_insn (target, const0_rtx);
6985 emit_label (op0);
6986 return target;
6987 }
6988
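#if 0
/* A minimal C sketch of the set-membership test emitted above
   (illustrative only; `set_contains' and its parameters are
   hypothetical, not expr.c interfaces; CHAR_BIT plays the role of
   BITS_PER_UNIT).  It follows the algorithm comment at the top of
   the IN_EXPR case.  */
#include <limits.h>

static int
set_contains (const unsigned char *set, long set_low, long index)
{
  long rlo = set_low & ~((long) CHAR_BIT - 1); /* set_low - set_low % 8 */
  unsigned char the_word = set[(index - rlo) / CHAR_BIT];
  int bit_index = (int) (index % CHAR_BIT);

  return (the_word >> bit_index) & 1;
}
#endif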
bbf6f052
RK
6989 case WITH_CLEANUP_EXPR:
6990 if (RTL_EXPR_RTL (exp) == 0)
6991 {
6992 RTL_EXPR_RTL (exp)
921b3427 6993 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
e976b8b2
MS
6994 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6995
bbf6f052
RK
6996 /* That's it for this cleanup. */
6997 TREE_OPERAND (exp, 2) = 0;
6998 }
6999 return RTL_EXPR_RTL (exp);
7000
5dab5552
MS
7001 case CLEANUP_POINT_EXPR:
7002 {
e976b8b2
MS
7003 /* Start a new binding layer that will keep track of all cleanup
7004 actions to be performed. */
8e91754e 7005 expand_start_bindings (2);
e976b8b2 7006
d93d4205 7007 target_temp_slot_level = temp_slot_level;
e976b8b2 7008
921b3427 7009 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
f283f66b
JM
7010 /* If we're going to use this value, load it up now. */
7011 if (! ignore)
7012 op0 = force_not_mem (op0);
d93d4205 7013 preserve_temp_slots (op0);
e976b8b2 7014 expand_end_bindings (NULL_TREE, 0, 0);
5dab5552
MS
7015 }
7016 return op0;
7017
bbf6f052
RK
7018 case CALL_EXPR:
7019 /* Check for a built-in function. */
7020 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
7021 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7022 == FUNCTION_DECL)
bbf6f052
RK
7023 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7024 return expand_builtin (exp, target, subtarget, tmode, ignore);
d6a5ac33 7025
bbf6f052
RK
7026 /* If this call was expanded already by preexpand_calls,
7027 just return the result we got. */
7028 if (CALL_EXPR_RTL (exp) != 0)
7029 return CALL_EXPR_RTL (exp);
d6a5ac33 7030
8129842c 7031 return expand_call (exp, target, ignore);
bbf6f052
RK
7032
7033 case NON_LVALUE_EXPR:
7034 case NOP_EXPR:
7035 case CONVERT_EXPR:
7036 case REFERENCE_EXPR:
bbf6f052
RK
7037 if (TREE_CODE (type) == UNION_TYPE)
7038 {
7039 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9
RK
7040
7041 /* If both input and output are BLKmode, this conversion
7042 isn't actually doing anything unless we need to make the
7043 alignment stricter. */
7044 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7045 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7046 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7047 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7048 modifier);
7049
bbf6f052 7050 if (target == 0)
06089a8b
RK
7051 {
7052 if (mode != BLKmode)
7053 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7054 else
7055 target = assign_temp (type, 0, 1, 1);
7056 }
d6a5ac33 7057
bbf6f052
RK
7058 if (GET_CODE (target) == MEM)
7059 /* Store data into beginning of memory target. */
7060 store_expr (TREE_OPERAND (exp, 0),
1499e0a8
RK
7061 change_address (target, TYPE_MODE (valtype), 0), 0);
7062
bbf6f052
RK
7063 else if (GET_CODE (target) == REG)
7064 /* Store this field into a union of the proper type. */
14a774a9
RK
7065 store_field (target,
7066 MIN ((int_size_in_bytes (TREE_TYPE
7067 (TREE_OPERAND (exp, 0)))
7068 * BITS_PER_UNIT),
7069 GET_MODE_BITSIZE (mode)),
7070 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7071 VOIDmode, 0, 1, int_size_in_bytes (type), 0);
bbf6f052
RK
7072 else
7073 abort ();
7074
7075 /* Return the entire union. */
7076 return target;
7077 }
d6a5ac33 7078
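#if 0
/* A source-level sketch of the union conversion handled above
   (illustrative only; the union and function are hypothetical):
   converting a value to a union type stores the operand at the
   beginning of the union object and returns the entire union.  */
union number { int i; float f; };

static union number
int_to_union (int v)
{
  union number u;
  u.i = v;			/* store into beginning of target */
  return u;
}
#endif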
7f62854a
RK
7079 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7080 {
7081 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
921b3427 7082 ro_modifier);
7f62854a
RK
7083
7084 /* If the signedness of the conversion differs and OP0 is
7085 a promoted SUBREG, clear that indication since we now
7086 have to do the proper extension. */
7087 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7088 && GET_CODE (op0) == SUBREG)
7089 SUBREG_PROMOTED_VAR_P (op0) = 0;
7090
7091 return op0;
7092 }
7093
1499e0a8 7094 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
7095 if (GET_MODE (op0) == mode)
7096 return op0;
12342f90 7097
d6a5ac33
RK
7098 /* If OP0 is a constant, just convert it into the proper mode. */
7099 if (CONSTANT_P (op0))
7100 return
7101 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7102 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
12342f90 7103
26fcb35a 7104 if (modifier == EXPAND_INITIALIZER)
38a448ca 7105 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 7106
bbf6f052 7107 if (target == 0)
d6a5ac33
RK
7108 return
7109 convert_to_mode (mode, op0,
7110 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 7111 else
d6a5ac33
RK
7112 convert_move (target, op0,
7113 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
7114 return target;
7115
7116 case PLUS_EXPR:
0f41302f
MS
7117 /* We come here from MINUS_EXPR when the second operand is a
7118 constant. */
bbf6f052
RK
7119 plus_expr:
7120 this_optab = add_optab;
7121
7122 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7123 something else, make sure we add the register to the constant and
7124 then to the other thing. This case can occur during strength
7125 reduction and doing it this way will produce better code if the
7126 frame pointer or argument pointer is eliminated.
7127
7128 fold-const.c will ensure that the constant is always in the inner
7129 PLUS_EXPR, so the only case we need to do anything about is if
7130 sp, ap, or fp is our second argument, in which case we must swap
7131 the innermost first argument and our second argument. */
7132
7133 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7134 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7135 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7136 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7137 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7138 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7139 {
7140 tree t = TREE_OPERAND (exp, 1);
7141
7142 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7143 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7144 }
7145
88f63c77 7146 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
7147 something, we might be forming a constant. So try to use
7148 plus_constant. If it produces a sum and we can't accept it,
7149 use force_operand. This allows P = &ARR[const] to generate
7150 efficient code on machines where a SYMBOL_REF is not a valid
7151 address.
7152
7153 If this is an EXPAND_SUM call, always return the sum. */
c980ac49 7154 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
88f63c77 7155 || mode == ptr_mode)
bbf6f052 7156 {
c980ac49
RS
7157 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7158 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7159 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7160 {
cbbc503e
JL
7161 rtx constant_part;
7162
c980ac49
RS
7163 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7164 EXPAND_SUM);
cbbc503e
JL
7165 /* Use immed_double_const to ensure that the constant is
7166 truncated according to the mode of OP1, then sign extended
7167 to a HOST_WIDE_INT. Using the constant directly can result
7168 in non-canonical RTL in a 64x32 cross compile. */
7169 constant_part
7170 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7171 (HOST_WIDE_INT) 0,
a5efcd63 7172 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 7173 op1 = plus_constant (op1, INTVAL (constant_part));
c980ac49
RS
7174 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7175 op1 = force_operand (op1, target);
7176 return op1;
7177 }
bbf6f052 7178
c980ac49
RS
7179 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7180 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7181 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7182 {
cbbc503e
JL
7183 rtx constant_part;
7184
c980ac49
RS
7185 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7186 EXPAND_SUM);
7187 if (! CONSTANT_P (op0))
7188 {
7189 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7190 VOIDmode, modifier);
709f5be1
RS
7191 /* Don't go to both_summands if modifier
7192 says it's not right to return a PLUS. */
7193 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7194 goto binop2;
c980ac49
RS
7195 goto both_summands;
7196 }
cbbc503e
JL
7197 /* Use immed_double_const to ensure that the constant is
7198 truncated according to the mode of OP1, then sign extended
7199 to a HOST_WIDE_INT. Using the constant directly can result
7200 in non-canonical RTL in a 64x32 cross compile. */
7201 constant_part
7202 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7203 (HOST_WIDE_INT) 0,
2a94e396 7204 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 7205 op0 = plus_constant (op0, INTVAL (constant_part));
c980ac49
RS
7206 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7207 op0 = force_operand (op0, target);
7208 return op0;
7209 }
bbf6f052
RK
7210 }
7211
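#if 0
/* A C sketch of the truncate-then-sign-extend step that
   immed_double_const performs above (illustrative only; width_bits
   stands in for the bitsize of the operand's mode, assumed between 1
   and 64, with a 64-bit host-wide integer): truncating to the mode
   and then sign-extending keeps the constant canonical even in a
   64x32 cross compile.  */
static long long
canonical_constant (unsigned long long cst, int width_bits)
{
  unsigned long long mask
    = width_bits >= 64 ? ~0ULL : (1ULL << width_bits) - 1;
  unsigned long long low = cst & mask;	/* truncate to the mode */

  if (width_bits < 64 && (low >> (width_bits - 1)) != 0)
    low |= ~mask;			/* sign-extend to the host type */
  return (long long) low;
}
#endif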
7212 /* No sense saving up arithmetic to be done
7213 if it's all in the wrong mode to form part of an address.
7214 And force_operand won't know whether to sign-extend or
7215 zero-extend. */
7216 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 7217 || mode != ptr_mode)
c980ac49 7218 goto binop;
bbf6f052
RK
7219
7220 preexpand_calls (exp);
e5e809f4 7221 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7222 subtarget = 0;
7223
921b3427
RK
7224 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7225 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
bbf6f052 7226
c980ac49 7227 both_summands:
bbf6f052
RK
7228 /* Make sure any term that's a sum with a constant comes last. */
7229 if (GET_CODE (op0) == PLUS
7230 && CONSTANT_P (XEXP (op0, 1)))
7231 {
7232 temp = op0;
7233 op0 = op1;
7234 op1 = temp;
7235 }
7236 /* If adding to a sum including a constant,
7237 associate it to put the constant outside. */
7238 if (GET_CODE (op1) == PLUS
7239 && CONSTANT_P (XEXP (op1, 1)))
7240 {
7241 rtx constant_term = const0_rtx;
7242
7243 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7244 if (temp != 0)
7245 op0 = temp;
6f90e075
JW
7246 /* Ensure that MULT comes first if there is one. */
7247 else if (GET_CODE (op0) == MULT)
38a448ca 7248 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
bbf6f052 7249 else
38a448ca 7250 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
bbf6f052
RK
7251
7252 /* Let's also eliminate constants from op0 if possible. */
7253 op0 = eliminate_constant_term (op0, &constant_term);
7254
7255 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7256 their sum should be a constant. Form it into OP1, since the
7257 result we want will then be OP0 + OP1. */
7258
7259 temp = simplify_binary_operation (PLUS, mode, constant_term,
7260 XEXP (op1, 1));
7261 if (temp != 0)
7262 op1 = temp;
7263 else
38a448ca 7264 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
bbf6f052
RK
7265 }
7266
7267 /* Put a constant term last and put a multiplication first. */
7268 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7269 temp = op1, op1 = op0, op0 = temp;
7270
7271 temp = simplify_binary_operation (PLUS, mode, op0, op1);
38a448ca 7272 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
bbf6f052
RK
7273
7274 case MINUS_EXPR:
ea87523e
RK
7275 /* For initializers, we are allowed to return a MINUS of two
7276 symbolic constants. Here we handle all cases when both operands
7277 are constant. */
bbf6f052
RK
7278 /* Handle difference of two symbolic constants,
7279 for the sake of an initializer. */
7280 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7281 && really_constant_p (TREE_OPERAND (exp, 0))
7282 && really_constant_p (TREE_OPERAND (exp, 1)))
7283 {
906c4e36 7284 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
921b3427 7285 VOIDmode, ro_modifier);
906c4e36 7286 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
921b3427 7287 VOIDmode, ro_modifier);
ea87523e 7288
ea87523e
RK
7289 /* If the last operand is a CONST_INT, use plus_constant of
7290 the negated constant. Else make the MINUS. */
7291 if (GET_CODE (op1) == CONST_INT)
7292 return plus_constant (op0, - INTVAL (op1));
7293 else
38a448ca 7294 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052
RK
7295 }
7296 /* Convert A - const to A + (-const). */
7297 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7298 {
ae431183
RK
7299 tree negated = fold (build1 (NEGATE_EXPR, type,
7300 TREE_OPERAND (exp, 1)));
7301
ae431183 7302 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6fbfac92
JM
7303 /* If we can't negate the constant in TYPE, leave it alone and
7304 expand_binop will negate it for us. We used to try to do it
7305 here in the signed version of TYPE, but that doesn't work
7306 on POINTER_TYPEs. */;
ae431183
RK
7307 else
7308 {
7309 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7310 goto plus_expr;
7311 }
bbf6f052
RK
7312 }
7313 this_optab = sub_optab;
7314 goto binop;
7315
7316 case MULT_EXPR:
7317 preexpand_calls (exp);
7318 /* If first operand is constant, swap them.
7319 Thus the following special case checks need only
7320 check the second operand. */
7321 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7322 {
7323 register tree t1 = TREE_OPERAND (exp, 0);
7324 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7325 TREE_OPERAND (exp, 1) = t1;
7326 }
7327
7328 /* Attempt to return something suitable for generating an
7329 indexed address, for machines that support that. */
7330
88f63c77 7331 if (modifier == EXPAND_SUM && mode == ptr_mode
bbf6f052 7332 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 7333 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 7334 {
921b3427
RK
7335 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7336 EXPAND_SUM);
bbf6f052
RK
7337
7338 /* Apply distributive law if OP0 is x+c. */
7339 if (GET_CODE (op0) == PLUS
7340 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
c5c76735
JL
7341 return
7342 gen_rtx_PLUS
7343 (mode,
7344 gen_rtx_MULT
7345 (mode, XEXP (op0, 0),
7346 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7347 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7348 * INTVAL (XEXP (op0, 1))));
bbf6f052
RK
7349
7350 if (GET_CODE (op0) != REG)
906c4e36 7351 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
7352 if (GET_CODE (op0) != REG)
7353 op0 = copy_to_mode_reg (mode, op0);
7354
c5c76735
JL
7355 return
7356 gen_rtx_MULT (mode, op0,
7357 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
7358 }
7359
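#if 0
/* The distributive rewrite applied above, in source terms
   (illustrative only): rewriting (x + c) * d as x * d + c * d keeps
   the address computation in the base-plus-offset shape that indexed
   addressing modes want.  */
static long
scaled_index_address (long x, long c, long d)
{
  return x * d + c * d;		/* instead of (x + c) * d */
}
#endif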
e5e809f4 7360 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7361 subtarget = 0;
7362
7363 /* Check for multiplying things that have been extended
7364 from a narrower type. If this machine supports multiplying
7365 in that narrower type with a result in the desired type,
7366 do it that way, and avoid the explicit type-conversion. */
7367 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7368 && TREE_CODE (type) == INTEGER_TYPE
7369 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7370 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7371 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7372 && int_fits_type_p (TREE_OPERAND (exp, 1),
7373 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7374 /* Don't use a widening multiply if a shift will do. */
7375 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7376 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7377 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7378 ||
7379 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7380 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7381 ==
7382 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7383 /* If both operands are extended, they must either both
7384 be zero-extended or both be sign-extended. */
7385 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7386 ==
7387 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7388 {
7389 enum machine_mode innermode
7390 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
7391 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7392 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
7393 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7394 ? umul_widen_optab : smul_widen_optab);
b10af0c8 7395 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 7396 {
b10af0c8
TG
7397 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7398 {
7399 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7400 NULL_RTX, VOIDmode, 0);
7401 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7402 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7403 VOIDmode, 0);
7404 else
7405 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7406 NULL_RTX, VOIDmode, 0);
7407 goto binop2;
7408 }
7409 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7410 && innermode == word_mode)
7411 {
7412 rtx htem;
7413 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7414 NULL_RTX, VOIDmode, 0);
7415 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7416 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7417 VOIDmode, 0);
7418 else
7419 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7420 NULL_RTX, VOIDmode, 0);
7421 temp = expand_binop (mode, other_optab, op0, op1, target,
7422 unsignedp, OPTAB_LIB_WIDEN);
7423 htem = expand_mult_highpart_adjust (innermode,
7424 gen_highpart (innermode, temp),
7425 op0, op1,
7426 gen_highpart (innermode, temp),
7427 unsignedp);
7428 emit_move_insn (gen_highpart (innermode, temp), htem);
7429 return temp;
7430 }
bbf6f052
RK
7431 }
7432 }
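#if 0
/* The source pattern the widening-multiply check above recognizes
   (illustrative only; assumes int is the narrower and long long the
   wider type): both operands are extensions from the same narrower
   type, so a target widening-multiply pattern can form the wide
   product directly instead of first extending each operand.  */
static long long
widening_mul (int a, int b)
{
  return (long long) a * b;
}
#endif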
7433 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7434 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7435 return expand_mult (mode, op0, op1, target, unsignedp);
7436
7437 case TRUNC_DIV_EXPR:
7438 case FLOOR_DIV_EXPR:
7439 case CEIL_DIV_EXPR:
7440 case ROUND_DIV_EXPR:
7441 case EXACT_DIV_EXPR:
7442 preexpand_calls (exp);
e5e809f4 7443 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7444 subtarget = 0;
7445 /* Possible optimization: compute the dividend with EXPAND_SUM
 7446	 then, if the divisor is constant, we can optimize the case
 7447	 where some terms of the dividend have coefficients divisible by it.  */
7448 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7449 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7450 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7451
7452 case RDIV_EXPR:
7453 this_optab = flodiv_optab;
7454 goto binop;
7455
7456 case TRUNC_MOD_EXPR:
7457 case FLOOR_MOD_EXPR:
7458 case CEIL_MOD_EXPR:
7459 case ROUND_MOD_EXPR:
7460 preexpand_calls (exp);
e5e809f4 7461 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7462 subtarget = 0;
7463 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7464 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7465 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7466
7467 case FIX_ROUND_EXPR:
7468 case FIX_FLOOR_EXPR:
7469 case FIX_CEIL_EXPR:
7470 abort (); /* Not used for C. */
7471
7472 case FIX_TRUNC_EXPR:
906c4e36 7473 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7474 if (target == 0)
7475 target = gen_reg_rtx (mode);
7476 expand_fix (target, op0, unsignedp);
7477 return target;
7478
7479 case FLOAT_EXPR:
906c4e36 7480 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7481 if (target == 0)
7482 target = gen_reg_rtx (mode);
7483 /* expand_float can't figure out what to do if FROM has VOIDmode.
7484 So give it the correct mode. With -O, cse will optimize this. */
7485 if (GET_MODE (op0) == VOIDmode)
7486 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7487 op0);
7488 expand_float (target, op0,
7489 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7490 return target;
7491
7492 case NEGATE_EXPR:
5b22bee8 7493 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
bbf6f052
RK
7494 temp = expand_unop (mode, neg_optab, op0, target, 0);
7495 if (temp == 0)
7496 abort ();
7497 return temp;
7498
7499 case ABS_EXPR:
7500 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7501
2d7050fd 7502 /* Handle complex values specially. */
d6a5ac33
RK
7503 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7504 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7505 return expand_complex_abs (mode, op0, target, unsignedp);
2d7050fd 7506
bbf6f052
RK
7507 /* Unsigned abs is simply the operand. Testing here means we don't
7508 risk generating incorrect code below. */
7509 if (TREE_UNSIGNED (type))
7510 return op0;
7511
91813b28 7512 return expand_abs (mode, op0, target,
e5e809f4 7513 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
7514
7515 case MAX_EXPR:
7516 case MIN_EXPR:
7517 target = original_target;
e5e809f4 7518 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
fc155707 7519 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 7520 || GET_MODE (target) != mode
bbf6f052
RK
7521 || (GET_CODE (target) == REG
7522 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7523 target = gen_reg_rtx (mode);
906c4e36 7524 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7525 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7526
7527 /* First try to do it with a special MIN or MAX instruction.
7528 If that does not win, use a conditional jump to select the proper
7529 value. */
7530 this_optab = (TREE_UNSIGNED (type)
7531 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7532 : (code == MIN_EXPR ? smin_optab : smax_optab));
7533
7534 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7535 OPTAB_WIDEN);
7536 if (temp != 0)
7537 return temp;
7538
fa2981d8
JW
7539 /* At this point, a MEM target is no longer useful; we will get better
7540 code without it. */
7541
7542 if (GET_CODE (target) == MEM)
7543 target = gen_reg_rtx (mode);
7544
ee456b1c
RK
7545 if (target != op0)
7546 emit_move_insn (target, op0);
d6a5ac33 7547
bbf6f052 7548 op0 = gen_label_rtx ();
d6a5ac33 7549
f81497d9
RS
7550 /* If this mode is an integer too wide to compare properly,
7551 compare word by word. Rely on cse to optimize constant cases. */
1eb8759b
RH
7552 if (GET_MODE_CLASS (mode) == MODE_INT
7553 && ! can_compare_p (GE, mode, ccp_jump))
bbf6f052 7554 {
f81497d9 7555 if (code == MAX_EXPR)
d6a5ac33
RK
7556 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7557 target, op1, NULL_RTX, op0);
bbf6f052 7558 else
d6a5ac33
RK
7559 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7560 op1, target, NULL_RTX, op0);
bbf6f052 7561 }
f81497d9
RS
7562 else
7563 {
b30f05db
BS
7564 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7565 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7566 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7567 op0);
f81497d9 7568 }
b30f05db 7569 emit_move_insn (target, op1);
bbf6f052
RK
7570 emit_label (op0);
7571 return target;
7572
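#if 0
/* The branching fallback above for MAX_EXPR, in source terms
   (illustrative only): copy the first operand into the target,
   branch past the second store if the target already satisfies GE,
   otherwise overwrite it with the second operand.  */
static int
max_fallback (int a, int b)
{
  int target = a;		/* emit_move_insn (target, op0) */
  if (target >= b)		/* do_compare_rtx_and_jump ... GE */
    goto done;
  target = b;			/* emit_move_insn (target, op1) */
 done:
  return target;
}
#endif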
bbf6f052
RK
7573 case BIT_NOT_EXPR:
7574 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7575 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7576 if (temp == 0)
7577 abort ();
7578 return temp;
7579
7580 case FFS_EXPR:
7581 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7582 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7583 if (temp == 0)
7584 abort ();
7585 return temp;
7586
d6a5ac33
RK
7587 /* ??? Can optimize bitwise operations with one arg constant.
7588 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7589 and (a bitwise1 b) bitwise2 b (etc)
 7590	 but that is probably not worthwhile.  */
7591
7592 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7593 boolean values when we want in all cases to compute both of them. In
7594 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7595 as actual zero-or-1 values and then bitwise anding. In cases where
7596 there cannot be any side effects, better code would be made by
7597 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7598 how to recognize those cases. */
7599
bbf6f052
RK
7600 case TRUTH_AND_EXPR:
7601 case BIT_AND_EXPR:
7602 this_optab = and_optab;
7603 goto binop;
7604
bbf6f052
RK
7605 case TRUTH_OR_EXPR:
7606 case BIT_IOR_EXPR:
7607 this_optab = ior_optab;
7608 goto binop;
7609
874726a8 7610 case TRUTH_XOR_EXPR:
bbf6f052
RK
7611 case BIT_XOR_EXPR:
7612 this_optab = xor_optab;
7613 goto binop;
7614
7615 case LSHIFT_EXPR:
7616 case RSHIFT_EXPR:
7617 case LROTATE_EXPR:
7618 case RROTATE_EXPR:
7619 preexpand_calls (exp);
e5e809f4 7620 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7621 subtarget = 0;
7622 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7623 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7624 unsignedp);
7625
d6a5ac33
RK
7626 /* Could determine the answer when only additive constants differ. Also,
7627 the addition of one can be handled by changing the condition. */
bbf6f052
RK
7628 case LT_EXPR:
7629 case LE_EXPR:
7630 case GT_EXPR:
7631 case GE_EXPR:
7632 case EQ_EXPR:
7633 case NE_EXPR:
1eb8759b
RH
7634 case UNORDERED_EXPR:
7635 case ORDERED_EXPR:
7636 case UNLT_EXPR:
7637 case UNLE_EXPR:
7638 case UNGT_EXPR:
7639 case UNGE_EXPR:
7640 case UNEQ_EXPR:
bbf6f052
RK
7641 preexpand_calls (exp);
7642 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7643 if (temp != 0)
7644 return temp;
d6a5ac33 7645
0f41302f 7646 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
7647 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7648 && original_target
7649 && GET_CODE (original_target) == REG
7650 && (GET_MODE (original_target)
7651 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7652 {
d6a5ac33
RK
7653 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7654 VOIDmode, 0);
7655
bbf6f052
RK
7656 if (temp != original_target)
7657 temp = copy_to_reg (temp);
d6a5ac33 7658
bbf6f052 7659 op1 = gen_label_rtx ();
c5d5d461
JL
7660 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7661 GET_MODE (temp), unsignedp, 0, op1);
bbf6f052
RK
7662 emit_move_insn (temp, const1_rtx);
7663 emit_label (op1);
7664 return temp;
7665 }
d6a5ac33 7666
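#if 0
/* The fallback above for foo != 0, in source terms (illustrative
   only): load foo, and if it is nonzero overwrite it with 1; the EQ
   branch skips the store of the constant.  */
static int
ne_zero_flag (int foo)
{
  int temp = foo;
  if (temp != 0)
    temp = 1;
  return temp;
}
#endif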
bbf6f052
RK
7667 /* If no set-flag instruction, must generate a conditional
7668 store into a temporary variable. Drop through
7669 and handle this like && and ||. */
7670
7671 case TRUTH_ANDIF_EXPR:
7672 case TRUTH_ORIF_EXPR:
e44842fe 7673 if (! ignore
e5e809f4 7674 && (target == 0 || ! safe_from_p (target, exp, 1)
e44842fe
RK
 7675	     /* Make sure we don't have a hard reg (such as the function's return
7676 value) live across basic blocks, if not optimizing. */
7677 || (!optimize && GET_CODE (target) == REG
7678 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 7679 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
7680
7681 if (target)
7682 emit_clr_insn (target);
7683
bbf6f052
RK
7684 op1 = gen_label_rtx ();
7685 jumpifnot (exp, op1);
e44842fe
RK
7686
7687 if (target)
7688 emit_0_to_1_insn (target);
7689
bbf6f052 7690 emit_label (op1);
e44842fe 7691 return ignore ? const0_rtx : target;
bbf6f052
RK
7692
7693 case TRUTH_NOT_EXPR:
7694 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7695 /* The parser is careful to generate TRUTH_NOT_EXPR
7696 only with operands that are always zero or one. */
906c4e36 7697 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
7698 target, 1, OPTAB_LIB_WIDEN);
7699 if (temp == 0)
7700 abort ();
7701 return temp;
7702
7703 case COMPOUND_EXPR:
7704 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7705 emit_queue ();
7706 return expand_expr (TREE_OPERAND (exp, 1),
7707 (ignore ? const0_rtx : target),
7708 VOIDmode, 0);
7709
7710 case COND_EXPR:
ac01eace
RK
7711 /* If we would have a "singleton" (see below) were it not for a
7712 conversion in each arm, bring that conversion back out. */
7713 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7714 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7715 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7716 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7717 {
7718 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7719 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7720
7721 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7722 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7723 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7724 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7725 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7726 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7727 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7728 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7729 return expand_expr (build1 (NOP_EXPR, type,
7730 build (COND_EXPR, TREE_TYPE (true),
7731 TREE_OPERAND (exp, 0),
7732 true, false)),
7733 target, tmode, modifier);
7734 }
7735
bbf6f052
RK
7736 {
7737 /* Note that COND_EXPRs whose type is a structure or union
7738 are required to be constructed to contain assignments of
7739 a temporary variable, so that we can evaluate them here
7740 for side effect only. If type is void, we must do likewise. */
7741
7742 /* If an arm of the branch requires a cleanup,
7743 only that cleanup is performed. */
7744
7745 tree singleton = 0;
7746 tree binary_op = 0, unary_op = 0;
bbf6f052
RK
7747
7748 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7749 convert it to our mode, if necessary. */
7750 if (integer_onep (TREE_OPERAND (exp, 1))
7751 && integer_zerop (TREE_OPERAND (exp, 2))
7752 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7753 {
dd27116b
RK
7754 if (ignore)
7755 {
7756 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
921b3427 7757 ro_modifier);
dd27116b
RK
7758 return const0_rtx;
7759 }
7760
921b3427 7761 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
bbf6f052
RK
7762 if (GET_MODE (op0) == mode)
7763 return op0;
d6a5ac33 7764
bbf6f052
RK
7765 if (target == 0)
7766 target = gen_reg_rtx (mode);
7767 convert_move (target, op0, unsignedp);
7768 return target;
7769 }
7770
ac01eace
RK
7771 /* Check for X ? A + B : A. If we have this, we can copy A to the
7772 output and conditionally add B. Similarly for unary operations.
7773 Don't do this if X has side-effects because those side effects
7774 might affect A or B and the "?" operation is a sequence point in
7775 ANSI. (operand_equal_p tests for side effects.) */
bbf6f052
RK
7776
7777 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7778 && operand_equal_p (TREE_OPERAND (exp, 2),
7779 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7780 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7781 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7782 && operand_equal_p (TREE_OPERAND (exp, 1),
7783 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7784 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7785 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7786 && operand_equal_p (TREE_OPERAND (exp, 2),
7787 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7788 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7789 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7790 && operand_equal_p (TREE_OPERAND (exp, 1),
7791 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7792 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7793
01c8a7c8
RK
7794 /* If we are not to produce a result, we have no target. Otherwise,
7795 if a target was specified use it; it will not be used as an
7796 intermediate target unless it is safe. If no target, use a
7797 temporary. */
7798
7799 if (ignore)
7800 temp = 0;
7801 else if (original_target
e5e809f4 7802 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
01c8a7c8
RK
7803 || (singleton && GET_CODE (original_target) == REG
7804 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7805 && original_target == var_rtx (singleton)))
7806 && GET_MODE (original_target) == mode
7c00d1fe
RK
7807#ifdef HAVE_conditional_move
7808 && (! can_conditionally_move_p (mode)
7809 || GET_CODE (original_target) == REG
7810 || TREE_ADDRESSABLE (type))
7811#endif
01c8a7c8
RK
7812 && ! (GET_CODE (original_target) == MEM
7813 && MEM_VOLATILE_P (original_target)))
7814 temp = original_target;
7815 else if (TREE_ADDRESSABLE (type))
7816 abort ();
7817 else
7818 temp = assign_temp (type, 0, 0, 1);
7819
ac01eace
RK
7820 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7821 do the test of X as a store-flag operation, do this as
7822 A + ((X != 0) << log C). Similarly for other simple binary
7823 operators. Only do for C == 1 if BRANCH_COST is low. */
dd27116b 7824 if (temp && singleton && binary_op
bbf6f052
RK
7825 && (TREE_CODE (binary_op) == PLUS_EXPR
7826 || TREE_CODE (binary_op) == MINUS_EXPR
7827 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 7828 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
ac01eace
RK
7829 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7830 : integer_onep (TREE_OPERAND (binary_op, 1)))
bbf6f052
RK
7831 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7832 {
7833 rtx result;
7834 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7835 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7836 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
2d444001 7837 : xor_optab);
bbf6f052
RK
7838
7839 /* If we had X ? A : A + 1, do this as A + (X == 0).
7840
7841 We have to invert the truth value here and then put it
7842 back later if do_store_flag fails. We cannot simply copy
7843 TREE_OPERAND (exp, 0) to another variable and modify that
7844 because invert_truthvalue can modify the tree pointed to
7845 by its argument. */
7846 if (singleton == TREE_OPERAND (exp, 1))
7847 TREE_OPERAND (exp, 0)
7848 = invert_truthvalue (TREE_OPERAND (exp, 0));
7849
7850 result = do_store_flag (TREE_OPERAND (exp, 0),
e5e809f4 7851 (safe_from_p (temp, singleton, 1)
906c4e36 7852 ? temp : NULL_RTX),
bbf6f052
RK
7853 mode, BRANCH_COST <= 1);
7854
ac01eace
RK
7855 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7856 result = expand_shift (LSHIFT_EXPR, mode, result,
7857 build_int_2 (tree_log2
7858 (TREE_OPERAND
7859 (binary_op, 1)),
7860 0),
e5e809f4 7861 (safe_from_p (temp, singleton, 1)
ac01eace
RK
7862 ? temp : NULL_RTX), 0);
7863
bbf6f052
RK
7864 if (result)
7865 {
906c4e36 7866 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7867 return expand_binop (mode, boptab, op1, result, temp,
7868 unsignedp, OPTAB_LIB_WIDEN);
7869 }
7870 else if (singleton == TREE_OPERAND (exp, 1))
7871 TREE_OPERAND (exp, 0)
7872 = invert_truthvalue (TREE_OPERAND (exp, 0));
7873 }
7874
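#if 0
/* The branch-free rewrite above, in source terms (illustrative
   only): X ? A + C : A, with C a constant power of two, becomes
   A + ((X != 0) << log2 (C)) via a store-flag and a shift.  */
static int
cond_add_pow2 (int x, int a, int log2_c)
{
  return a + ((x != 0) << log2_c);
}
#endif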
dabf8373 7875 do_pending_stack_adjust ();
bbf6f052
RK
7876 NO_DEFER_POP;
7877 op0 = gen_label_rtx ();
7878
7879 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7880 {
7881 if (temp != 0)
7882 {
7883 /* If the target conflicts with the other operand of the
7884 binary op, we can't use it. Also, we can't use the target
7885 if it is a hard register, because evaluating the condition
7886 might clobber it. */
7887 if ((binary_op
e5e809f4 7888 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
bbf6f052
RK
7889 || (GET_CODE (temp) == REG
7890 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7891 temp = gen_reg_rtx (mode);
7892 store_expr (singleton, temp, 0);
7893 }
7894 else
906c4e36 7895 expand_expr (singleton,
2937cf87 7896 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7897 if (singleton == TREE_OPERAND (exp, 1))
7898 jumpif (TREE_OPERAND (exp, 0), op0);
7899 else
7900 jumpifnot (TREE_OPERAND (exp, 0), op0);
7901
956d6950 7902 start_cleanup_deferral ();
bbf6f052
RK
7903 if (binary_op && temp == 0)
7904 /* Just touch the other operand. */
7905 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 7906 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7907 else if (binary_op)
7908 store_expr (build (TREE_CODE (binary_op), type,
7909 make_tree (type, temp),
7910 TREE_OPERAND (binary_op, 1)),
7911 temp, 0);
7912 else
7913 store_expr (build1 (TREE_CODE (unary_op), type,
7914 make_tree (type, temp)),
7915 temp, 0);
7916 op1 = op0;
bbf6f052 7917 }
bbf6f052
RK
7918 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7919 comparison operator. If we have one of these cases, set the
7920 output to A, branch on A (cse will merge these two references),
7921 then set the output to FOO. */
7922 else if (temp
7923 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7924 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7925 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7926 TREE_OPERAND (exp, 1), 0)
e9a25f70
JL
7927 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7928 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 7929 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
bbf6f052
RK
7930 {
7931 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7932 temp = gen_reg_rtx (mode);
7933 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7934 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 7935
956d6950 7936 start_cleanup_deferral ();
bbf6f052
RK
7937 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7938 op1 = op0;
7939 }
7940 else if (temp
7941 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7942 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7943 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7944 TREE_OPERAND (exp, 2), 0)
e9a25f70
JL
7945 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7946 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 7947 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7948 {
7949 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7950 temp = gen_reg_rtx (mode);
7951 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7952 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 7953
956d6950 7954 start_cleanup_deferral ();
bbf6f052
RK
7955 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7956 op1 = op0;
7957 }
7958 else
7959 {
7960 op1 = gen_label_rtx ();
7961 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 7962
956d6950 7963 start_cleanup_deferral ();
2ac84cfe
NS
7964
 7965 /* One branch of the cond can be void if it never returns. For
 7966 example, A ? throw : E. */
7967 if (temp != 0
7968 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
bbf6f052
RK
7969 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7970 else
906c4e36
RK
7971 expand_expr (TREE_OPERAND (exp, 1),
7972 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 7973 end_cleanup_deferral ();
bbf6f052
RK
7974 emit_queue ();
7975 emit_jump_insn (gen_jump (op1));
7976 emit_barrier ();
7977 emit_label (op0);
956d6950 7978 start_cleanup_deferral ();
2ac84cfe
NS
7979 if (temp != 0
7980 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
bbf6f052
RK
7981 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7982 else
906c4e36
RK
7983 expand_expr (TREE_OPERAND (exp, 2),
7984 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7985 }
7986
956d6950 7987 end_cleanup_deferral ();
bbf6f052
RK
7988
7989 emit_queue ();
7990 emit_label (op1);
7991 OK_DEFER_POP;
5dab5552 7992
bbf6f052
RK
7993 return temp;
7994 }
7995
7996 case TARGET_EXPR:
7997 {
7998 /* Something needs to be initialized, but we didn't know
7999 where that thing was when building the tree. For example,
8000 it could be the return value of a function, or a parameter
 8001 to a function which is laid out on the stack, or a temporary
8002 variable which must be passed by reference.
8003
8004 We guarantee that the expression will either be constructed
8005 or copied into our original target. */
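 Editorial illustration, not part of the original source: one
 construct that can reach this case is a call returning an
 aggregate, e.g.

 struct S { int v[4]; };
 struct S make_s (void);
 ... struct S s = make_s (); ...

 where the front end wraps the call in a TARGET_EXPR whose slot
 is the variable (or compiler temporary) receiving the value. */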
8006
8007 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 8008 tree cleanups = NULL_TREE;
5c062816 8009 tree exp1;
bbf6f052
RK
8010
8011 if (TREE_CODE (slot) != VAR_DECL)
8012 abort ();
8013
9c51f375
RK
8014 if (! ignore)
8015 target = original_target;
8016
6fbfac92
JM
8017 /* Set this here so that if we get a target that refers to a
8018 register variable that's already been used, put_reg_into_stack
8019 knows that it should fix up those uses. */
8020 TREE_USED (slot) = 1;
8021
bbf6f052
RK
8022 if (target == 0)
8023 {
8024 if (DECL_RTL (slot) != 0)
ac993f4f
MS
8025 {
8026 target = DECL_RTL (slot);
5c062816 8027 /* If we have already expanded the slot, don't do
ac993f4f 8028 it again. (mrs) */
5c062816
MS
8029 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8030 return target;
ac993f4f 8031 }
bbf6f052
RK
8032 else
8033 {
e9a25f70 8034 target = assign_temp (type, 2, 0, 1);
bbf6f052
RK
8035 /* All temp slots at this level must not conflict. */
8036 preserve_temp_slots (target);
8037 DECL_RTL (slot) = target;
e9a25f70
JL
8038 if (TREE_ADDRESSABLE (slot))
8039 {
8040 TREE_ADDRESSABLE (slot) = 0;
8041 mark_addressable (slot);
8042 }
bbf6f052 8043
e287fd6e
RK
8044 /* Since SLOT is not known to the called function
8045 to belong to its stack frame, we must build an explicit
8046 cleanup. This case occurs when we must build up a reference
8047 to pass the reference as an argument. In this case,
8048 it is very likely that such a reference need not be
8049 built here. */
8050
8051 if (TREE_OPERAND (exp, 2) == 0)
8052 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
2a888d4c 8053 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 8054 }
bbf6f052
RK
8055 }
8056 else
8057 {
 8058 /* This case does occur when expanding a parameter which
8059 needs to be constructed on the stack. The target
8060 is the actual stack address that we want to initialize.
8061 The function we call will perform the cleanup in this case. */
8062
8c042b47
RS
8063 /* If we have already assigned it space, use that space,
 8064 not the target we were passed in, as our target
8065 parameter is only a hint. */
8066 if (DECL_RTL (slot) != 0)
8067 {
8068 target = DECL_RTL (slot);
 8069 /* If we have already expanded the slot, don't do
8070 it again. (mrs) */
8071 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8072 return target;
8073 }
21002281
JW
8074 else
8075 {
8076 DECL_RTL (slot) = target;
8077 /* If we must have an addressable slot, then make sure that
8078 the RTL that we just stored in slot is OK. */
8079 if (TREE_ADDRESSABLE (slot))
8080 {
8081 TREE_ADDRESSABLE (slot) = 0;
8082 mark_addressable (slot);
8083 }
8084 }
bbf6f052
RK
8085 }
8086
4847c938 8087 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
8088 /* Mark it as expanded. */
8089 TREE_OPERAND (exp, 1) = NULL_TREE;
8090
41531e5b 8091 store_expr (exp1, target, 0);
61d6b1cc 8092
e976b8b2 8093 expand_decl_cleanup (NULL_TREE, cleanups);
61d6b1cc 8094
41531e5b 8095 return target;
bbf6f052
RK
8096 }
8097
8098 case INIT_EXPR:
8099 {
8100 tree lhs = TREE_OPERAND (exp, 0);
8101 tree rhs = TREE_OPERAND (exp, 1);
8102 tree noncopied_parts = 0;
8103 tree lhs_type = TREE_TYPE (lhs);
8104
8105 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8106 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8107 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8108 TYPE_NONCOPIED_PARTS (lhs_type));
8109 while (noncopied_parts != 0)
8110 {
8111 expand_assignment (TREE_VALUE (noncopied_parts),
8112 TREE_PURPOSE (noncopied_parts), 0, 0);
8113 noncopied_parts = TREE_CHAIN (noncopied_parts);
8114 }
8115 return temp;
8116 }
8117
8118 case MODIFY_EXPR:
8119 {
8120 /* If lhs is complex, expand calls in rhs before computing it.
8121 That's so we don't compute a pointer and save it over a call.
8122 If lhs is simple, compute it first so we can give it as a
8123 target if the rhs is just a call. This avoids an extra temp and copy
8124 and that prevents a partial-subsumption which makes bad code.
8125 Actually we could treat component_ref's of vars like vars. */
8126
8127 tree lhs = TREE_OPERAND (exp, 0);
8128 tree rhs = TREE_OPERAND (exp, 1);
8129 tree noncopied_parts = 0;
8130 tree lhs_type = TREE_TYPE (lhs);
8131
8132 temp = 0;
8133
8134 if (TREE_CODE (lhs) != VAR_DECL
8135 && TREE_CODE (lhs) != RESULT_DECL
b60334e8
RK
8136 && TREE_CODE (lhs) != PARM_DECL
8137 && ! (TREE_CODE (lhs) == INDIRECT_REF
8138 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
bbf6f052
RK
8139 preexpand_calls (exp);
8140
 8141 /* Check for |= or &= of a bitfield of size 1 into another bitfield
8142 of size 1. In this case, (unless we need the result of the
8143 assignment) we can do this more efficiently with a
8144 test followed by an assignment, if necessary.
8145
8146 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8147 things change so we do, this code should be enhanced to
8148 support it. */
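 /* Editorial illustration, not part of the original source: the
 pattern targeted below, with the assignment's value unused, e.g.

 struct B { unsigned int x : 1, y : 1; };
 void f (struct B *b) { b->x |= b->y; }

 is emitted roughly as

 if (b->y == 0) goto done; b->x = 1; done: ;

 i.e. a test plus a conditional store instead of a full
 read-modify-write of the bitfield. */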
8149 if (ignore
8150 && TREE_CODE (lhs) == COMPONENT_REF
8151 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8152 || TREE_CODE (rhs) == BIT_AND_EXPR)
8153 && TREE_OPERAND (rhs, 0) == lhs
8154 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
05bccae2
RK
8155 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8156 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
bbf6f052
RK
8157 {
8158 rtx label = gen_label_rtx ();
8159
8160 do_jump (TREE_OPERAND (rhs, 1),
8161 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8162 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8163 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8164 (TREE_CODE (rhs) == BIT_IOR_EXPR
8165 ? integer_one_node
8166 : integer_zero_node)),
8167 0, 0);
e7c33f54 8168 do_pending_stack_adjust ();
bbf6f052
RK
8169 emit_label (label);
8170 return const0_rtx;
8171 }
8172
8173 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8174 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8175 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8176 TYPE_NONCOPIED_PARTS (lhs_type));
8177
8178 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8179 while (noncopied_parts != 0)
8180 {
8181 expand_assignment (TREE_PURPOSE (noncopied_parts),
8182 TREE_VALUE (noncopied_parts), 0, 0);
8183 noncopied_parts = TREE_CHAIN (noncopied_parts);
8184 }
8185 return temp;
8186 }
8187
6e7f84a7
APB
8188 case RETURN_EXPR:
8189 if (!TREE_OPERAND (exp, 0))
8190 expand_null_return ();
8191 else
8192 expand_return (TREE_OPERAND (exp, 0));
8193 return const0_rtx;
8194
bbf6f052
RK
8195 case PREINCREMENT_EXPR:
8196 case PREDECREMENT_EXPR:
7b8b9722 8197 return expand_increment (exp, 0, ignore);
bbf6f052
RK
8198
8199 case POSTINCREMENT_EXPR:
8200 case POSTDECREMENT_EXPR:
8201 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 8202 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
8203
8204 case ADDR_EXPR:
987c71d9 8205 /* If nonzero, TEMP will be set to the address of something that might
0f41302f 8206 be a MEM corresponding to a stack slot. */
987c71d9
RK
8207 temp = 0;
8208
bbf6f052
RK
8209 /* Are we taking the address of a nested function? */
8210 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 8211 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
8212 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8213 && ! TREE_STATIC (exp))
bbf6f052
RK
8214 {
8215 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8216 op0 = force_operand (op0, target);
8217 }
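 /* Editorial illustration, not part of the original source: this
 branch fires for the GNU C nested-function extension, e.g.

 void outer (int n)
 {
 int inner (int i) { return i + n; }
 int (*fp) (int) = inner;
 ...
 }

 where the address taken is that of a trampoline which loads
 outer's static chain before jumping to inner. */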
682ba3a6
RK
8218 /* If we are taking the address of something erroneous, just
8219 return a zero. */
8220 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8221 return const0_rtx;
bbf6f052
RK
8222 else
8223 {
e287fd6e
RK
8224 /* We make sure to pass const0_rtx down if we came in with
8225 ignore set, to avoid doing the cleanups twice for something. */
8226 op0 = expand_expr (TREE_OPERAND (exp, 0),
8227 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
8228 (modifier == EXPAND_INITIALIZER
8229 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 8230
119af78a
RK
8231 /* If we are going to ignore the result, OP0 will have been set
8232 to const0_rtx, so just return it. Don't get confused and
8233 think we are taking the address of the constant. */
8234 if (ignore)
8235 return op0;
8236
3539e816
MS
8237 op0 = protect_from_queue (op0, 0);
8238
c5c76735
JL
8239 /* We would like the object in memory. If it is a constant, we can
8240 have it be statically allocated into memory. For a non-constant,
8241 we need to allocate some memory and store the value into it. */
896102d0
RK
8242
8243 if (CONSTANT_P (op0))
8244 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8245 op0);
987c71d9 8246 else if (GET_CODE (op0) == MEM)
af5b53ed
RK
8247 {
8248 mark_temp_addr_taken (op0);
8249 temp = XEXP (op0, 0);
8250 }
896102d0 8251
682ba3a6 8252 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6c8538cc 8253 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
896102d0
RK
8254 {
 8255 /* If this object is in a register, it must not
0f41302f 8256 be BLKmode. */
896102d0 8257 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
06089a8b 8258 rtx memloc = assign_temp (inner_type, 1, 1, 1);
896102d0 8259
7a0b7b9a 8260 mark_temp_addr_taken (memloc);
896102d0
RK
8261 emit_move_insn (memloc, op0);
8262 op0 = memloc;
8263 }
8264
bbf6f052
RK
8265 if (GET_CODE (op0) != MEM)
8266 abort ();
8267
8268 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77
RK
8269 {
8270 temp = XEXP (op0, 0);
8271#ifdef POINTERS_EXTEND_UNSIGNED
8272 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8273 && mode == ptr_mode)
9fcfcce7 8274 temp = convert_memory_address (ptr_mode, temp);
88f63c77
RK
8275#endif
8276 return temp;
8277 }
987c71d9 8278
bbf6f052
RK
8279 op0 = force_operand (XEXP (op0, 0), target);
8280 }
987c71d9 8281
bbf6f052 8282 if (flag_force_addr && GET_CODE (op0) != REG)
987c71d9
RK
8283 op0 = force_reg (Pmode, op0);
8284
dc6d66b3
RK
8285 if (GET_CODE (op0) == REG
8286 && ! REG_USERVAR_P (op0))
8287 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
987c71d9
RK
8288
8289 /* If we might have had a temp slot, add an equivalent address
8290 for it. */
8291 if (temp != 0)
8292 update_temp_slot_address (temp, op0);
8293
88f63c77
RK
8294#ifdef POINTERS_EXTEND_UNSIGNED
8295 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8296 && mode == ptr_mode)
9fcfcce7 8297 op0 = convert_memory_address (ptr_mode, op0);
88f63c77
RK
8298#endif
8299
bbf6f052
RK
8300 return op0;
8301
8302 case ENTRY_VALUE_EXPR:
8303 abort ();
8304
7308a047
RS
8305 /* COMPLEX type for Extended Pascal & Fortran */
8306 case COMPLEX_EXPR:
8307 {
8308 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 8309 rtx insns;
7308a047
RS
8310
8311 /* Get the rtx code of the operands. */
8312 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8313 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8314
8315 if (! target)
8316 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8317
6551fa4d 8318 start_sequence ();
7308a047
RS
8319
8320 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
8321 emit_move_insn (gen_realpart (mode, target), op0);
8322 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 8323
6551fa4d
JW
8324 insns = get_insns ();
8325 end_sequence ();
8326
7308a047 8327 /* Complex construction should appear as a single unit. */
6551fa4d
JW
8328 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8329 each with a separate pseudo as destination.
8330 It's not correct for flow to treat them as a unit. */
6d6e61ce 8331 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8332 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8333 else
8334 emit_insns (insns);
7308a047
RS
8335
8336 return target;
8337 }
8338
8339 case REALPART_EXPR:
2d7050fd
RS
8340 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8341 return gen_realpart (mode, op0);
7308a047
RS
8342
8343 case IMAGPART_EXPR:
2d7050fd
RS
8344 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8345 return gen_imagpart (mode, op0);
7308a047
RS
8346
8347 case CONJ_EXPR:
8348 {
62acb978 8349 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 8350 rtx imag_t;
6551fa4d 8351 rtx insns;
7308a047
RS
8352
8353 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8354
8355 if (! target)
d6a5ac33 8356 target = gen_reg_rtx (mode);
7308a047 8357
6551fa4d 8358 start_sequence ();
7308a047
RS
8359
8360 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
8361 emit_move_insn (gen_realpart (partmode, target),
8362 gen_realpart (partmode, op0));
7308a047 8363
62acb978
RK
8364 imag_t = gen_imagpart (partmode, target);
8365 temp = expand_unop (partmode, neg_optab,
8366 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
8367 if (temp != imag_t)
8368 emit_move_insn (imag_t, temp);
8369
6551fa4d
JW
8370 insns = get_insns ();
8371 end_sequence ();
8372
d6a5ac33
RK
 8373 /* Conjugate should appear as a single unit.
8374 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
8375 each with a separate pseudo as destination.
8376 It's not correct for flow to treat them as a unit. */
6d6e61ce 8377 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8378 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8379 else
8380 emit_insns (insns);
7308a047
RS
8381
8382 return target;
8383 }
8384
e976b8b2
MS
8385 case TRY_CATCH_EXPR:
8386 {
8387 tree handler = TREE_OPERAND (exp, 1);
8388
8389 expand_eh_region_start ();
8390
8391 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8392
8393 expand_eh_region_end (handler);
8394
8395 return op0;
8396 }
8397
b335b813
PB
8398 case TRY_FINALLY_EXPR:
8399 {
8400 tree try_block = TREE_OPERAND (exp, 0);
8401 tree finally_block = TREE_OPERAND (exp, 1);
8402 rtx finally_label = gen_label_rtx ();
8403 rtx done_label = gen_label_rtx ();
8404 rtx return_link = gen_reg_rtx (Pmode);
8405 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8406 (tree) finally_label, (tree) return_link);
8407 TREE_SIDE_EFFECTS (cleanup) = 1;
8408
8409 /* Start a new binding layer that will keep track of all cleanup
8410 actions to be performed. */
8e91754e 8411 expand_start_bindings (2);
b335b813
PB
8412
8413 target_temp_slot_level = temp_slot_level;
8414
8415 expand_decl_cleanup (NULL_TREE, cleanup);
8416 op0 = expand_expr (try_block, target, tmode, modifier);
8417
8418 preserve_temp_slots (op0);
8419 expand_end_bindings (NULL_TREE, 0, 0);
8420 emit_jump (done_label);
8421 emit_label (finally_label);
8422 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8423 emit_indirect_jump (return_link);
8424 emit_label (done_label);
8425 return op0;
8426 }
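 /* Editorial note, not part of the original source: the case above
 emits control flow roughly equivalent to the GNU C sketch

 <try body>
 return_link = &&resume; goto finally; resume: ; (the cleanup)
 goto done;
 finally:
 <finally body>
 goto *return_link;
 done: ;

 so a single copy of the finally code serves the normal exit and
 every cleanup path that runs it as a subroutine. */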
8427
8428 case GOTO_SUBROUTINE_EXPR:
8429 {
8430 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8431 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8432 rtx return_address = gen_label_rtx ();
8433 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8434 emit_jump (subr);
8435 emit_label (return_address);
8436 return const0_rtx;
8437 }
8438
e976b8b2
MS
8439 case POPDCC_EXPR:
8440 {
8441 rtx dcc = get_dynamic_cleanup_chain ();
38a448ca 8442 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
e976b8b2
MS
8443 return const0_rtx;
8444 }
8445
8446 case POPDHC_EXPR:
8447 {
8448 rtx dhc = get_dynamic_handler_chain ();
38a448ca 8449 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
e976b8b2
MS
8450 return const0_rtx;
8451 }
8452
d3707adb
RH
8453 case VA_ARG_EXPR:
8454 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8455
bbf6f052 8456 default:
90764a87 8457 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
8458 }
8459
8460 /* Here to do an ordinary binary operator, generating an instruction
8461 from the optab already placed in `this_optab'. */
8462 binop:
8463 preexpand_calls (exp);
e5e809f4 8464 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
8465 subtarget = 0;
8466 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 8467 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8468 binop2:
8469 temp = expand_binop (mode, this_optab, op0, op1, target,
8470 unsignedp, OPTAB_LIB_WIDEN);
8471 if (temp == 0)
8472 abort ();
8473 return temp;
8474}
b93a436e 8475\f
14a774a9
RK
8476/* Similar to expand_expr, except that we don't specify a target, target
8477 mode, or modifier and we return the alignment of the inner type. This is
8478 used in cases where it is not necessary to align the result to the
8479 alignment of its type as long as we know the alignment of the result, for
8480 example for comparisons of BLKmode values. */
8481
8482static rtx
8483expand_expr_unaligned (exp, palign)
8484 register tree exp;
729a2125 8485 unsigned int *palign;
14a774a9
RK
8486{
8487 register rtx op0;
8488 tree type = TREE_TYPE (exp);
8489 register enum machine_mode mode = TYPE_MODE (type);
8490
8491 /* Default the alignment we return to that of the type. */
8492 *palign = TYPE_ALIGN (type);
8493
8494 /* The only cases in which we do anything special is if the resulting mode
8495 is BLKmode. */
8496 if (mode != BLKmode)
8497 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8498
8499 switch (TREE_CODE (exp))
8500 {
8501 case CONVERT_EXPR:
8502 case NOP_EXPR:
8503 case NON_LVALUE_EXPR:
8504 /* Conversions between BLKmode values don't change the underlying
8505 alignment or value. */
8506 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8507 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8508 break;
8509
8510 case ARRAY_REF:
8511 /* Much of the code for this case is copied directly from expand_expr.
8512 We need to duplicate it here because we will do something different
8513 in the fall-through case, so we need to handle the same exceptions
8514 it does. */
8515 {
8516 tree array = TREE_OPERAND (exp, 0);
8517 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8518 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
fed3cef0 8519 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
14a774a9
RK
8520 HOST_WIDE_INT i;
8521
8522 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8523 abort ();
8524
8525 /* Optimize the special-case of a zero lower bound.
8526
8527 We convert the low_bound to sizetype to avoid some problems
8528 with constant folding. (E.g. suppose the lower bound is 1,
8529 and its mode is QI. Without the conversion, (ARRAY
8530 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fed3cef0 8531 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
14a774a9
RK
8532
8533 if (! integer_zerop (low_bound))
fed3cef0 8534 index = size_diffop (index, convert (sizetype, low_bound));
14a774a9
RK
8535
8536 /* If this is a constant index into a constant array,
8537 just get the value from the array. Handle both the cases when
8538 we have an explicit constructor and when our operand is a variable
8539 that was declared const. */
8540
05bccae2
RK
8541 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8542 && 0 > compare_tree_int (index,
8543 list_length (CONSTRUCTOR_ELTS
8544 (TREE_OPERAND (exp, 0)))))
14a774a9 8545 {
05bccae2
RK
8546 tree elem;
8547
8548 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8549 i = TREE_INT_CST_LOW (index);
8550 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8551 ;
8552
8553 if (elem)
8554 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
14a774a9
RK
8555 }
8556
8557 else if (optimize >= 1
8558 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8559 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8560 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8561 {
8562 if (TREE_CODE (index) == INTEGER_CST)
8563 {
8564 tree init = DECL_INITIAL (array);
8565
14a774a9
RK
8566 if (TREE_CODE (init) == CONSTRUCTOR)
8567 {
05bccae2
RK
8568 tree elem;
8569
8570 for (elem = CONSTRUCTOR_ELTS (init);
8571 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8572 elem = TREE_CHAIN (elem))
8573 ;
14a774a9 8574
14a774a9
RK
8575 if (elem)
8576 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8577 palign);
8578 }
8579 }
8580 }
8581 }
8582
8583 /* ... fall through ... */
8584
8585 case COMPONENT_REF:
8586 case BIT_FIELD_REF:
8587 /* If the operand is a CONSTRUCTOR, we can just extract the
8588 appropriate field if it is present. Don't do this if we have
8589 already written the data since we want to refer to that copy
8590 and varasm.c assumes that's what we'll do. */
8591 if (TREE_CODE (exp) != ARRAY_REF
8592 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8593 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8594 {
8595 tree elt;
8596
8597 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8598 elt = TREE_CHAIN (elt))
8599 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8600 /* Note that unlike the case in expand_expr, we know this is
8601 BLKmode and hence not an integer. */
8602 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8603 }
8604
8605 {
8606 enum machine_mode mode1;
8607 int bitsize;
8608 int bitpos;
8609 tree offset;
8610 int volatilep = 0;
729a2125 8611 unsigned int alignment;
14a774a9
RK
8612 int unsignedp;
8613 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8614 &mode1, &unsignedp, &volatilep,
8615 &alignment);
8616
8617 /* If we got back the original object, something is wrong. Perhaps
8618 we are evaluating an expression too early. In any event, don't
8619 infinitely recurse. */
8620 if (tem == exp)
8621 abort ();
8622
8623 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8624
8625 /* If this is a constant, put it into a register if it is a
8626 legitimate constant and OFFSET is 0 and memory if it isn't. */
8627 if (CONSTANT_P (op0))
8628 {
8629 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8630
8631 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8632 && offset == 0)
8633 op0 = force_reg (inner_mode, op0);
8634 else
8635 op0 = validize_mem (force_const_mem (inner_mode, op0));
8636 }
8637
8638 if (offset != 0)
8639 {
8640 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8641
8642 /* If this object is in a register, put it into memory.
8643 This case can't occur in C, but can in Ada if we have
8644 unchecked conversion of an expression from a scalar type to
8645 an array or record type. */
8646 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8647 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8648 {
8649 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8650
8651 mark_temp_addr_taken (memloc);
8652 emit_move_insn (memloc, op0);
8653 op0 = memloc;
8654 }
8655
8656 if (GET_CODE (op0) != MEM)
8657 abort ();
8658
8659 if (GET_MODE (offset_rtx) != ptr_mode)
8660 {
8661#ifdef POINTERS_EXTEND_UNSIGNED
8662 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8663#else
8664 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8665#endif
8666 }
8667
8668 op0 = change_address (op0, VOIDmode,
8669 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8670 force_reg (ptr_mode,
8671 offset_rtx)));
8672 }
8673
8674 /* Don't forget about volatility even if this is a bitfield. */
8675 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8676 {
8677 op0 = copy_rtx (op0);
8678 MEM_VOLATILE_P (op0) = 1;
8679 }
8680
8681 /* Check the access. */
8682 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8683 {
8684 rtx to;
8685 int size;
8686
8687 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8688 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8689
8690 /* Check the access right of the pointer. */
8691 if (size > BITS_PER_UNIT)
8692 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8693 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8694 TYPE_MODE (sizetype),
8695 GEN_INT (MEMORY_USE_RO),
8696 TYPE_MODE (integer_type_node));
8697 }
8698
a2b99161
RK
8699 /* In cases where an aligned union has an unaligned object
8700 as a field, we might be extracting a BLKmode value from
8701 an integer-mode (e.g., SImode) object. Handle this case
8702 by doing the extract into an object as wide as the field
8703 (which we know to be the width of a basic mode), then
8704 storing into memory, and changing the mode to BLKmode.
8705 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8706 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8707 if (mode1 == VOIDmode
8708 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
e1565e65 8709 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
a2b99161
RK
8710 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
8711 || bitpos % TYPE_ALIGN (type) != 0)))
8712 {
8713 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8714
8715 if (ext_mode == BLKmode)
8716 {
8717 /* In this case, BITPOS must start at a byte boundary. */
8718 if (GET_CODE (op0) != MEM
8719 || bitpos % BITS_PER_UNIT != 0)
8720 abort ();
8721
8722 op0 = change_address (op0, VOIDmode,
8723 plus_constant (XEXP (op0, 0),
8724 bitpos / BITS_PER_UNIT));
8725 }
8726 else
8727 {
8728 rtx new = assign_stack_temp (ext_mode,
8729 bitsize / BITS_PER_UNIT, 0);
8730
8731 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8732 unsignedp, NULL_RTX, ext_mode,
8733 ext_mode, alignment,
8734 int_size_in_bytes (TREE_TYPE (tem)));
8735
8736 /* If the result is a record type and BITSIZE is narrower than
8737 the mode of OP0, an integral mode, and this is a big endian
8738 machine, we must put the field into the high-order bits. */
8739 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8740 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8741 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8742 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8743 size_int (GET_MODE_BITSIZE
8744 (GET_MODE (op0))
8745 - bitsize),
8746 op0, 1);
8747
8748
8749 emit_move_insn (new, op0);
8750 op0 = copy_rtx (new);
8751 PUT_MODE (op0, BLKmode);
8752 }
8753 }
8754 else
8755 /* Get a reference to just this component. */
8756 op0 = change_address (op0, mode1,
8757 plus_constant (XEXP (op0, 0),
8758 (bitpos / BITS_PER_UNIT)));
14a774a9
RK
8759
8760 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8761
8762 /* Adjust the alignment in case the bit position is not
8763 a multiple of the alignment of the inner object. */
8764 while (bitpos % alignment != 0)
8765 alignment >>= 1;
8766
8767 if (GET_CODE (XEXP (op0, 0)) == REG)
8768 mark_reg_pointer (XEXP (op0, 0), alignment);
8769
8770 MEM_IN_STRUCT_P (op0) = 1;
8771 MEM_VOLATILE_P (op0) |= volatilep;
8772
8773 *palign = alignment;
8774 return op0;
8775 }
8776
8777 default:
8778 break;
8779
8780 }
8781
8782 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8783}
8784\f
fed3cef0
RK
8785/* Return the tree node if ARG corresponds to a string constant or zero
8786 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8787 in bytes within the string that ARG is accessing. The type of the
8788 offset will be `sizetype'. */
b93a436e 8789
28f4ec01 8790tree
b93a436e
JL
8791string_constant (arg, ptr_offset)
8792 tree arg;
8793 tree *ptr_offset;
8794{
8795 STRIP_NOPS (arg);
8796
8797 if (TREE_CODE (arg) == ADDR_EXPR
8798 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8799 {
fed3cef0 8800 *ptr_offset = size_zero_node;
b93a436e
JL
8801 return TREE_OPERAND (arg, 0);
8802 }
8803 else if (TREE_CODE (arg) == PLUS_EXPR)
8804 {
8805 tree arg0 = TREE_OPERAND (arg, 0);
8806 tree arg1 = TREE_OPERAND (arg, 1);
8807
8808 STRIP_NOPS (arg0);
8809 STRIP_NOPS (arg1);
8810
8811 if (TREE_CODE (arg0) == ADDR_EXPR
8812 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 8813 {
fed3cef0 8814 *ptr_offset = convert (sizetype, arg1);
b93a436e 8815 return TREE_OPERAND (arg0, 0);
bbf6f052 8816 }
b93a436e
JL
8817 else if (TREE_CODE (arg1) == ADDR_EXPR
8818 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 8819 {
fed3cef0 8820 *ptr_offset = convert (sizetype, arg0);
b93a436e 8821 return TREE_OPERAND (arg1, 0);
bbf6f052 8822 }
b93a436e 8823 }
ca695ac9 8824
b93a436e
JL
8825 return 0;
8826}
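/* Editorial illustration, not part of the original source: for an
 argument tree corresponding to the C expression "hello" + 2, the
 function above returns the STRING_CST for "hello" and sets
 *PTR_OFFSET to the sizetype constant 2; for plain "hello" the
 offset returned is size_zero_node. */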
ca695ac9 8827\f
b93a436e
JL
8828/* Expand code for a post- or pre- increment or decrement
8829 and return the RTX for the result.
8830 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
1499e0a8 8831
b93a436e
JL
8832static rtx
8833expand_increment (exp, post, ignore)
8834 register tree exp;
8835 int post, ignore;
ca695ac9 8836{
b93a436e
JL
8837 register rtx op0, op1;
8838 register rtx temp, value;
8839 register tree incremented = TREE_OPERAND (exp, 0);
8840 optab this_optab = add_optab;
8841 int icode;
8842 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8843 int op0_is_copy = 0;
8844 int single_insn = 0;
8845 /* 1 means we can't store into OP0 directly,
8846 because it is a subreg narrower than a word,
8847 and we don't dare clobber the rest of the word. */
8848 int bad_subreg = 0;
1499e0a8 8849
b93a436e
JL
8850 /* Stabilize any component ref that might need to be
8851 evaluated more than once below. */
8852 if (!post
8853 || TREE_CODE (incremented) == BIT_FIELD_REF
8854 || (TREE_CODE (incremented) == COMPONENT_REF
8855 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8856 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8857 incremented = stabilize_reference (incremented);
8858 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8859 ones into save exprs so that they don't accidentally get evaluated
8860 more than once by the code below. */
8861 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8862 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8863 incremented = save_expr (incremented);
e9a25f70 8864
b93a436e
JL
8865 /* Compute the operands as RTX.
8866 Note whether OP0 is the actual lvalue or a copy of it:
8867 I believe it is a copy iff it is a register or subreg
8868 and insns were generated in computing it. */
e9a25f70 8869
b93a436e
JL
8870 temp = get_last_insn ();
8871 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
e9a25f70 8872
b93a436e
JL
8873 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8874 in place but instead must do sign- or zero-extension during assignment,
8875 so we copy it into a new register and let the code below use it as
8876 a copy.
e9a25f70 8877
b93a436e
JL
 8878 Note that we can safely modify this SUBREG since it is known not to be
8879 shared (it was made by the expand_expr call above). */
8880
8881 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8882 {
8883 if (post)
8884 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8885 else
8886 bad_subreg = 1;
8887 }
8888 else if (GET_CODE (op0) == SUBREG
8889 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8890 {
8891 /* We cannot increment this SUBREG in place. If we are
8892 post-incrementing, get a copy of the old value. Otherwise,
8893 just mark that we cannot increment in place. */
8894 if (post)
8895 op0 = copy_to_reg (op0);
8896 else
8897 bad_subreg = 1;
e9a25f70
JL
8898 }
8899
b93a436e
JL
8900 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8901 && temp != get_last_insn ());
8902 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8903 EXPAND_MEMORY_USE_BAD);
1499e0a8 8904
b93a436e
JL
8905 /* Decide whether incrementing or decrementing. */
8906 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8907 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8908 this_optab = sub_optab;
8909
8910 /* Convert decrement by a constant into a negative increment. */
8911 if (this_optab == sub_optab
8912 && GET_CODE (op1) == CONST_INT)
ca695ac9 8913 {
b93a436e
JL
8914 op1 = GEN_INT (- INTVAL (op1));
8915 this_optab = add_optab;
ca695ac9 8916 }
1499e0a8 8917
b93a436e
JL
8918 /* For a preincrement, see if we can do this with a single instruction. */
8919 if (!post)
8920 {
8921 icode = (int) this_optab->handlers[(int) mode].insn_code;
8922 if (icode != (int) CODE_FOR_nothing
8923 /* Make sure that OP0 is valid for operands 0 and 1
8924 of the insn we want to queue. */
a995e389
RH
8925 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8926 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8927 && (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e
JL
8928 single_insn = 1;
8929 }
bbf6f052 8930
b93a436e
JL
8931 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8932 then we cannot just increment OP0. We must therefore contrive to
8933 increment the original value. Then, for postincrement, we can return
8934 OP0 since it is a copy of the old value. For preincrement, expand here
8935 unless we can do it with a single insn.
bbf6f052 8936
b93a436e
JL
8937 Likewise if storing directly into OP0 would clobber high bits
8938 we need to preserve (bad_subreg). */
8939 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 8940 {
b93a436e
JL
8941 /* This is the easiest way to increment the value wherever it is.
8942 Problems with multiple evaluation of INCREMENTED are prevented
8943 because either (1) it is a component_ref or preincrement,
8944 in which case it was stabilized above, or (2) it is an array_ref
8945 with constant index in an array in a register, which is
8946 safe to reevaluate. */
8947 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8948 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8949 ? MINUS_EXPR : PLUS_EXPR),
8950 TREE_TYPE (exp),
8951 incremented,
8952 TREE_OPERAND (exp, 1));
a358cee0 8953
b93a436e
JL
8954 while (TREE_CODE (incremented) == NOP_EXPR
8955 || TREE_CODE (incremented) == CONVERT_EXPR)
8956 {
8957 newexp = convert (TREE_TYPE (incremented), newexp);
8958 incremented = TREE_OPERAND (incremented, 0);
8959 }
bbf6f052 8960
b93a436e
JL
 8961 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8962 return post ? op0 : temp;
8963 }
bbf6f052 8964
b93a436e
JL
8965 if (post)
8966 {
8967 /* We have a true reference to the value in OP0.
8968 If there is an insn to add or subtract in this mode, queue it.
8969 Queueing the increment insn avoids the register shuffling
8970 that often results if we must increment now and first save
8971 the old value for subsequent use. */
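 /* Editorial illustration, not part of the original source: for a
 postincrement used as an argument, e.g.

 extern int i; extern void use (int);
 ... use (i++); ...

 the add is queued, so the current value of i feeds the call
 directly and the increment is emitted when the queue is flushed,
 avoiding a copy of the old value into a scratch register. */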
bbf6f052 8972
b93a436e
JL
8973#if 0 /* Turned off to avoid making extra insn for indexed memref. */
8974 op0 = stabilize (op0);
8975#endif
41dfd40c 8976
b93a436e
JL
8977 icode = (int) this_optab->handlers[(int) mode].insn_code;
8978 if (icode != (int) CODE_FOR_nothing
8979 /* Make sure that OP0 is valid for operands 0 and 1
8980 of the insn we want to queue. */
a995e389
RH
8981 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8982 && (*insn_data[icode].operand[1].predicate) (op0, mode))
b93a436e 8983 {
a995e389 8984 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 8985 op1 = force_reg (mode, op1);
bbf6f052 8986
b93a436e
JL
8987 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8988 }
8989 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8990 {
8991 rtx addr = (general_operand (XEXP (op0, 0), mode)
8992 ? force_reg (Pmode, XEXP (op0, 0))
8993 : copy_to_reg (XEXP (op0, 0)));
8994 rtx temp, result;
ca695ac9 8995
b93a436e
JL
8996 op0 = change_address (op0, VOIDmode, addr);
8997 temp = force_reg (GET_MODE (op0), op0);
a995e389 8998 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 8999 op1 = force_reg (mode, op1);
ca695ac9 9000
b93a436e
JL
9001 /* The increment queue is LIFO, thus we have to `queue'
9002 the instructions in reverse order. */
9003 enqueue_insn (op0, gen_move_insn (op0, temp));
9004 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
9005 return result;
bbf6f052
RK
9006 }
9007 }
ca695ac9 9008
b93a436e
JL
9009 /* Preincrement, or we can't increment with one simple insn. */
9010 if (post)
9011 /* Save a copy of the value before inc or dec, to return it later. */
9012 temp = value = copy_to_reg (op0);
9013 else
9014 /* Arrange to return the incremented value. */
9015 /* Copy the rtx because expand_binop will protect from the queue,
9016 and the results of that would be invalid for us to return
9017 if our caller does emit_queue before using our result. */
9018 temp = copy_rtx (value = op0);
bbf6f052 9019
b93a436e
JL
9020 /* Increment however we can. */
9021 op1 = expand_binop (mode, this_optab, value, op1,
7d384cc0 9022 current_function_check_memory_usage ? NULL_RTX : op0,
b93a436e
JL
9023 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9024 /* Make sure the value is stored into OP0. */
9025 if (op1 != op0)
9026 emit_move_insn (op0, op1);
5718612f 9027
b93a436e
JL
9028 return temp;
9029}
9030\f
9031/* Expand all function calls contained within EXP, innermost ones first.
9032 But don't look within expressions that have sequence points.
9033 For each CALL_EXPR, record the rtx for its value
9034 in the CALL_EXPR_RTL field. */
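/* Editorial illustration, not part of the original source: for a
 call such as

 g (f (x) + 1, h (y));

 the inner calls f and h are expanded first (their values recorded
 in CALL_EXPR_RTL), so a pending stack adjustment from one call
 cannot fall between the pushes of another call's arguments. */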
5718612f 9035
b93a436e
JL
9036static void
9037preexpand_calls (exp)
9038 tree exp;
9039{
9040 register int nops, i;
9041 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5718612f 9042
b93a436e
JL
9043 if (! do_preexpand_calls)
9044 return;
5718612f 9045
b93a436e 9046 /* Only expressions and references can contain calls. */
bbf6f052 9047
b93a436e
JL
9048 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9049 return;
bbf6f052 9050
b93a436e
JL
9051 switch (TREE_CODE (exp))
9052 {
9053 case CALL_EXPR:
9054 /* Do nothing if already expanded. */
9055 if (CALL_EXPR_RTL (exp) != 0
9056 /* Do nothing if the call returns a variable-sized object. */
 9057 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9058 /* Do nothing to built-in functions. */
9059 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9060 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9061 == FUNCTION_DECL)
9062 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9063 return;
bbf6f052 9064
b93a436e
JL
9065 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9066 return;
bbf6f052 9067
b93a436e
JL
9068 case COMPOUND_EXPR:
9069 case COND_EXPR:
9070 case TRUTH_ANDIF_EXPR:
9071 case TRUTH_ORIF_EXPR:
9072 /* If we find one of these, then we can be sure
9073 the adjust will be done for it (since it makes jumps).
9074 Do it now, so that if this is inside an argument
9075 of a function, we don't get the stack adjustment
9076 after some other args have already been pushed. */
9077 do_pending_stack_adjust ();
9078 return;
bbf6f052 9079
b93a436e
JL
9080 case BLOCK:
9081 case RTL_EXPR:
9082 case WITH_CLEANUP_EXPR:
9083 case CLEANUP_POINT_EXPR:
9084 case TRY_CATCH_EXPR:
9085 return;
bbf6f052 9086
b93a436e
JL
9087 case SAVE_EXPR:
9088 if (SAVE_EXPR_RTL (exp) != 0)
9089 return;
9090
9091 default:
9092 break;
ca695ac9 9093 }
bbf6f052 9094
b93a436e
JL
9095 nops = tree_code_length[(int) TREE_CODE (exp)];
9096 for (i = 0; i < nops; i++)
9097 if (TREE_OPERAND (exp, i) != 0)
9098 {
19832c77
MM
9099 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9100 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9101 It doesn't happen before the call is made. */
9102 ;
9103 else
9104 {
9105 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9106 if (type == 'e' || type == '<' || type == '1' || type == '2'
9107 || type == 'r')
9108 preexpand_calls (TREE_OPERAND (exp, i));
9109 }
b93a436e
JL
9110 }
9111}
9112\f
9113/* At the start of a function, record that we have no previously-pushed
9114 arguments waiting to be popped. */
bbf6f052 9115
b93a436e
JL
9116void
9117init_pending_stack_adjust ()
9118{
9119 pending_stack_adjust = 0;
9120}
bbf6f052 9121
b93a436e 9122/* When exiting from a function, if safe, clear out any pending stack adjust
060fbabf
JL
9123 so the adjustment won't get done.
9124
9125 Note, if the current function calls alloca, then it must have a
9126 frame pointer regardless of the value of flag_omit_frame_pointer. */
bbf6f052 9127
b93a436e
JL
9128void
9129clear_pending_stack_adjust ()
9130{
9131#ifdef EXIT_IGNORE_STACK
9132 if (optimize > 0
060fbabf
JL
9133 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9134 && EXIT_IGNORE_STACK
b93a436e
JL
9135 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9136 && ! flag_inline_functions)
9137 pending_stack_adjust = 0;
9138#endif
9139}
bbf6f052 9140
b93a436e
JL
9141/* Pop any previously-pushed arguments that have not been popped yet. */
9142
9143void
9144do_pending_stack_adjust ()
9145{
9146 if (inhibit_defer_pop == 0)
ca695ac9 9147 {
b93a436e
JL
9148 if (pending_stack_adjust != 0)
9149 adjust_stack (GEN_INT (pending_stack_adjust));
9150 pending_stack_adjust = 0;
bbf6f052 9151 }
bbf6f052
RK
9152}
9153\f
b93a436e 9154/* Expand conditional expressions. */
bbf6f052 9155
b93a436e
JL
9156/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9157 LABEL is an rtx of code CODE_LABEL, in this function and all the
9158 functions here. */
bbf6f052 9159
b93a436e
JL
9160void
9161jumpifnot (exp, label)
ca695ac9 9162 tree exp;
b93a436e 9163 rtx label;
bbf6f052 9164{
b93a436e
JL
9165 do_jump (exp, label, NULL_RTX);
9166}
bbf6f052 9167
b93a436e 9168/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
ca695ac9 9169
b93a436e
JL
9170void
9171jumpif (exp, label)
9172 tree exp;
9173 rtx label;
9174{
9175 do_jump (exp, NULL_RTX, label);
9176}
ca695ac9 9177
b93a436e
JL
9178/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9179 the result is zero, or IF_TRUE_LABEL if the result is one.
9180 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9181 meaning fall through in that case.
ca695ac9 9182
b93a436e
JL
9183 do_jump always does any pending stack adjust except when it does not
9184 actually perform a jump. An example where there is no jump
9185 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
ca695ac9 9186
b93a436e
JL
9187 This function is responsible for optimizing cases such as
9188 &&, || and comparison operators in EXP. */
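/* Editorial illustration, not part of the original source: for

 if (a > 0 && b > 0) f ();

 do_jump sees a TRUTH_ANDIF_EXPR with IF_FALSE_LABEL at the join
 point; "a > 0" is tested first and a false result branches there
 directly, so "b > 0" is never evaluated. */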
5718612f 9189
b93a436e
JL
9190void
9191do_jump (exp, if_false_label, if_true_label)
9192 tree exp;
9193 rtx if_false_label, if_true_label;
9194{
9195 register enum tree_code code = TREE_CODE (exp);
9196 /* Some cases need to create a label to jump to
9197 in order to properly fall through.
9198 These cases set DROP_THROUGH_LABEL nonzero. */
9199 rtx drop_through_label = 0;
9200 rtx temp;
b93a436e
JL
9201 int i;
9202 tree type;
9203 enum machine_mode mode;
ca695ac9 9204
dbecbbe4
JL
9205#ifdef MAX_INTEGER_COMPUTATION_MODE
9206 check_max_integer_computation_mode (exp);
9207#endif
9208
b93a436e 9209 emit_queue ();
ca695ac9 9210
b93a436e 9211 switch (code)
ca695ac9 9212 {
b93a436e 9213 case ERROR_MARK:
ca695ac9 9214 break;
bbf6f052 9215
b93a436e
JL
9216 case INTEGER_CST:
9217 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9218 if (temp)
9219 emit_jump (temp);
9220 break;
bbf6f052 9221
b93a436e
JL
9222#if 0
9223 /* This is not true with #pragma weak */
9224 case ADDR_EXPR:
9225 /* The address of something can never be zero. */
9226 if (if_true_label)
9227 emit_jump (if_true_label);
9228 break;
9229#endif
bbf6f052 9230
b93a436e
JL
9231 case NOP_EXPR:
9232 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9233 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9234 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9235 goto normal;
9236 case CONVERT_EXPR:
9237 /* If we are narrowing the operand, we have to do the compare in the
9238 narrower mode. */
9239 if ((TYPE_PRECISION (TREE_TYPE (exp))
9240 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9241 goto normal;
9242 case NON_LVALUE_EXPR:
9243 case REFERENCE_EXPR:
9244 case ABS_EXPR:
9245 case NEGATE_EXPR:
9246 case LROTATE_EXPR:
9247 case RROTATE_EXPR:
9248 /* These cannot change zero->non-zero or vice versa. */
9249 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9250 break;
bbf6f052 9251
14a774a9
RK
9252 case WITH_RECORD_EXPR:
9253 /* Put the object on the placeholder list, recurse through our first
9254 operand, and pop the list. */
9255 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9256 placeholder_list);
9257 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9258 placeholder_list = TREE_CHAIN (placeholder_list);
9259 break;
9260
b93a436e
JL
9261#if 0
9262 /* This is never less insns than evaluating the PLUS_EXPR followed by
9263 a test and can be longer if the test is eliminated. */
9264 case PLUS_EXPR:
9265 /* Reduce to minus. */
9266 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9267 TREE_OPERAND (exp, 0),
9268 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9269 TREE_OPERAND (exp, 1))));
9270 /* Process as MINUS. */
ca695ac9 9271#endif
bbf6f052 9272
b93a436e
JL
9273 case MINUS_EXPR:
9274 /* Non-zero iff operands of minus differ. */
b30f05db
BS
9275 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9276 TREE_OPERAND (exp, 0),
9277 TREE_OPERAND (exp, 1)),
9278 NE, NE, if_false_label, if_true_label);
b93a436e 9279 break;
bbf6f052 9280
b93a436e
JL
9281 case BIT_AND_EXPR:
9282 /* If we are AND'ing with a small constant, do this comparison in the
9283 smallest type that fits. If the machine doesn't have comparisons
9284 that small, it will be converted back to the wider comparison.
9285 This helps if we are testing the sign bit of a narrower object.
9286 combine can't do this for us because it can't know whether a
9287 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
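 /* Editorial illustration, not part of the original source: for

 long x; ... if (x & 0x80) ...

 only the low byte matters: tree_floor_log2 (0x80) == 7, so the
 test is redone in the 8-bit type (a QImode compare) when byte
 access is cheap and such a compare exists. */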
bbf6f052 9288
b93a436e
JL
9289 if (! SLOW_BYTE_ACCESS
9290 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9291 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
05bccae2 9292 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
b93a436e
JL
9293 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9294 && (type = type_for_mode (mode, 1)) != 0
9295 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9296 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9297 != CODE_FOR_nothing))
9298 {
9299 do_jump (convert (type, exp), if_false_label, if_true_label);
9300 break;
9301 }
9302 goto normal;
bbf6f052 9303
b93a436e
JL
9304 case TRUTH_NOT_EXPR:
9305 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9306 break;
bbf6f052 9307
b93a436e
JL
9308 case TRUTH_ANDIF_EXPR:
9309 if (if_false_label == 0)
9310 if_false_label = drop_through_label = gen_label_rtx ();
9311 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9312 start_cleanup_deferral ();
9313 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9314 end_cleanup_deferral ();
9315 break;
bbf6f052 9316
b93a436e
JL
9317 case TRUTH_ORIF_EXPR:
9318 if (if_true_label == 0)
9319 if_true_label = drop_through_label = gen_label_rtx ();
9320 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9321 start_cleanup_deferral ();
9322 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9323 end_cleanup_deferral ();
9324 break;
bbf6f052 9325
b93a436e
JL
9326 case COMPOUND_EXPR:
9327 push_temp_slots ();
9328 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9329 preserve_temp_slots (NULL_RTX);
9330 free_temp_slots ();
9331 pop_temp_slots ();
9332 emit_queue ();
9333 do_pending_stack_adjust ();
9334 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9335 break;
bbf6f052 9336
b93a436e
JL
9337 case COMPONENT_REF:
9338 case BIT_FIELD_REF:
9339 case ARRAY_REF:
9340 {
9341 int bitsize, bitpos, unsignedp;
9342 enum machine_mode mode;
9343 tree type;
9344 tree offset;
9345 int volatilep = 0;
729a2125 9346 unsigned int alignment;
bbf6f052 9347
b93a436e
JL
9348 /* Get description of this reference. We don't actually care
9349 about the underlying object here. */
9350 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9351 &mode, &unsignedp, &volatilep,
9352 &alignment);
bbf6f052 9353
b93a436e
JL
9354 type = type_for_size (bitsize, unsignedp);
9355 if (! SLOW_BYTE_ACCESS
9356 && type != 0 && bitsize >= 0
9357 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9358 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9359 != CODE_FOR_nothing))
9360 {
9361 do_jump (convert (type, exp), if_false_label, if_true_label);
9362 break;
9363 }
9364 goto normal;
9365 }
bbf6f052 9366
b93a436e
JL
9367 case COND_EXPR:
9368 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9369 if (integer_onep (TREE_OPERAND (exp, 1))
9370 && integer_zerop (TREE_OPERAND (exp, 2)))
9371 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
bbf6f052 9372
b93a436e
JL
9373 else if (integer_zerop (TREE_OPERAND (exp, 1))
9374 && integer_onep (TREE_OPERAND (exp, 2)))
9375 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
bbf6f052 9376
b93a436e
JL
9377 else
9378 {
9379 register rtx label1 = gen_label_rtx ();
9380 drop_through_label = gen_label_rtx ();
bbf6f052 9381
b93a436e 9382 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
bbf6f052 9383
b93a436e
JL
9384 start_cleanup_deferral ();
9385 /* Now the THEN-expression. */
9386 do_jump (TREE_OPERAND (exp, 1),
9387 if_false_label ? if_false_label : drop_through_label,
9388 if_true_label ? if_true_label : drop_through_label);
9389 /* In case the do_jump just above never jumps. */
9390 do_pending_stack_adjust ();
9391 emit_label (label1);
bbf6f052 9392
b93a436e
JL
9393 /* Now the ELSE-expression. */
9394 do_jump (TREE_OPERAND (exp, 2),
9395 if_false_label ? if_false_label : drop_through_label,
9396 if_true_label ? if_true_label : drop_through_label);
9397 end_cleanup_deferral ();
9398 }
9399 break;
bbf6f052 9400
b93a436e
JL
9401 case EQ_EXPR:
9402 {
9403 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 9404
9ec36da5
JL
9405 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9406 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
9407 {
9408 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9409 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9410 do_jump
9411 (fold
9412 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9413 fold (build (EQ_EXPR, TREE_TYPE (exp),
9414 fold (build1 (REALPART_EXPR,
9415 TREE_TYPE (inner_type),
9416 exp0)),
9417 fold (build1 (REALPART_EXPR,
9418 TREE_TYPE (inner_type),
9419 exp1)))),
9420 fold (build (EQ_EXPR, TREE_TYPE (exp),
9421 fold (build1 (IMAGPART_EXPR,
9422 TREE_TYPE (inner_type),
9423 exp0)),
9424 fold (build1 (IMAGPART_EXPR,
9425 TREE_TYPE (inner_type),
9426 exp1)))))),
9427 if_false_label, if_true_label);
9428 }
9ec36da5
JL
9429
9430 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9431 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9432
b93a436e 9433 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
1eb8759b 9434 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
b93a436e
JL
9435 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9436 else
b30f05db 9437 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
b93a436e
JL
9438 break;
9439 }
bbf6f052 9440
b93a436e
JL
9441 case NE_EXPR:
9442 {
9443 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 9444
9ec36da5
JL
9445 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9446 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
9447 {
9448 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9449 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9450 do_jump
9451 (fold
9452 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9453 fold (build (NE_EXPR, TREE_TYPE (exp),
9454 fold (build1 (REALPART_EXPR,
9455 TREE_TYPE (inner_type),
9456 exp0)),
9457 fold (build1 (REALPART_EXPR,
9458 TREE_TYPE (inner_type),
9459 exp1)))),
9460 fold (build (NE_EXPR, TREE_TYPE (exp),
9461 fold (build1 (IMAGPART_EXPR,
9462 TREE_TYPE (inner_type),
9463 exp0)),
9464 fold (build1 (IMAGPART_EXPR,
9465 TREE_TYPE (inner_type),
9466 exp1)))))),
9467 if_false_label, if_true_label);
9468 }
9ec36da5
JL
9469
9470 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9471 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9472
b93a436e 9473 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
1eb8759b 9474 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
b93a436e
JL
9475 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9476 else
b30f05db 9477 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
b93a436e
JL
9478 break;
9479 }
bbf6f052 9480
    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (LT, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
	do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (LE, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
	do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GT, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
	do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
	do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
	enum rtx_code cmp, rcmp;
	int do_rev;

	if (code == UNORDERED_EXPR)
	  cmp = UNORDERED, rcmp = ORDERED;
	else
	  cmp = ORDERED, rcmp = UNORDERED;
	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

	do_rev = 0;
	if (! can_compare_p (cmp, mode, ccp_jump)
	    && (can_compare_p (rcmp, mode, ccp_jump)
		/* If the target doesn't provide either UNORDERED or ORDERED
		   comparisons, canonicalize on UNORDERED for the library.  */
		|| rcmp == UNORDERED))
	  do_rev = 1;

	if (! do_rev)
	  do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
	else
	  do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;

    {
      enum rtx_code rcode1;
      enum tree_code tcode2;

      case UNLT_EXPR:
	rcode1 = UNLT;
	tcode2 = LT_EXPR;
	goto unordered_bcc;
      case UNLE_EXPR:
	rcode1 = UNLE;
	tcode2 = LE_EXPR;
	goto unordered_bcc;
      case UNGT_EXPR:
	rcode1 = UNGT;
	tcode2 = GT_EXPR;
	goto unordered_bcc;
      case UNGE_EXPR:
	rcode1 = UNGE;
	tcode2 = GE_EXPR;
	goto unordered_bcc;
      case UNEQ_EXPR:
	rcode1 = UNEQ;
	tcode2 = EQ_EXPR;
	goto unordered_bcc;

      unordered_bcc:
	mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
	if (can_compare_p (rcode1, mode, ccp_jump))
	  do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
			       if_true_label);
	else
	  {
	    tree op0 = save_expr (TREE_OPERAND (exp, 0));
	    tree op1 = save_expr (TREE_OPERAND (exp, 1));
	    tree cmp0, cmp1;

	    /* If the target doesn't support combined unordered
	       compares, decompose into UNORDERED + comparison.  */
	    cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
	    cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
	    exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
	    do_jump (exp, if_false_label, if_true_label);
	  }
    }
    break;

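    /* Illustrative sketch, not part of the compiler: the fallback above
       rewrites an IEEE unordered comparison as an explicit NaN test plus
       the ordered comparison.  For UNLT, at the C level (using the fact
       that a NaN compares unequal to itself):

	   UNLT (a, b)  <==>  a != a || b != b || a < b

       The other UN* codes decompose the same way, with save_expr again
       guaranteeing single evaluation of each operand.  */
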
    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is no longer needed and leads to poor code, since it makes
	 comparisons and tests from non-SI objects have different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
	{
	  rtx target = temp == const0_rtx ? if_false_label : if_true_label;
	  if (target)
	    emit_jump (target);
	}
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	       && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
	/* Note swapping the labels gives us not-equal.  */
	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
				 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				 GET_MODE (temp), NULL_RTX, 0,
				 if_false_label, if_true_label);
      else
	abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1,
				if_false_label, if_true_label);
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
			       (unsignedp || i > 0), word_mode, NULL_RTX, 0,
			       NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

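/* Illustrative sketch, not part of the compiler: the loop above is the
   word-serial comparison written out below for a hypothetical two-word
   unsigned value.  Once a high-order word differs the answer is known;
   lower words are examined only on equality, and always as unsigned.  */
#if 0
static int
example_gt_2words (h0, l0, h1, l1)
     unsigned long h0, l0, h1, l1;
{
  if (h0 > h1)
    return 1;			/* jump to if_true_label */
  if (h0 != h1)
    return 0;			/* jump to if_false_label */
  return l0 > l1;		/* low words, compared unsigned */
}
#endif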

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
			     operand_subword_force (op1, i, mode),
			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
			     word_mode, NULL_RTX, 0, if_false_label,
			     NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
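
/* Illustrative sketch, not part of the compiler: the per-word equality
   loop above, for a hypothetical two-word value.  Any word that differs
   sends control to if_false_label; only if every word matches does
   execution reach the jump to if_true_label.  */
#if 0
static int
example_eq_2words (l0, h0, l1, h1)
     unsigned long l0, h0, l1, h1;
{
  if (l0 != l1)
    return 0;			/* jump to if_false_label */
  if (h0 != h1)
    return 0;
  return 1;			/* jump to if_true_label */
}
#endif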

/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all the words have to
     be loaded from memory and this is a very wide item, it may be slower,
     but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, 0, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
			     const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
			     if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
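
/* Illustrative sketch, not part of the compiler: the "or" strategy above,
   for a hypothetical two-word value.  A multiword integer is zero exactly
   when the inclusive-or of all its words is zero, so one IOR chain plus a
   single compare replaces a series of per-word compares.  */
#if 0
static int
example_is_zero_2words (lo, hi)
     unsigned long lo, hi;
{
  return (lo | hi) == 0;
}
#endif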

/* Generate code for a comparison of OP0 and OP1 with rtx code CODE,
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}

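/* Illustrative sketch, not part of the compiler: moving a constant into
   the second operand position above is only sound because the condition
   is swapped along with the operands, using identities such as

       c <  x   <==>   x >  c
       c <= x   <==>   x >= c

   EQ and NE are symmetric and are left unchanged by swap_condition.  */
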
/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
			 if_false_label, if_true_label)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   MIN (align0, align1) / BITS_PER_UNIT,
			   if_false_label, if_true_label);
}

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

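  /* Illustrative sketch, not part of the compiler: the rewrites above rely
     on identities such as, for signed x,

	 x <  1    <==>   x <= 0
	 x <= -1   <==>   x <  0
	 x >  -1   <==>   x >= 0
	 x >= 1    <==>   x >  0

     so that the single-bit and sign tests below only ever see a comparison
     against zero.  */
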
  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && bitnum < TYPE_PRECISION (type)
	  && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
				   bitnum - TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

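  /* Illustrative sketch, not part of the compiler: the single-bit case
     above computes (x & (1 << n)) != 0 without a store-flag insn,
     essentially as

	 NE:  (x >> n) & 1
	 EQ:  ((x >> n) & 1) ^ 1

     with the AND omitted entirely when the tested bit is the sign bit,
     since an unsigned shift then leaves only that bit behind.  */
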
  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (code, operand_mode, ccp_store_flag))
    return 0;

  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is
     safe because, if emit_store_flag does anything, it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}

/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}
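
/* Illustrative sketch, not part of the compiler: the single GTU test at
   the top of do_tablejump checks both table bounds at once.  The caller
   has already subtracted the low bound, so an index below it wraps around
   to a huge unsigned value and fails the same unsigned comparison that
   rejects indexes above the high bound.  */
#if 0
static int
example_in_range (index, low, high)
     long index, low, high;
{
  return (unsigned long) (index - low) <= (unsigned long) (high - low);
}
#endif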

#endif /* HAVE_tablejump */