/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
#include "toplev.h"
#include "ggc.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

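/* Illustrative note (a sketch of target-dependent behavior, not part of
   the original file): on a machine whose stack grows downward,
   STACK_PUSH_CODE defaults to PRE_DEC, so a push of X is represented as
   (set (mem (pre_dec (reg sp))) X); on an upward-growing stack it is
   (set (mem (pre_inc (reg sp))) X) instead.  */
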
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int to_readonly;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int from_readonly;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

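/* Illustrative example (an assumption about a typical 32-bit target,
   not compiler code): for an 11-byte copy, move_by_pieces fills in one
   of these structures and emits two SImode moves, one HImode move and
   one QImode move, updating OFFSET (and the address registers, when
   AUTINC_* is set) by the size of each piece; see move_by_pieces and
   move_by_pieces_1 below.  */
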
extern struct obstack permanent_obstack;

static rtx get_push_address	PROTO ((int));

static rtx enqueue_insn	PROTO((rtx, rtx));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces	PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...),
				     enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p		PROTO((tree));
static int mostly_zeros_p	PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					    tree, tree, int, int));
static void store_constructor	PROTO((tree, rtx, int, int));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int,
				       int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree, int));
static int fixed_type_p		PROTO((tree));
static rtx var_rtx		PROTO((tree));
static rtx expand_increment	PROTO((tree, int, int));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_compare_and_jump	PROTO((tree, enum rtx_code, enum rtx_code, rtx, rtx));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns        \
                                       (SIZE, ALIGN) < MOVE_RATIO)
#endif

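/* Worked example (illustrative assumption: a 32-bit target where
   MOVE_RATIO takes its default of 15): MOVE_BY_PIECES_P (11, 4) asks
   move_by_pieces_ninsns how many piecewise moves an 11-byte copy
   needs -- two SImode, one HImode and one QImode, i.e. 4 -- and since
   4 < 15 the copy is expanded inline rather than through a movstr
   pattern or a library call.  */
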
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
  obfree (free_point);
}

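/* Illustrative note (an assumption about a typical target, not part of
   the original file): after this routine runs, direct_load[(int) SImode]
   is 1 exactly when some hard register can be loaded straight from an
   SImode memory reference; convert_move and convert_modes consult these
   arrays before referring to memory in a narrower mode in place.  */
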
/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  current_function->expr
    = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Small sanity check that the queue is empty at the end of a function.  */
void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_COPY_ATTRIBUTES (new, x);
	  MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
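
/* Usage sketch (hypothetical, not from this file): when expanding
   "y = x++", the expander can call enqueue_insn for the add that
   increments x, copy the pre-increment value into y, and rely on a
   later emit_queue to emit the add; any rtx that might contain a
   QUEUED must be passed through protect_from_queue before being used
   in an instruction, as the comments above require.  */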

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
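
/* Worked example (illustrative, 32-bit words): converting a signed
   SImode register to DImode with no extendsidi2-like insn available
   takes the multiword branch above -- the low word is copied via
   convert_to_mode, and the high word is filled with the sign,
   produced either by an slt-style insn or by an arithmetic right
   shift of the low part by GET_MODE_BITSIZE (SImode) - 1 = 31 bits.  */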

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
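
/* Usage sketch (hypothetical values): convert_modes (SImode, QImode,
   GEN_INT (-1), 1) must not sign-extend, so the CONST_INT path above
   masks the value to 0xff and returns GEN_INT (0xff), while a REG or
   MEM operand would instead go through gen_lowpart or be widened into
   a fresh pseudo by convert_move.  */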

/* This macro is used to determine the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);
  data.to_readonly = RTX_UNCHANGING_P (to);
  data.from_readonly = RTX_UNCHANGING_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
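
/* Worked example (illustrative assumption: 32-bit target, MOVE_MAX 4,
   ALIGN 4): for L = 11 the loop above counts 11 / 4 = 2 SImode moves
   leaving 3 bytes, then 3 / 2 = 1 HImode move leaving 1 byte, then
   1 / 1 = 1 QImode move, so the function returns 4.  */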

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;
      RTX_UNCHANGING_P (to1) = data->to_readonly;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;
      RTX_UNCHANGING_P (from1) = data->from_readonly;

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, from1));
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
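
/* Illustrative trace (an assumption about a HAVE_POST_INCREMENT
   target): with explicit_inc_to > 0, each iteration above emits the
   piece move through (mem (reg to_addr)) and then an add of SIZE to
   to_addr, mimicking a post-increment addressing mode by hand.  */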

/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && (insn_operand_predicate[(int) code][0] == 0
		  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
	      && (insn_operand_predicate[(int) code][1] == 0
		  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
	      && (insn_operand_predicate[(int) code][3] == 0
		  || (*insn_operand_predicate[(int) code][3]) (opalign,
							       VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      if (insn_operand_predicate[(int) code][2] != 0
		  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

      /* X, Y, or SIZE may have been passed through protect_from_queue.

	 It is unsafe to save the value generated by protect_from_queue
	 and reuse it later.  Consider what happens if emit_queue is
	 called before the return value from protect_from_queue is used.

	 Expansion of the CALL_EXPR below will call emit_queue before
	 we are finished emitting RTL for argument setup.  So if we are
	 not careful we could get the wrong value for an argument.

	 To avoid this problem we go ahead and emit code to copy X, Y &
	 SIZE into new pseudos.  We can then place those new pseudos
	 into an RTL_EXPR and use them later, even after a call to
	 emit_queue.

	 Note this is not strictly needed for library calls since they
	 do not call emit_queue before loading their arguments.  However,
	 we may need to have library calls call emit_queue in the future
	 since failing to do so could cause problems for targets which
	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different conventions
	 for returning pointers, we could end up generating incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  push_obstacks_nochange ();
	  end_temporary_allocation ();
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  ggc_add_tree_root (&fn, 1);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  make_decl_rtl (fn, NULL_PTR, 1);
	  assemble_external (fn);
	  pop_obstacks ();
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
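
/* Usage sketch (hypothetical, not from this file): to copy a 64-byte
   BLKmode object with 4-byte alignment, a caller builds MEM rtxen X
   and Y and calls emit_block_move (x, y, GEN_INT (64), 4); small
   constant sizes go through move_by_pieces, larger ones try a movstr
   pattern and finally fall back to memcpy (or bcopy) as above.  */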
1728\f
1729/* Copy all or part of a value X into registers starting at REGNO.
1730 The number of registers to be filled is NREGS. */
1731
1732void
1733move_block_to_reg (regno, x, nregs, mode)
1734 int regno;
1735 rtx x;
1736 int nregs;
1737 enum machine_mode mode;
1738{
1739 int i;
381127e8
RL
1740#ifdef HAVE_load_multiple
1741 rtx pat;
1742 rtx last;
1743#endif
bbf6f052 1744
72bb9717
RK
1745 if (nregs == 0)
1746 return;
1747
bbf6f052
RK
1748 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1749 x = validize_mem (force_const_mem (mode, x));
1750
1751 /* See if the machine can do this with a load multiple insn. */
1752#ifdef HAVE_load_multiple
c3a02afe 1753 if (HAVE_load_multiple)
bbf6f052 1754 {
c3a02afe 1755 last = get_last_insn ();
38a448ca 1756 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
c3a02afe
RK
1757 GEN_INT (nregs));
1758 if (pat)
1759 {
1760 emit_insn (pat);
1761 return;
1762 }
1763 else
1764 delete_insns_since (last);
bbf6f052 1765 }
bbf6f052
RK
1766#endif
1767
1768 for (i = 0; i < nregs; i++)
38a448ca 1769 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
bbf6f052
RK
1770 operand_subword_force (x, i, mode));
1771}
1772
1773/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
0040593d
JW
1774 The number of registers to be filled is NREGS. SIZE indicates the number
1775 of bytes in the object X. */
1776
bbf6f052
RK
1777
1778void
0040593d 1779move_block_from_reg (regno, x, nregs, size)
bbf6f052
RK
1780 int regno;
1781 rtx x;
1782 int nregs;
0040593d 1783 int size;
bbf6f052
RK
1784{
1785 int i;
381127e8
RL
1786#ifdef HAVE_store_multiple
1787 rtx pat;
1788 rtx last;
1789#endif
58a32c5c 1790 enum machine_mode mode;
bbf6f052 1791
58a32c5c
DE
1792 /* If SIZE is that of a mode no bigger than a word, just use that
1793 mode's store operation. */
1794 if (size <= UNITS_PER_WORD
1795 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1796 {
1797 emit_move_insn (change_address (x, mode, NULL),
38a448ca 1798 gen_rtx_REG (mode, regno));
58a32c5c
DE
1799 return;
1800 }
1801
0040593d 1802 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
58a32c5c
DE
1803 to the left before storing to memory. Note that the previous test
1804 doesn't handle all cases (e.g. SIZE == 3). */
0040593d
JW
1805 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1806 {
1807 rtx tem = operand_subword (x, 0, 1, BLKmode);
1808 rtx shift;
1809
1810 if (tem == 0)
1811 abort ();
1812
1813 shift = expand_shift (LSHIFT_EXPR, word_mode,
38a448ca 1814 gen_rtx_REG (word_mode, regno),
0040593d
JW
1815 build_int_2 ((UNITS_PER_WORD - size)
1816 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1817 emit_move_insn (tem, shift);
1818 return;
1819 }
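  /* For instance, with SIZE == 3 and UNITS_PER_WORD == 4 on a big-endian
     machine, the three meaningful bytes occupy the low-order 24 bits of
     the register, so the shift above moves them left by
     (4 - 3) * BITS_PER_UNIT == 8 bits before the word-wide store.  */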
1820
bbf6f052
RK
1821 /* See if the machine can do this with a store multiple insn. */
1822#ifdef HAVE_store_multiple
c3a02afe 1823 if (HAVE_store_multiple)
bbf6f052 1824 {
c3a02afe 1825 last = get_last_insn ();
38a448ca 1826 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
c3a02afe
RK
1827 GEN_INT (nregs));
1828 if (pat)
1829 {
1830 emit_insn (pat);
1831 return;
1832 }
1833 else
1834 delete_insns_since (last);
bbf6f052 1835 }
bbf6f052
RK
1836#endif
1837
1838 for (i = 0; i < nregs; i++)
1839 {
1840 rtx tem = operand_subword (x, i, 1, BLKmode);
1841
1842 if (tem == 0)
1843 abort ();
1844
38a448ca 1845 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
1846 }
1847}
1848
aac5cc16
RH
1849/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1850 registers represented by a PARALLEL. SSIZE represents the total size of
1851 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
 1852   SRC in bytes.  */
 1853/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1854 the balance will be in what would be the low-order memory addresses, i.e.
1855 left justified for big endian, right justified for little endian. This
1856 happens to be true for the targets currently using this support. If this
1857 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1858 would be needed. */
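/* As a concrete illustration (a sketch; the register numbers are made
   up), a DST for a 16-byte structure returned half in an integer
   register and half in a floating register might look like

     (parallel [(expr_list (reg:DI 4) (const_int 0))
		(expr_list (reg:DF 33) (const_int 8))])

   meaning bytes 0-7 of SRC are loaded into (reg:DI 4) and bytes 8-15
   into (reg:DF 33).  */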
fffa9c1d
JW
1859
1860void
aac5cc16
RH
1861emit_group_load (dst, orig_src, ssize, align)
1862 rtx dst, orig_src;
1863 int align, ssize;
fffa9c1d 1864{
aac5cc16
RH
1865 rtx *tmps, src;
1866 int start, i;
fffa9c1d 1867
aac5cc16 1868 if (GET_CODE (dst) != PARALLEL)
fffa9c1d
JW
1869 abort ();
1870
1871 /* Check for a NULL entry, used to indicate that the parameter goes
1872 both on the stack and in registers. */
aac5cc16
RH
1873 if (XEXP (XVECEXP (dst, 0, 0), 0))
1874 start = 0;
fffa9c1d 1875 else
aac5cc16
RH
1876 start = 1;
1877
 1878  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));
1879
1880 /* If we won't be loading directly from memory, protect the real source
1881 from strange tricks we might play. */
1882 src = orig_src;
1883 if (GET_CODE (src) != MEM)
1884 {
1885 src = gen_reg_rtx (GET_MODE (orig_src));
1886 emit_move_insn (src, orig_src);
1887 }
1888
1889 /* Process the pieces. */
1890 for (i = start; i < XVECLEN (dst, 0); i++)
1891 {
1892 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1893 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1894 int bytelen = GET_MODE_SIZE (mode);
1895 int shift = 0;
1896
1897 /* Handle trailing fragments that run over the size of the struct. */
1898 if (ssize >= 0 && bytepos + bytelen > ssize)
1899 {
1900 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1901 bytelen = ssize - bytepos;
1902 if (bytelen <= 0)
 1903	    abort ();
1904 }
1905
1906 /* Optimize the access just a bit. */
1907 if (GET_CODE (src) == MEM
1908 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1909 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1910 && bytelen == GET_MODE_SIZE (mode))
1911 {
1912 tmps[i] = gen_reg_rtx (mode);
1913 emit_move_insn (tmps[i],
1914 change_address (src, mode,
1915 plus_constant (XEXP (src, 0),
1916 bytepos)));
fffa9c1d 1917 }
7c4a6db0
JW
1918 else if (GET_CODE (src) == CONCAT)
1919 {
1920 if (bytepos == 0
1921 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1922 tmps[i] = XEXP (src, 0);
1923 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1924 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1925 tmps[i] = XEXP (src, 1);
1926 else
1927 abort ();
1928 }
fffa9c1d 1929 else
aac5cc16
RH
1930 {
1931 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1932 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1933 mode, mode, align, ssize);
1934 }
fffa9c1d 1935
aac5cc16
RH
1936 if (BYTES_BIG_ENDIAN && shift)
1937 {
 1938	  tmps[i] = expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
 1939				  tmps[i], 0, OPTAB_WIDEN);
1940 }
fffa9c1d 1941 }
aac5cc16
RH
 1942  emit_queue ();
1943
1944 /* Copy the extracted pieces into the proper (probable) hard regs. */
1945 for (i = start; i < XVECLEN (dst, 0); i++)
1946 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
fffa9c1d
JW
1947}
1948
aac5cc16
RH
1949/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1950 registers represented by a PARALLEL. SSIZE represents the total size of
 1951   block DST, or -1 if not known.  ALIGN is the known alignment of DST in bytes.  */
fffa9c1d
JW
1952
1953void
aac5cc16
RH
1954emit_group_store (orig_dst, src, ssize, align)
1955 rtx orig_dst, src;
1956 int ssize, align;
fffa9c1d 1957{
aac5cc16
RH
1958 rtx *tmps, dst;
1959 int start, i;
fffa9c1d 1960
aac5cc16 1961 if (GET_CODE (src) != PARALLEL)
fffa9c1d
JW
1962 abort ();
1963
1964 /* Check for a NULL entry, used to indicate that the parameter goes
1965 both on the stack and in registers. */
aac5cc16
RH
1966 if (XEXP (XVECEXP (src, 0, 0), 0))
1967 start = 0;
fffa9c1d 1968 else
aac5cc16
RH
1969 start = 1;
1970
 1971  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));
fffa9c1d 1972
aac5cc16
RH
1973 /* Copy the (probable) hard regs into pseudos. */
1974 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 1975 {
aac5cc16
RH
1976 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1977 tmps[i] = gen_reg_rtx (GET_MODE (reg));
1978 emit_move_insn (tmps[i], reg);
1979 }
 1980  emit_queue ();
fffa9c1d 1981
aac5cc16
RH
1982 /* If we won't be storing directly into memory, protect the real destination
1983 from strange tricks we might play. */
1984 dst = orig_dst;
10a9f2be
JW
1985 if (GET_CODE (dst) == PARALLEL)
1986 {
1987 rtx temp;
1988
1989 /* We can get a PARALLEL dst if there is a conditional expression in
1990 a return statement. In that case, the dst and src are the same,
1991 so no action is necessary. */
1992 if (rtx_equal_p (dst, src))
1993 return;
1994
1995 /* It is unclear if we can ever reach here, but we may as well handle
1996 it. Allocate a temporary, and split this into a store/load to/from
1997 the temporary. */
1998
1999 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2000 emit_group_store (temp, src, ssize, align);
2001 emit_group_load (dst, temp, ssize, align);
2002 return;
2003 }
2004 else if (GET_CODE (dst) != MEM)
aac5cc16
RH
2005 {
2006 dst = gen_reg_rtx (GET_MODE (orig_dst));
2007 /* Make life a bit easier for combine. */
2008 emit_move_insn (dst, const0_rtx);
2009 }
2010 else if (! MEM_IN_STRUCT_P (dst))
2011 {
2012 /* store_bit_field requires that memory operations have
2013 mem_in_struct_p set; we might not. */
fffa9c1d 2014
aac5cc16 2015 dst = copy_rtx (orig_dst);
c6df88cb 2016 MEM_SET_IN_STRUCT_P (dst, 1);
aac5cc16
RH
2017 }
2018
2019 /* Process the pieces. */
2020 for (i = start; i < XVECLEN (src, 0); i++)
2021 {
2022 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2023 enum machine_mode mode = GET_MODE (tmps[i]);
2024 int bytelen = GET_MODE_SIZE (mode);
2025
2026 /* Handle trailing fragments that run over the size of the struct. */
2027 if (ssize >= 0 && bytepos + bytelen > ssize)
71bc0330 2028 {
aac5cc16
RH
2029 if (BYTES_BIG_ENDIAN)
2030 {
2031 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
 2032	      tmps[i] = expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
 2033				      tmps[i], 0, OPTAB_WIDEN);
2034 }
2035 bytelen = ssize - bytepos;
71bc0330 2036 }
fffa9c1d 2037
aac5cc16
RH
2038 /* Optimize the access just a bit. */
2039 if (GET_CODE (dst) == MEM
2040 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2041 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2042 && bytelen == GET_MODE_SIZE (mode))
2043 {
2044 emit_move_insn (change_address (dst, mode,
2045 plus_constant (XEXP (dst, 0),
2046 bytepos)),
2047 tmps[i]);
2048 }
2049 else
2050 {
2051 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2052 mode, tmps[i], align, ssize);
2053 }
fffa9c1d 2054 }
aac5cc16
RH
 2055  emit_queue ();
2056
2057 /* Copy from the pseudo into the (probable) hard reg. */
2058 if (GET_CODE (dst) == REG)
2059 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
2060}
2061
c36fce9a
GRK
2062/* Generate code to copy a BLKmode object of TYPE out of a
2063 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2064 is null, a stack temporary is created. TGTBLK is returned.
2065
2066 The primary purpose of this routine is to handle functions
2067 that return BLKmode structures in registers. Some machines
2068 (the PA for example) want to return all small structures
2069 in registers regardless of the structure's alignment.
2070 */
2071
2072rtx
 2073copy_blkmode_from_reg (tgtblk, srcreg, type)
2074 rtx tgtblk;
2075 rtx srcreg;
2076 tree type;
2077{
2078 int bytes = int_size_in_bytes (type);
2079 rtx src = NULL, dst = NULL;
c84e2712 2080 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
c36fce9a
GRK
2081 int bitpos, xbitpos, big_endian_correction = 0;
2082
2083 if (tgtblk == 0)
2084 {
2085 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
c6df88cb 2086 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
c36fce9a
GRK
2087 preserve_temp_slots (tgtblk);
2088 }
2089
2090 /* This code assumes srcreg is at least a full word. If it isn't,
2091 copy it into a new pseudo which is a full word. */
2092 if (GET_MODE (srcreg) != BLKmode
2093 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2094 srcreg = convert_to_mode (word_mode, srcreg,
2095 TREE_UNSIGNED (type));
2096
2097 /* Structures whose size is not a multiple of a word are aligned
2098 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2099 machine, this means we must skip the empty high order bytes when
2100 calculating the bit offset. */
2101 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2102 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2103 * BITS_PER_UNIT));
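  /* For example, a 6-byte structure on a 32-bit big-endian machine has
     bytes % UNITS_PER_WORD == 2 significant bytes in its last word, so
     the correction is 32 - 2 * 8 == 16 bits of padding to skip.  */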
2104
 2105  /* Copy the structure BITSIZE bits at a time.
2106
2107 We could probably emit more efficient code for machines
2108 which do not use strict alignment, but it doesn't seem
2109 worth the effort at the current time. */
2110 for (bitpos = 0, xbitpos = big_endian_correction;
2111 bitpos < bytes * BITS_PER_UNIT;
2112 bitpos += bitsize, xbitpos += bitsize)
2113 {
2114
2115 /* We need a new source operand each time xbitpos is on a
2116 word boundary and when xbitpos == big_endian_correction
2117 (the first time through). */
2118 if (xbitpos % BITS_PER_WORD == 0
2119 || xbitpos == big_endian_correction)
2120 src = operand_subword_force (srcreg,
2121 xbitpos / BITS_PER_WORD,
2122 BLKmode);
2123
2124 /* We need a new destination operand each time bitpos is on
2125 a word boundary. */
2126 if (bitpos % BITS_PER_WORD == 0)
2127 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2128
2129 /* Use xbitpos for the source extraction (right justified) and
 2130	 bitpos for the destination store (left justified).  */
2131 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2132 extract_bit_field (src, bitsize,
2133 xbitpos % BITS_PER_WORD, 1,
2134 NULL_RTX, word_mode,
2135 word_mode,
2136 bitsize / BITS_PER_UNIT,
2137 BITS_PER_WORD),
2138 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2139 }
2140 return tgtblk;
2141}
2142
2143
94b25f81
RK
2144/* Add a USE expression for REG to the (possibly empty) list pointed
2145 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2146
2147void
b3f8cf4a
RK
2148use_reg (call_fusage, reg)
2149 rtx *call_fusage, reg;
2150{
0304dfbb
DE
2151 if (GET_CODE (reg) != REG
2152 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
b3f8cf4a
RK
 2153    abort ();
2154
2155 *call_fusage
38a448ca
RH
2156 = gen_rtx_EXPR_LIST (VOIDmode,
2157 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2158}
2159
94b25f81
RK
2160/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2161 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2162
2163void
0304dfbb
DE
2164use_regs (call_fusage, regno, nregs)
2165 rtx *call_fusage;
bbf6f052
RK
2166 int regno;
2167 int nregs;
2168{
0304dfbb 2169 int i;
bbf6f052 2170
0304dfbb
DE
2171 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2172 abort ();
2173
2174 for (i = 0; i < nregs; i++)
38a448ca 2175 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
bbf6f052 2176}
fffa9c1d
JW
2177
2178/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2179 PARALLEL REGS. This is for calls that pass values in multiple
2180 non-contiguous locations. The Irix 6 ABI has examples of this. */
2181
2182void
2183use_group_regs (call_fusage, regs)
2184 rtx *call_fusage;
2185 rtx regs;
2186{
2187 int i;
2188
6bd35f86
DE
2189 for (i = 0; i < XVECLEN (regs, 0); i++)
2190 {
2191 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2192
6bd35f86
DE
2193 /* A NULL entry means the parameter goes both on the stack and in
2194 registers. This can also be a MEM for targets that pass values
2195 partially on the stack and partially in registers. */
e9a25f70 2196 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
2197 use_reg (call_fusage, reg);
2198 }
fffa9c1d 2199}
bbf6f052 2200\f
9de08200
RK
2201/* Generate several move instructions to clear LEN bytes of block TO.
2202 (A MEM rtx with BLKmode). The caller must pass TO through
 2203   protect_from_queue before calling.  ALIGN (in bytes) is the maximum alignment
2204 we can assume. */
2205
2206static void
2207clear_by_pieces (to, len, align)
2208 rtx to;
2209 int len, align;
2210{
2211 struct clear_by_pieces data;
2212 rtx to_addr = XEXP (to, 0);
fbe1758d
AM
2213 int max_size = MOVE_MAX_PIECES + 1;
2214 enum machine_mode mode = VOIDmode, tmode;
2215 enum insn_code icode;
9de08200
RK
2216
2217 data.offset = 0;
2218 data.to_addr = to_addr;
2219 data.to = to;
2220 data.autinc_to
2221 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2222 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2223
2224 data.explicit_inc_to = 0;
2225 data.reverse
2226 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2227 if (data.reverse) data.offset = len;
2228 data.len = len;
2229
2230 data.to_struct = MEM_IN_STRUCT_P (to);
2231
 2232  /* If storing requires more than two move insns,
2233 copy addresses to registers (to make displacements shorter)
2234 and use post-increment if available. */
2235 if (!data.autinc_to
2236 && move_by_pieces_ninsns (len, align) > 2)
2237 {
fbe1758d
AM
2238 /* Determine the main mode we'll be using */
2239 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2240 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2241 if (GET_MODE_SIZE (tmode) < max_size)
2242 mode = tmode;
2243
2244 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
9de08200
RK
2245 {
2246 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2247 data.autinc_to = 1;
2248 data.explicit_inc_to = -1;
2249 }
fbe1758d 2250 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
9de08200
RK
2251 {
2252 data.to_addr = copy_addr_to_reg (to_addr);
2253 data.autinc_to = 1;
2254 data.explicit_inc_to = 1;
2255 }
9de08200
RK
2256 if (!data.autinc_to && CONSTANT_P (to_addr))
2257 data.to_addr = copy_addr_to_reg (to_addr);
2258 }
2259
2260 if (! SLOW_UNALIGNED_ACCESS
2261 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2262 align = MOVE_MAX;
2263
2264 /* First move what we can in the largest integer mode, then go to
2265 successively smaller modes. */
2266
2267 while (max_size > 1)
2268 {
9de08200
RK
2269 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2270 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2271 if (GET_MODE_SIZE (tmode) < max_size)
2272 mode = tmode;
2273
2274 if (mode == VOIDmode)
2275 break;
2276
2277 icode = mov_optab->handlers[(int) mode].insn_code;
2278 if (icode != CODE_FOR_nothing
2279 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2280 GET_MODE_SIZE (mode)))
2281 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2282
2283 max_size = GET_MODE_SIZE (mode);
2284 }
2285
2286 /* The code above should have handled everything. */
2287 if (data.len != 0)
2288 abort ();
2289}
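/* As an illustration, with 4-byte words, MOVE_MAX_PIECES covering a
   word, and adequate alignment, clearing LEN == 7 this way emits one
   SImode store, then one HImode store, then one QImode store.  */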
2290
2291/* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2292 with move instructions for mode MODE. GENFUN is the gen_... function
2293 to make a move insn for that mode. DATA has all the other info. */
2294
2295static void
2296clear_by_pieces_1 (genfun, mode, data)
eae4b970 2297 rtx (*genfun) PROTO ((rtx, ...));
9de08200
RK
2298 enum machine_mode mode;
2299 struct clear_by_pieces *data;
2300{
2301 register int size = GET_MODE_SIZE (mode);
2302 register rtx to1;
2303
2304 while (data->len >= size)
2305 {
2306 if (data->reverse) data->offset -= size;
2307
2308 to1 = (data->autinc_to
38a448ca 2309 ? gen_rtx_MEM (mode, data->to_addr)
effbcc6a
RK
2310 : copy_rtx (change_address (data->to, mode,
2311 plus_constant (data->to_addr,
2312 data->offset))));
9de08200
RK
2313 MEM_IN_STRUCT_P (to1) = data->to_struct;
2314
940da324 2315 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
9de08200 2316 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
9de08200
RK
2317
2318 emit_insn ((*genfun) (to1, const0_rtx));
940da324 2319 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2320 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
9de08200
RK
2321
2322 if (! data->reverse) data->offset += size;
2323
2324 data->len -= size;
2325 }
2326}
2327\f
bbf6f052 2328/* Write zeros through the storage of OBJECT.
9de08200 2329 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
e9a25f70 2330   the maximum alignment we can assume it has, measured in bytes.
bbf6f052 2331
e9a25f70
JL
2332 If we call a function that returns the length of the block, return it. */
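/* A typical use is (a sketch; the size and alignment are made up)

     clear_storage (object, GEN_INT (16), UNITS_PER_WORD);

   to zero a 16-byte BLKmode object known to be word-aligned.  */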
2333
2334rtx
9de08200 2335clear_storage (object, size, align)
bbf6f052 2336 rtx object;
4c08eef0 2337 rtx size;
9de08200 2338 int align;
bbf6f052 2339{
52cf7115
JL
2340#ifdef TARGET_MEM_FUNCTIONS
2341 static tree fn;
2342 tree call_expr, arg_list;
2343#endif
e9a25f70
JL
2344 rtx retval = 0;
2345
bbf6f052
RK
2346 if (GET_MODE (object) == BLKmode)
2347 {
9de08200
RK
2348 object = protect_from_queue (object, 1);
2349 size = protect_from_queue (size, 0);
2350
2351 if (GET_CODE (size) == CONST_INT
fbe1758d 2352 && MOVE_BY_PIECES_P (INTVAL (size), align))
9de08200
RK
2353 clear_by_pieces (object, INTVAL (size), align);
2354
2355 else
2356 {
2357 /* Try the most limited insn first, because there's no point
2358 including more than one in the machine description unless
2359 the more limited one has some advantage. */
2360
2361 rtx opalign = GEN_INT (align);
2362 enum machine_mode mode;
2363
2364 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2365 mode = GET_MODE_WIDER_MODE (mode))
2366 {
2367 enum insn_code code = clrstr_optab[(int) mode];
2368
2369 if (code != CODE_FOR_nothing
2370 /* We don't need MODE to be narrower than
2371 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2372 the mode mask, as it is returned by the macro, it will
2373 definitely be less than the actual mode mask. */
2374 && ((GET_CODE (size) == CONST_INT
2375 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 2376 <= (GET_MODE_MASK (mode) >> 1)))
9de08200
RK
2377 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2378 && (insn_operand_predicate[(int) code][0] == 0
2379 || (*insn_operand_predicate[(int) code][0]) (object,
2380 BLKmode))
2381 && (insn_operand_predicate[(int) code][2] == 0
2382 || (*insn_operand_predicate[(int) code][2]) (opalign,
2383 VOIDmode)))
2384 {
2385 rtx op1;
2386 rtx last = get_last_insn ();
2387 rtx pat;
2388
2389 op1 = convert_to_mode (mode, size, 1);
2390 if (insn_operand_predicate[(int) code][1] != 0
2391 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2392 mode))
2393 op1 = copy_to_mode_reg (mode, op1);
2394
2395 pat = GEN_FCN ((int) code) (object, op1, opalign);
2396 if (pat)
2397 {
2398 emit_insn (pat);
e9a25f70 2399 return 0;
9de08200
RK
2400 }
2401 else
2402 delete_insns_since (last);
2403 }
2404 }
2405
4bc973ae 2406 /* OBJECT or SIZE may have been passed through protect_from_queue.
9de08200 2407
4bc973ae
JL
2408 It is unsafe to save the value generated by protect_from_queue
2409 and reuse it later. Consider what happens if emit_queue is
2410 called before the return value from protect_from_queue is used.
52cf7115 2411
4bc973ae
JL
2412 Expansion of the CALL_EXPR below will call emit_queue before
2413 we are finished emitting RTL for argument setup. So if we are
2414 not careful we could get the wrong value for an argument.
52cf7115 2415
4bc973ae
JL
2416 To avoid this problem we go ahead and emit code to copy OBJECT
2417 and SIZE into new pseudos. We can then place those new pseudos
2418 into an RTL_EXPR and use them later, even after a call to
2419 emit_queue.
52cf7115 2420
4bc973ae
JL
2421 Note this is not strictly needed for library calls since they
2422 do not call emit_queue before loading their arguments. However,
2423 we may need to have library calls call emit_queue in the future
2424 since failing to do so could cause problems for targets which
2425 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2426 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 2427
4bc973ae
JL
2428#ifdef TARGET_MEM_FUNCTIONS
2429 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2430#else
2431 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2432 TREE_UNSIGNED (integer_type_node));
f3dc586a 2433 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
4bc973ae 2434#endif
52cf7115 2435
52cf7115 2436
4bc973ae
JL
2437#ifdef TARGET_MEM_FUNCTIONS
2438 /* It is incorrect to use the libcall calling conventions to call
2439 memset in this context.
52cf7115 2440
4bc973ae
JL
2441 This could be a user call to memset and the user may wish to
2442 examine the return value from memset.
52cf7115 2443
4bc973ae
JL
2444 For targets where libcalls and normal calls have different
2445 conventions for returning pointers, we could end up generating
2446 incorrect code.
2447
2448 So instead of using a libcall sequence we build up a suitable
2449 CALL_EXPR and expand the call in the normal fashion. */
2450 if (fn == NULL_TREE)
2451 {
2452 tree fntype;
2453
 2454	  /* This was copied from except.c; I don't know if all this is
2455 necessary in this context or not. */
2456 fn = get_identifier ("memset");
2457 push_obstacks_nochange ();
2458 end_temporary_allocation ();
2459 fntype = build_pointer_type (void_type_node);
2460 fntype = build_function_type (fntype, NULL_TREE);
2461 fn = build_decl (FUNCTION_DECL, fn, fntype);
d7db6646 2462 ggc_add_tree_root (&fn, 1);
4bc973ae
JL
2463 DECL_EXTERNAL (fn) = 1;
2464 TREE_PUBLIC (fn) = 1;
2465 DECL_ARTIFICIAL (fn) = 1;
2466 make_decl_rtl (fn, NULL_PTR, 1);
2467 assemble_external (fn);
2468 pop_obstacks ();
2469 }
2470
2471 /* We need to make an argument list for the function call.
2472
 2473	 memset has three arguments: the first is a void * address, the
 2474	 second an integer with the initialization value, and the last a
 2475	 size_t byte count for the copy.  */
2476 arg_list
2477 = build_tree_list (NULL_TREE,
2478 make_tree (build_pointer_type (void_type_node),
2479 object));
2480 TREE_CHAIN (arg_list)
2481 = build_tree_list (NULL_TREE,
2482 make_tree (integer_type_node, const0_rtx));
2483 TREE_CHAIN (TREE_CHAIN (arg_list))
2484 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2485 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
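	  /* The list built above corresponds to the C call

	       memset ((void *) OBJECT, 0, (size_t) SIZE);

	     with the zero supplied as an integer_type_node constant.  */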
2486
2487 /* Now we have to build up the CALL_EXPR itself. */
2488 call_expr = build1 (ADDR_EXPR,
2489 build_pointer_type (TREE_TYPE (fn)), fn);
2490 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2491 call_expr, arg_list, NULL_TREE);
2492 TREE_SIDE_EFFECTS (call_expr) = 1;
2493
2494 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
bbf6f052 2495#else
9de08200 2496 emit_library_call (bzero_libfunc, 0,
fe7bbd2a 2497 VOIDmode, 2, object, Pmode, size,
9de08200 2498 TYPE_MODE (integer_type_node));
bbf6f052 2499#endif
9de08200 2500 }
bbf6f052
RK
2501 }
2502 else
66ed0683 2503 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
e9a25f70
JL
2504
2505 return retval;
bbf6f052
RK
2506}
2507
2508/* Generate code to copy Y into X.
2509 Both Y and X must have the same mode, except that
2510 Y can be a constant with VOIDmode.
2511 This mode cannot be BLKmode; use emit_block_move for that.
2512
2513 Return the last instruction emitted. */
2514
2515rtx
2516emit_move_insn (x, y)
2517 rtx x, y;
2518{
2519 enum machine_mode mode = GET_MODE (x);
bbf6f052
RK
2520
2521 x = protect_from_queue (x, 1);
2522 y = protect_from_queue (y, 0);
2523
2524 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2525 abort ();
2526
ee5332b8
RH
2527 /* Never force constant_p_rtx to memory. */
2528 if (GET_CODE (y) == CONSTANT_P_RTX)
2529 ;
2530 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
bbf6f052
RK
2531 y = force_const_mem (mode, y);
2532
2533 /* If X or Y are memory references, verify that their addresses are valid
2534 for the machine. */
2535 if (GET_CODE (x) == MEM
2536 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2537 && ! push_operand (x, GET_MODE (x)))
2538 || (flag_force_addr
2539 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2540 x = change_address (x, VOIDmode, XEXP (x, 0));
2541
2542 if (GET_CODE (y) == MEM
2543 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2544 || (flag_force_addr
2545 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2546 y = change_address (y, VOIDmode, XEXP (y, 0));
2547
2548 if (mode == BLKmode)
2549 abort ();
2550
261c4230
RS
2551 return emit_move_insn_1 (x, y);
2552}
2553
2554/* Low level part of emit_move_insn.
2555 Called just like emit_move_insn, but assumes X and Y
2556 are basically valid. */
2557
2558rtx
2559emit_move_insn_1 (x, y)
2560 rtx x, y;
2561{
2562 enum machine_mode mode = GET_MODE (x);
2563 enum machine_mode submode;
2564 enum mode_class class = GET_MODE_CLASS (mode);
2565 int i;
2566
76bbe028
ZW
2567 if (mode >= MAX_MACHINE_MODE)
2568 abort ();
2569
bbf6f052
RK
2570 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2571 return
2572 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2573
89742723 2574 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2575 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
d0c76654
RK
2576 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2577 * BITS_PER_UNIT),
2578 (class == MODE_COMPLEX_INT
2579 ? MODE_INT : MODE_FLOAT),
2580 0))
7308a047
RS
2581 && (mov_optab->handlers[(int) submode].insn_code
2582 != CODE_FOR_nothing))
2583 {
2584 /* Don't split destination if it is a stack push. */
2585 int stack = push_operand (x, GET_MODE (x));
7308a047 2586
7308a047
RS
2587 /* If this is a stack, push the highpart first, so it
2588 will be in the argument order.
2589
2590 In that case, change_address is used only to convert
2591 the mode, not to change the address. */
c937357e
RS
2592 if (stack)
2593 {
e33c0d66
RS
2594 /* Note that the real part always precedes the imag part in memory
2595 regardless of machine's endianness. */
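	  /* Hence on a downward-growing stack the imaginary part is
	     pushed first: the real part, pushed last, then ends up at
	     the lower address.  */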
c937357e
RS
2596#ifdef STACK_GROWS_DOWNWARD
2597 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2598 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2599 gen_imagpart (submode, y)));
c937357e 2600 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2601 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2602 gen_realpart (submode, y)));
c937357e
RS
2603#else
2604 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2605 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2606 gen_realpart (submode, y)));
c937357e 2607 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2608 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2609 gen_imagpart (submode, y)));
c937357e
RS
2610#endif
2611 }
2612 else
2613 {
c14c6529
RH
2614 /* Show the output dies here. This is necessary for pseudos;
2615 hard regs shouldn't appear here except as return values.
2616 We never want to emit such a clobber after reload. */
2617 if (x != y
2618 && ! (reload_in_progress || reload_completed))
b2e7e6fb 2619 {
c14c6529 2620 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
b2e7e6fb 2621 }
2638126a 2622
c937357e 2623 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 2624 (gen_realpart (submode, x), gen_realpart (submode, y)));
c937357e 2625 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 2626 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
c937357e 2627 }
7308a047 2628
7a1ab50a 2629 return get_last_insn ();
7308a047
RS
2630 }
2631
bbf6f052
RK
2632 /* This will handle any multi-word mode that lacks a move_insn pattern.
2633 However, you will get better code if you define such patterns,
2634 even if they must turn into multiple assembler instructions. */
a4320483 2635 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
bbf6f052
RK
2636 {
2637 rtx last_insn = 0;
6551fa4d 2638
a98c9f1a
RK
2639#ifdef PUSH_ROUNDING
2640
2641 /* If X is a push on the stack, do the push now and replace
2642 X with a reference to the stack pointer. */
2643 if (push_operand (x, GET_MODE (x)))
2644 {
2645 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2646 x = change_address (x, VOIDmode, stack_pointer_rtx);
2647 }
2648#endif
2649
c14c6529
RH
2650 /* Show the output dies here. This is necessary for pseudos;
2651 hard regs shouldn't appear here except as return values.
2652 We never want to emit such a clobber after reload. */
2653 if (x != y
2654 && ! (reload_in_progress || reload_completed))
b2e7e6fb 2655 {
c14c6529 2656 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
b2e7e6fb 2657 }
15a7a8ec 2658
bbf6f052
RK
2659 for (i = 0;
2660 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2661 i++)
2662 {
2663 rtx xpart = operand_subword (x, i, 1, mode);
2664 rtx ypart = operand_subword (y, i, 1, mode);
2665
2666 /* If we can't get a part of Y, put Y into memory if it is a
2667 constant. Otherwise, force it into a register. If we still
2668 can't get a part of Y, abort. */
2669 if (ypart == 0 && CONSTANT_P (y))
2670 {
2671 y = force_const_mem (mode, y);
2672 ypart = operand_subword (y, i, 1, mode);
2673 }
2674 else if (ypart == 0)
2675 ypart = operand_subword_force (y, i, mode);
2676
2677 if (xpart == 0 || ypart == 0)
2678 abort ();
2679
2680 last_insn = emit_move_insn (xpart, ypart);
2681 }
6551fa4d 2682
bbf6f052
RK
2683 return last_insn;
2684 }
2685 else
2686 abort ();
2687}
2688\f
2689/* Pushing data onto the stack. */
2690
2691/* Push a block of length SIZE (perhaps variable)
2692 and return an rtx to address the beginning of the block.
2693 Note that it is not possible for the value returned to be a QUEUED.
2694 The value may be virtual_outgoing_args_rtx.
2695
2696 EXTRA is the number of bytes of padding to push in addition to SIZE.
2697 BELOW nonzero means this padding comes at low addresses;
2698 otherwise, the padding comes at high addresses. */
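/* For example (a sketch), push_block (GEN_INT (64), 0, 0) adjusts the
   stack by 64 bytes and returns an address for the new space; when the
   stack grows downward that is the lowest address of the block.  */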
2699
2700rtx
2701push_block (size, extra, below)
2702 rtx size;
2703 int extra, below;
2704{
2705 register rtx temp;
88f63c77
RK
2706
2707 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
2708 if (CONSTANT_P (size))
2709 anti_adjust_stack (plus_constant (size, extra));
2710 else if (GET_CODE (size) == REG && extra == 0)
2711 anti_adjust_stack (size);
2712 else
2713 {
2714 rtx temp = copy_to_mode_reg (Pmode, size);
2715 if (extra != 0)
906c4e36 2716 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
2717 temp, 0, OPTAB_LIB_WIDEN);
2718 anti_adjust_stack (temp);
2719 }
2720
e1a9b2ab
HB
2721#if defined (STACK_GROWS_DOWNWARD) \
2722 || (defined (ARGS_GROW_DOWNWARD) \
2723 && !defined (ACCUMULATE_OUTGOING_ARGS))
2724
2725 /* Return the lowest stack address when STACK or ARGS grow downward and
 2726     we are not accumulating outgoing arguments (the c4x port uses such
2727 conventions). */
bbf6f052
RK
2728 temp = virtual_outgoing_args_rtx;
2729 if (extra != 0 && below)
2730 temp = plus_constant (temp, extra);
2731#else
2732 if (GET_CODE (size) == CONST_INT)
2733 temp = plus_constant (virtual_outgoing_args_rtx,
2734 - INTVAL (size) - (below ? 0 : extra));
2735 else if (extra != 0 && !below)
38a448ca 2736 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
bbf6f052
RK
2737 negate_rtx (Pmode, plus_constant (size, extra)));
2738 else
38a448ca 2739 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
c5c76735 2740 negate_rtx (Pmode, size));
bbf6f052
RK
2741#endif
2742
2743 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2744}
2745
87e38d84 2746rtx
bbf6f052
RK
2747gen_push_operand ()
2748{
38a448ca 2749 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
bbf6f052
RK
2750}
2751
921b3427
RK
 2752/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2753 block of SIZE bytes. */
2754
2755static rtx
2756get_push_address (size)
2757 int size;
2758{
2759 register rtx temp;
2760
2761 if (STACK_PUSH_CODE == POST_DEC)
38a448ca 2762 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427 2763 else if (STACK_PUSH_CODE == POST_INC)
38a448ca 2764 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427
RK
2765 else
2766 temp = stack_pointer_rtx;
2767
c85f7c16 2768 return copy_to_reg (temp);
921b3427
RK
2769}
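/* For example, with STACK_PUSH_CODE == POST_DEC the stack pointer is
   decremented only after each store, so a block just pushed begins at
   the current stack pointer plus SIZE, which is what is computed
   above.  */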
2770
bbf6f052
RK
2771/* Generate code to push X onto the stack, assuming it has mode MODE and
2772 type TYPE.
2773 MODE is redundant except when X is a CONST_INT (since they don't
2774 carry mode info).
2775 SIZE is an rtx for the size of data to be copied (in bytes),
2776 needed only if X is BLKmode.
2777
2778 ALIGN (in bytes) is maximum alignment we can assume.
2779
cd048831
RK
2780 If PARTIAL and REG are both nonzero, then copy that many of the first
2781 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
2782 The amount of space pushed is decreased by PARTIAL words,
2783 rounded *down* to a multiple of PARM_BOUNDARY.
2784 REG must be a hard register in this case.
cd048831
RK
 2785   If REG is zero but PARTIAL is not, take all other actions for an
2786 argument partially in registers, but do not actually load any
2787 registers.
bbf6f052
RK
2788
2789 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 2790 This is ignored if an argument block has already been allocated.
bbf6f052
RK
2791
2792 On a machine that lacks real push insns, ARGS_ADDR is the address of
2793 the bottom of the argument block for this call. We use indexing off there
2794 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2795 argument block has not been preallocated.
2796
e5e809f4
JL
2797 ARGS_SO_FAR is the size of args previously pushed for this call.
2798
2799 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2800 for arguments passed in registers. If nonzero, it will be the number
2801 of bytes required. */
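/* As an illustration (a sketch, assuming PARM_BOUNDARY is one word):
   pushing a six-word BLKmode argument with PARTIAL == 2 copies the
   first two words into REG and REG+1 and pushes only the remaining
   four words onto the stack.  */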
bbf6f052
RK
2802
2803void
2804emit_push_insn (x, mode, type, size, align, partial, reg, extra,
e5e809f4 2805 args_addr, args_so_far, reg_parm_stack_space)
bbf6f052
RK
2806 register rtx x;
2807 enum machine_mode mode;
2808 tree type;
2809 rtx size;
2810 int align;
2811 int partial;
2812 rtx reg;
2813 int extra;
2814 rtx args_addr;
2815 rtx args_so_far;
e5e809f4 2816 int reg_parm_stack_space;
bbf6f052
RK
2817{
2818 rtx xinner;
2819 enum direction stack_direction
2820#ifdef STACK_GROWS_DOWNWARD
2821 = downward;
2822#else
2823 = upward;
2824#endif
2825
2826 /* Decide where to pad the argument: `downward' for below,
2827 `upward' for above, or `none' for don't pad it.
2828 Default is below for small data on big-endian machines; else above. */
2829 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2830
2831 /* Invert direction if stack is post-update. */
2832 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2833 if (where_pad != none)
2834 where_pad = (where_pad == downward ? upward : downward);
2835
2836 xinner = x = protect_from_queue (x, 0);
2837
2838 if (mode == BLKmode)
2839 {
2840 /* Copy a block into the stack, entirely or partially. */
2841
2842 register rtx temp;
2843 int used = partial * UNITS_PER_WORD;
2844 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2845 int skip;
2846
2847 if (size == 0)
2848 abort ();
2849
2850 used -= offset;
2851
2852 /* USED is now the # of bytes we need not copy to the stack
2853 because registers will take care of them. */
2854
2855 if (partial != 0)
2856 xinner = change_address (xinner, BLKmode,
2857 plus_constant (XEXP (xinner, 0), used));
2858
2859 /* If the partial register-part of the arg counts in its stack size,
2860 skip the part of stack space corresponding to the registers.
2861 Otherwise, start copying to the beginning of the stack space,
2862 by setting SKIP to 0. */
e5e809f4 2863 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
2864
2865#ifdef PUSH_ROUNDING
2866 /* Do it with several push insns if that doesn't take lots of insns
2867 and if there is no difficulty with push insns that skip bytes
2868 on the stack for alignment purposes. */
2869 if (args_addr == 0
2870 && GET_CODE (size) == CONST_INT
2871 && skip == 0
15914757 2872 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
bbf6f052
RK
2873 /* Here we avoid the case of a structure whose weak alignment
2874 forces many pushes of a small amount of data,
2875 and such small pushes do rounding that causes trouble. */
c7a7ac46 2876 && ((! SLOW_UNALIGNED_ACCESS)
e87b4f3f 2877 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 2878 || PUSH_ROUNDING (align) == align)
bbf6f052
RK
2879 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2880 {
2881 /* Push padding now if padding above and stack grows down,
2882 or if padding below and stack grows up.
2883 But if space already allocated, this has already been done. */
2884 if (extra && args_addr == 0
2885 && where_pad != none && where_pad != stack_direction)
906c4e36 2886 anti_adjust_stack (GEN_INT (extra));
bbf6f052 2887
38a448ca 2888 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
bbf6f052 2889 INTVAL (size) - used, align);
921b3427 2890
7d384cc0 2891 if (current_function_check_memory_usage && ! in_check_memory_usage)
921b3427
RK
2892 {
2893 rtx temp;
2894
956d6950 2895 in_check_memory_usage = 1;
921b3427 2896	      temp = get_push_address (INTVAL (size) - used);
c85f7c16 2897 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427 2898 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
6a9c4aed
MK
2899 temp, Pmode,
2900 XEXP (xinner, 0), Pmode,
921b3427
RK
 2901				   GEN_INT (INTVAL (size) - used),
2902 TYPE_MODE (sizetype));
2903 else
2904 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 2905 temp, Pmode,
921b3427
RK
 2906				   GEN_INT (INTVAL (size) - used),
2907 TYPE_MODE (sizetype),
956d6950
JL
2908 GEN_INT (MEMORY_USE_RW),
2909 TYPE_MODE (integer_type_node));
2910 in_check_memory_usage = 0;
921b3427 2911 }
bbf6f052
RK
2912 }
2913 else
2914#endif /* PUSH_ROUNDING */
2915 {
2916 /* Otherwise make space on the stack and copy the data
2917 to the address of that space. */
2918
2919 /* Deduct words put into registers from the size we must copy. */
2920 if (partial != 0)
2921 {
2922 if (GET_CODE (size) == CONST_INT)
906c4e36 2923 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
2924 else
2925 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
2926 GEN_INT (used), NULL_RTX, 0,
2927 OPTAB_LIB_WIDEN);
bbf6f052
RK
2928 }
2929
2930 /* Get the address of the stack space.
2931 In this case, we do not deal with EXTRA separately.
2932 A single stack adjust will do. */
2933 if (! args_addr)
2934 {
2935 temp = push_block (size, extra, where_pad == downward);
2936 extra = 0;
2937 }
2938 else if (GET_CODE (args_so_far) == CONST_INT)
2939 temp = memory_address (BLKmode,
2940 plus_constant (args_addr,
2941 skip + INTVAL (args_so_far)));
2942 else
2943 temp = memory_address (BLKmode,
38a448ca
RH
2944 plus_constant (gen_rtx_PLUS (Pmode,
2945 args_addr,
2946 args_so_far),
bbf6f052 2947 skip));
7d384cc0 2948 if (current_function_check_memory_usage && ! in_check_memory_usage)
921b3427
RK
2949 {
2950 rtx target;
2951
956d6950 2952 in_check_memory_usage = 1;
921b3427 2953 target = copy_to_reg (temp);
c85f7c16 2954 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427 2955 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
6a9c4aed
MK
2956 target, Pmode,
2957 XEXP (xinner, 0), Pmode,
921b3427
RK
2958 size, TYPE_MODE (sizetype));
2959 else
2960 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 2961 target, Pmode,
921b3427 2962 size, TYPE_MODE (sizetype),
956d6950
JL
2963 GEN_INT (MEMORY_USE_RW),
2964 TYPE_MODE (integer_type_node));
2965 in_check_memory_usage = 0;
921b3427 2966 }
bbf6f052
RK
2967
2968 /* TEMP is the address of the block. Copy the data there. */
2969 if (GET_CODE (size) == CONST_INT
fbe1758d 2970 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
bbf6f052 2971 {
38a448ca 2972 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
bbf6f052
RK
2973 INTVAL (size), align);
2974 goto ret;
2975 }
e5e809f4 2976 else
bbf6f052 2977 {
e5e809f4
JL
2978 rtx opalign = GEN_INT (align);
2979 enum machine_mode mode;
9e6a5703 2980 rtx target = gen_rtx_MEM (BLKmode, temp);
e5e809f4
JL
2981
2982 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2983 mode != VOIDmode;
2984 mode = GET_MODE_WIDER_MODE (mode))
c841050e 2985 {
e5e809f4
JL
2986 enum insn_code code = movstr_optab[(int) mode];
2987
2988 if (code != CODE_FOR_nothing
2989 && ((GET_CODE (size) == CONST_INT
2990 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2991 <= (GET_MODE_MASK (mode) >> 1)))
2992 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2993 && (insn_operand_predicate[(int) code][0] == 0
2994 || ((*insn_operand_predicate[(int) code][0])
2995 (target, BLKmode)))
2996 && (insn_operand_predicate[(int) code][1] == 0
2997 || ((*insn_operand_predicate[(int) code][1])
2998 (xinner, BLKmode)))
2999 && (insn_operand_predicate[(int) code][3] == 0
3000 || ((*insn_operand_predicate[(int) code][3])
3001 (opalign, VOIDmode))))
3002 {
3003 rtx op2 = convert_to_mode (mode, size, 1);
3004 rtx last = get_last_insn ();
3005 rtx pat;
3006
3007 if (insn_operand_predicate[(int) code][2] != 0
3008 && ! ((*insn_operand_predicate[(int) code][2])
3009 (op2, mode)))
3010 op2 = copy_to_mode_reg (mode, op2);
3011
3012 pat = GEN_FCN ((int) code) (target, xinner,
3013 op2, opalign);
3014 if (pat)
3015 {
3016 emit_insn (pat);
3017 goto ret;
3018 }
3019 else
3020 delete_insns_since (last);
3021 }
c841050e 3022 }
bbf6f052 3023 }
bbf6f052
RK
3024
3025#ifndef ACCUMULATE_OUTGOING_ARGS
3026 /* If the source is referenced relative to the stack pointer,
3027 copy it to another register to stabilize it. We do not need
3028 to do this if we know that we won't be changing sp. */
3029
3030 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3031 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3032 temp = copy_to_reg (temp);
3033#endif
3034
3035 /* Make inhibit_defer_pop nonzero around the library call
3036 to force it to pop the bcopy-arguments right away. */
3037 NO_DEFER_POP;
3038#ifdef TARGET_MEM_FUNCTIONS
d562e42e 3039 emit_library_call (memcpy_libfunc, 0,
bbf6f052 3040 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
0fa83258
RK
3041 convert_to_mode (TYPE_MODE (sizetype),
3042 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3043 TYPE_MODE (sizetype));
bbf6f052 3044#else
d562e42e 3045 emit_library_call (bcopy_libfunc, 0,
bbf6f052 3046 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3b6f75e2
JW
3047 convert_to_mode (TYPE_MODE (integer_type_node),
3048 size,
3049 TREE_UNSIGNED (integer_type_node)),
3050 TYPE_MODE (integer_type_node));
bbf6f052
RK
3051#endif
3052 OK_DEFER_POP;
3053 }
3054 }
3055 else if (partial > 0)
3056 {
3057 /* Scalar partly in registers. */
3058
3059 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3060 int i;
3061 int not_stack;
3062 /* # words of start of argument
3063 that we must make space for but need not store. */
3064 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3065 int args_offset = INTVAL (args_so_far);
3066 int skip;
3067
3068 /* Push padding now if padding above and stack grows down,
3069 or if padding below and stack grows up.
3070 But if space already allocated, this has already been done. */
3071 if (extra && args_addr == 0
3072 && where_pad != none && where_pad != stack_direction)
906c4e36 3073 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3074
3075 /* If we make space by pushing it, we might as well push
3076 the real data. Otherwise, we can leave OFFSET nonzero
3077 and leave the space uninitialized. */
3078 if (args_addr == 0)
3079 offset = 0;
3080
3081 /* Now NOT_STACK gets the number of words that we don't need to
3082 allocate on the stack. */
3083 not_stack = partial - offset;
3084
3085 /* If the partial register-part of the arg counts in its stack size,
3086 skip the part of stack space corresponding to the registers.
3087 Otherwise, start copying to the beginning of the stack space,
3088 by setting SKIP to 0. */
e5e809f4 3089 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3090
3091 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3092 x = validize_mem (force_const_mem (mode, x));
3093
3094 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3095 SUBREGs of such registers are not allowed. */
3096 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3097 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3098 x = copy_to_reg (x);
3099
3100 /* Loop over all the words allocated on the stack for this arg. */
3101 /* We can do it by words, because any scalar bigger than a word
3102 has a size a multiple of a word. */
3103#ifndef PUSH_ARGS_REVERSED
3104 for (i = not_stack; i < size; i++)
3105#else
3106 for (i = size - 1; i >= not_stack; i--)
3107#endif
3108 if (i >= not_stack + offset)
3109 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
3110 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3111 0, args_addr,
3112 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4
JL
3113 * UNITS_PER_WORD)),
3114 reg_parm_stack_space);
bbf6f052
RK
3115 }
3116 else
3117 {
3118 rtx addr;
921b3427 3119 rtx target = NULL_RTX;
bbf6f052
RK
3120
3121 /* Push padding now if padding above and stack grows down,
3122 or if padding below and stack grows up.
3123 But if space already allocated, this has already been done. */
3124 if (extra && args_addr == 0
3125 && where_pad != none && where_pad != stack_direction)
906c4e36 3126 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3127
3128#ifdef PUSH_ROUNDING
3129 if (args_addr == 0)
3130 addr = gen_push_operand ();
3131 else
3132#endif
921b3427
RK
3133 {
3134 if (GET_CODE (args_so_far) == CONST_INT)
3135 addr
3136 = memory_address (mode,
3137 plus_constant (args_addr,
3138 INTVAL (args_so_far)));
3139 else
38a448ca
RH
3140 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3141 args_so_far));
921b3427
RK
3142 target = addr;
3143 }
bbf6f052 3144
38a448ca 3145 emit_move_insn (gen_rtx_MEM (mode, addr), x);
921b3427 3146
7d384cc0 3147 if (current_function_check_memory_usage && ! in_check_memory_usage)
921b3427 3148 {
956d6950 3149 in_check_memory_usage = 1;
921b3427
RK
3150 if (target == 0)
3151 target = get_push_address (GET_MODE_SIZE (mode));
3152
c85f7c16 3153 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427 3154 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
6a9c4aed
MK
3155 target, Pmode,
3156 XEXP (x, 0), Pmode,
921b3427
RK
3157 GEN_INT (GET_MODE_SIZE (mode)),
3158 TYPE_MODE (sizetype));
3159 else
3160 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 3161 target, Pmode,
921b3427
RK
3162 GEN_INT (GET_MODE_SIZE (mode)),
3163 TYPE_MODE (sizetype),
956d6950
JL
3164 GEN_INT (MEMORY_USE_RW),
3165 TYPE_MODE (integer_type_node));
3166 in_check_memory_usage = 0;
921b3427 3167 }
bbf6f052
RK
3168 }
3169
3170 ret:
3171 /* If part should go in registers, copy that part
3172 into the appropriate registers. Do this now, at the end,
3173 since mem-to-mem copies above may do function calls. */
cd048831 3174 if (partial > 0 && reg != 0)
fffa9c1d
JW
3175 {
3176 /* Handle calls that pass values in multiple non-contiguous locations.
3177 The Irix 6 ABI has examples of this. */
3178 if (GET_CODE (reg) == PARALLEL)
aac5cc16 3179 emit_group_load (reg, x, -1, align); /* ??? size? */
fffa9c1d
JW
3180 else
3181 move_block_to_reg (REGNO (reg), x, partial, mode);
3182 }
bbf6f052
RK
3183
3184 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3185 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3186}
3187\f
bbf6f052
RK
3188/* Expand an assignment that stores the value of FROM into TO.
3189 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
3190 (This may contain a QUEUED rtx;
3191 if the value is constant, this rtx is a constant.)
3192 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
3193
3194 SUGGEST_REG is no longer actually used.
3195 It used to mean, copy the value through a register
3196 and return that register, if that is possible.
709f5be1 3197 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
3198
3199rtx
3200expand_assignment (to, from, want_value, suggest_reg)
3201 tree to, from;
3202 int want_value;
c5c76735 3203 int suggest_reg ATTRIBUTE_UNUSED;
bbf6f052
RK
3204{
3205 register rtx to_rtx = 0;
3206 rtx result;
3207
3208 /* Don't crash if the lhs of the assignment was erroneous. */
3209
3210 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
3211 {
3212 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3213 return want_value ? result : NULL_RTX;
3214 }
bbf6f052
RK
3215
3216 /* Assignment of a structure component needs special treatment
3217 if the structure component's rtx is not simply a MEM.
6be58303
JW
3218 Assignment of an array element at a constant index, and assignment of
 3219   an array element in an unaligned packed structure field, have the same
3220 problem. */
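/* For instance, an assignment such as s.f = x, where F is a bit-field,
   has no MEM of its own; get_inner_reference below finds the
   containing object and the bit position, and store_field does the
   actual store.  */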
bbf6f052 3221
08293add
RK
3222 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3223 || TREE_CODE (to) == ARRAY_REF)
bbf6f052
RK
3224 {
3225 enum machine_mode mode1;
3226 int bitsize;
3227 int bitpos;
7bb0943f 3228 tree offset;
bbf6f052
RK
3229 int unsignedp;
3230 int volatilep = 0;
0088fcb1 3231 tree tem;
d78d243c 3232 int alignment;
0088fcb1
RK
3233
3234 push_temp_slots ();
839c4796
RK
3235 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3236 &unsignedp, &volatilep, &alignment);
bbf6f052
RK
3237
3238 /* If we are going to use store_bit_field and extract_bit_field,
3239 make sure to_rtx will be safe for multiple use. */
3240
3241 if (mode1 == VOIDmode && want_value)
3242 tem = stabilize_reference (tem);
3243
921b3427 3244 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
7bb0943f
RS
3245 if (offset != 0)
3246 {
906c4e36 3247 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
3248
3249 if (GET_CODE (to_rtx) != MEM)
3250 abort ();
bd070e1a
RH
3251
3252 if (GET_MODE (offset_rtx) != ptr_mode)
3253 {
3254#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 3255 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
3256#else
3257 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3258#endif
3259 }
3260
9a7b9f4f
JL
 3261	  /* A constant address in TO_RTX can have VOIDmode; we must not try
3262 to call force_reg for that case. Avoid that case. */
89752202
HB
3263 if (GET_CODE (to_rtx) == MEM
3264 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 3265 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
89752202
HB
3266 && bitsize
3267 && (bitpos % bitsize) == 0
3268 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3269 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3270 {
3271 rtx temp = change_address (to_rtx, mode1,
3272 plus_constant (XEXP (to_rtx, 0),
3273 (bitpos /
3274 BITS_PER_UNIT)));
3275 if (GET_CODE (XEXP (temp, 0)) == REG)
3276 to_rtx = temp;
3277 else
3278 to_rtx = change_address (to_rtx, mode1,
3279 force_reg (GET_MODE (XEXP (temp, 0)),
3280 XEXP (temp, 0)));
3281 bitpos = 0;
3282 }
3283
7bb0943f 3284 to_rtx = change_address (to_rtx, VOIDmode,
38a448ca 3285 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
c5c76735
JL
3286 force_reg (ptr_mode,
3287 offset_rtx)));
7bb0943f 3288 }
c5c76735 3289
bbf6f052
RK
3290 if (volatilep)
3291 {
3292 if (GET_CODE (to_rtx) == MEM)
01188446
JW
3293 {
3294 /* When the offset is zero, to_rtx is the address of the
3295 structure we are storing into, and hence may be shared.
3296 We must make a new MEM before setting the volatile bit. */
3297 if (offset == 0)
effbcc6a
RK
3298 to_rtx = copy_rtx (to_rtx);
3299
01188446
JW
3300 MEM_VOLATILE_P (to_rtx) = 1;
3301 }
bbf6f052
RK
3302#if 0 /* This was turned off because, when a field is volatile
3303 in an object which is not volatile, the object may be in a register,
3304 and then we would abort over here. */
3305 else
3306 abort ();
3307#endif
3308 }
3309
956d6950
JL
3310 if (TREE_CODE (to) == COMPONENT_REF
3311 && TREE_READONLY (TREE_OPERAND (to, 1)))
3312 {
8bd6ecc2 3313 if (offset == 0)
956d6950
JL
3314 to_rtx = copy_rtx (to_rtx);
3315
3316 RTX_UNCHANGING_P (to_rtx) = 1;
3317 }
3318
921b3427 3319 /* Check the access. */
7d384cc0 3320 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
921b3427
RK
3321 {
3322 rtx to_addr;
3323 int size;
3324 int best_mode_size;
3325 enum machine_mode best_mode;
3326
3327 best_mode = get_best_mode (bitsize, bitpos,
3328 TYPE_ALIGN (TREE_TYPE (tem)),
3329 mode1, volatilep);
3330 if (best_mode == VOIDmode)
3331 best_mode = QImode;
3332
3333 best_mode_size = GET_MODE_BITSIZE (best_mode);
3334 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3335 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3336 size *= GET_MODE_SIZE (best_mode);
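	  /* For example, a 7-bit field starting at bit position 3 with
	     best_mode == QImode checks CEIL (3 + 7, 8) * 1 == 2 bytes.  */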
3337
3338 /* Check the access right of the pointer. */
e9a25f70
JL
3339 if (size)
3340 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 3341 to_addr, Pmode,
e9a25f70 3342 GEN_INT (size), TYPE_MODE (sizetype),
956d6950
JL
3343 GEN_INT (MEMORY_USE_WO),
3344 TYPE_MODE (integer_type_node));
921b3427
RK
3345 }
3346
bbf6f052
RK
3347 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3348 (want_value
3349 /* Spurious cast makes HPUX compiler happy. */
3350 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3351 : VOIDmode),
3352 unsignedp,
3353 /* Required alignment of containing datum. */
d78d243c 3354 alignment,
ece32014
MM
3355 int_size_in_bytes (TREE_TYPE (tem)),
3356 get_alias_set (to));
bbf6f052
RK
3357 preserve_temp_slots (result);
3358 free_temp_slots ();
0088fcb1 3359 pop_temp_slots ();
bbf6f052 3360
709f5be1
RS
3361 /* If the value is meaningful, convert RESULT to the proper mode.
3362 Otherwise, return nothing. */
5ffe63ed
RS
3363 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3364 TYPE_MODE (TREE_TYPE (from)),
3365 result,
3366 TREE_UNSIGNED (TREE_TYPE (to)))
709f5be1 3367 : NULL_RTX);
bbf6f052
RK
3368 }
3369
cd1db108
RS
3370 /* If the rhs is a function call and its value is not an aggregate,
3371 call the function before we start to compute the lhs.
3372 This is needed for correct code for cases such as
3373 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
3374 requires loading up part of an address in a separate insn.
3375
3376 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3377 a promoted variable where the zero- or sign- extension needs to be done.
3378 Handling this in the normal way is safe because no computation is done
3379 before the call. */
3380 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 3381 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1ad87b63 3382 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3383 {
0088fcb1
RK
3384 rtx value;
3385
3386 push_temp_slots ();
3387 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3388 if (to_rtx == 0)
921b3427 3389 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
aaf87c45 3390
fffa9c1d
JW
3391 /* Handle calls that return values in multiple non-contiguous locations.
3392 The Irix 6 ABI has examples of this. */
3393 if (GET_CODE (to_rtx) == PARALLEL)
aac5cc16
RH
3394 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3395 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
fffa9c1d 3396 else if (GET_MODE (to_rtx) == BLKmode)
db3ec607 3397 emit_block_move (to_rtx, value, expr_size (from),
ff9b5bd8 3398 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
aaf87c45 3399 else
6419e5b0
DT
3400 {
3401#ifdef POINTERS_EXTEND_UNSIGNED
ab40f612
DT
3402 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3403 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
6419e5b0
DT
3404 value = convert_memory_address (GET_MODE (to_rtx), value);
3405#endif
3406 emit_move_insn (to_rtx, value);
3407 }
cd1db108
RS
3408 preserve_temp_slots (to_rtx);
3409 free_temp_slots ();
0088fcb1 3410 pop_temp_slots ();
709f5be1 3411 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3412 }
3413
bbf6f052
RK
3414 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3415 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3416
3417 if (to_rtx == 0)
41472af8
MM
3418 {
3419 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3420 if (GET_CODE (to_rtx) == MEM)
3421 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3422 }
bbf6f052 3423
86d38d25
RS
3424 /* Don't move directly into a return register. */
3425 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3426 {
0088fcb1
RK
3427 rtx temp;
3428
3429 push_temp_slots ();
3430 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
86d38d25
RS
3431 emit_move_insn (to_rtx, temp);
3432 preserve_temp_slots (to_rtx);
3433 free_temp_slots ();
0088fcb1 3434 pop_temp_slots ();
709f5be1 3435 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3436 }
3437
bbf6f052
RK
3438 /* In case we are returning the contents of an object which overlaps
3439 the place the value is being stored, use a safe function when copying
3440 a value through a pointer into a structure value return block. */
3441 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3442 && current_function_returns_struct
3443 && !current_function_returns_pcc_struct)
3444 {
0088fcb1
RK
3445 rtx from_rtx, size;
3446
3447 push_temp_slots ();
33a20d10 3448 size = expr_size (from);
921b3427
RK
3449 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3450 EXPAND_MEMORY_USE_DONT);
3451
3452 /* Copy the rights of the bitmap. */
7d384cc0 3453 if (current_function_check_memory_usage)
921b3427 3454 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
6a9c4aed
MK
3455 XEXP (to_rtx, 0), Pmode,
3456 XEXP (from_rtx, 0), Pmode,
921b3427
RK
3457 convert_to_mode (TYPE_MODE (sizetype),
3458 size, TREE_UNSIGNED (sizetype)),
3459 TYPE_MODE (sizetype));
bbf6f052
RK
3460
3461#ifdef TARGET_MEM_FUNCTIONS
d562e42e 3462 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
3463 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3464 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
3465 convert_to_mode (TYPE_MODE (sizetype),
3466 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3467 TYPE_MODE (sizetype));
bbf6f052 3468#else
d562e42e 3469 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
3470 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3471 XEXP (to_rtx, 0), Pmode,
3b6f75e2
JW
3472 convert_to_mode (TYPE_MODE (integer_type_node),
3473 size, TREE_UNSIGNED (integer_type_node)),
3474 TYPE_MODE (integer_type_node));
bbf6f052
RK
3475#endif
3476
3477 preserve_temp_slots (to_rtx);
3478 free_temp_slots ();
0088fcb1 3479 pop_temp_slots ();
709f5be1 3480 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3481 }
3482
3483 /* Compute FROM and store the value in the rtx we got. */
3484
0088fcb1 3485 push_temp_slots ();
bbf6f052
RK
3486 result = store_expr (from, to_rtx, want_value);
3487 preserve_temp_slots (result);
3488 free_temp_slots ();
0088fcb1 3489 pop_temp_slots ();
709f5be1 3490 return want_value ? result : NULL_RTX;
bbf6f052
RK
3491}
3492
3493/* Generate code for computing expression EXP,
3494 and storing the value into TARGET.
bbf6f052
RK
3495 TARGET may contain a QUEUED rtx.
3496
709f5be1
RS
3497 If WANT_VALUE is nonzero, return a copy of the value
3498 not in TARGET, so that we can be sure to use the proper
3499 value in a containing expression even if TARGET has something
3500 else stored in it. If possible, we copy the value through a pseudo
3501 and return that pseudo. Or, if the value is constant, we try to
3502 return the constant. In some cases, we return a pseudo
3503 copied *from* TARGET.
3504
3505 If the mode is BLKmode then we may return TARGET itself.
3506	 It turns out that in BLKmode it doesn't cause a problem,
3507 because C has no operators that could combine two different
3508 assignments into the same BLKmode object with different values
3509 with no sequence point. Will other languages need this to
3510 be more thorough?
3511
3512 If WANT_VALUE is 0, we return NULL, to make sure
3513 to catch quickly any cases where the caller uses the value
3514 and fails to set WANT_VALUE. */
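/* Editorial illustration (not part of the original source): in a chained
   assignment such as `x = y = 5;', the inner assignment is expanded with
   WANT_VALUE nonzero, since its result feeds the outer assignment, while
   a statement-level assignment is expanded with WANT_VALUE zero and
   store_expr returns NULL_RTX.  */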
bbf6f052
RK
3515
3516rtx
709f5be1 3517store_expr (exp, target, want_value)
bbf6f052
RK
3518 register tree exp;
3519 register rtx target;
709f5be1 3520 int want_value;
bbf6f052
RK
3521{
3522 register rtx temp;
3523 int dont_return_target = 0;
3524
3525 if (TREE_CODE (exp) == COMPOUND_EXPR)
3526 {
3527 /* Perform first part of compound expression, then assign from second
3528 part. */
3529 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3530 emit_queue ();
709f5be1 3531 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
3532 }
3533 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3534 {
3535 /* For conditional expression, get safe form of the target. Then
3536 test the condition, doing the appropriate assignment on either
3537 side. This avoids the creation of unnecessary temporaries.
3538 For non-BLKmode, it is more efficient not to do this. */
3539
3540 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3541
3542 emit_queue ();
3543 target = protect_from_queue (target, 1);
3544
dabf8373 3545 do_pending_stack_adjust ();
bbf6f052
RK
3546 NO_DEFER_POP;
3547 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 3548 start_cleanup_deferral ();
709f5be1 3549 store_expr (TREE_OPERAND (exp, 1), target, 0);
956d6950 3550 end_cleanup_deferral ();
bbf6f052
RK
3551 emit_queue ();
3552 emit_jump_insn (gen_jump (lab2));
3553 emit_barrier ();
3554 emit_label (lab1);
956d6950 3555 start_cleanup_deferral ();
709f5be1 3556 store_expr (TREE_OPERAND (exp, 2), target, 0);
956d6950 3557 end_cleanup_deferral ();
bbf6f052
RK
3558 emit_queue ();
3559 emit_label (lab2);
3560 OK_DEFER_POP;
a3a58acc 3561
709f5be1 3562 return want_value ? target : NULL_RTX;
bbf6f052 3563 }
bbf6f052 3564 else if (queued_subexp_p (target))
709f5be1
RS
3565 /* If target contains a postincrement, let's not risk
3566 using it as the place to generate the rhs. */
bbf6f052
RK
3567 {
3568 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3569 {
3570 /* Expand EXP into a new pseudo. */
3571 temp = gen_reg_rtx (GET_MODE (target));
3572 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3573 }
3574 else
906c4e36 3575 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
3576
3577 /* If target is volatile, ANSI requires accessing the value
3578 *from* the target, if it is accessed. So make that happen.
3579 In no case return the target itself. */
3580 if (! MEM_VOLATILE_P (target) && want_value)
3581 dont_return_target = 1;
bbf6f052 3582 }
12f06d17
CH
3583 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3584 && GET_MODE (target) != BLKmode)
3585 /* If target is in memory and caller wants value in a register instead,
3586 arrange that. Pass TARGET as target for expand_expr so that,
3587 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3588 We know expand_expr will not use the target in that case.
3589 Don't do this if TARGET is volatile because we are supposed
3590 to write it and then read it. */
3591 {
1da93fe0 3592 temp = expand_expr (exp, target, GET_MODE (target), 0);
12f06d17
CH
3593 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3594 temp = copy_to_reg (temp);
3595 dont_return_target = 1;
3596 }
1499e0a8
RK
3597 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3598	  /* If this is a scalar in a register that is stored in a wider mode
3599 than the declared mode, compute the result into its declared mode
3600 and then convert to the wider mode. Our value is the computed
3601 expression. */
3602 {
5a32d038 3603 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
3604 which will often result in some optimizations. Do the conversion
3605 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
3606 the extend. But don't do this if the type of EXP is a subtype
3607 of something else since then the conversion might involve
3608 more than just converting modes. */
3609 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3610 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
3611 {
3612 if (TREE_UNSIGNED (TREE_TYPE (exp))
3613 != SUBREG_PROMOTED_UNSIGNED_P (target))
3614 exp
3615 = convert
3616 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3617 TREE_TYPE (exp)),
3618 exp);
3619
3620 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3621 SUBREG_PROMOTED_UNSIGNED_P (target)),
3622 exp);
3623 }
5a32d038 3624
1499e0a8 3625 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
b258707c 3626
766f36c7 3627 /* If TEMP is a volatile MEM and we want a result value, make
f29369b9
RK
3628 the access now so it gets done only once. Likewise if
3629 it contains TARGET. */
3630 if (GET_CODE (temp) == MEM && want_value
3631 && (MEM_VOLATILE_P (temp)
3632 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
766f36c7
RK
3633 temp = copy_to_reg (temp);
3634
b258707c
RS
3635 /* If TEMP is a VOIDmode constant, use convert_modes to make
3636 sure that we properly convert it. */
3637 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3638 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3639 TYPE_MODE (TREE_TYPE (exp)), temp,
3640 SUBREG_PROMOTED_UNSIGNED_P (target));
3641
1499e0a8
RK
3642 convert_move (SUBREG_REG (target), temp,
3643 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
3644
3645 /* If we promoted a constant, change the mode back down to match
3646 target. Otherwise, the caller might get confused by a result whose
3647 mode is larger than expected. */
3648
3649 if (want_value && GET_MODE (temp) != GET_MODE (target)
3650 && GET_MODE (temp) != VOIDmode)
3651 {
3652 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3653 SUBREG_PROMOTED_VAR_P (temp) = 1;
3654 SUBREG_PROMOTED_UNSIGNED_P (temp)
3655 = SUBREG_PROMOTED_UNSIGNED_P (target);
3656 }
3657
709f5be1 3658 return want_value ? temp : NULL_RTX;
1499e0a8 3659 }
bbf6f052
RK
3660 else
3661 {
3662 temp = expand_expr (exp, target, GET_MODE (target), 0);
766f36c7 3663 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
3664 If TARGET is a volatile mem ref, either return TARGET
3665 or return a reg copied *from* TARGET; ANSI requires this.
3666
3667 Otherwise, if TEMP is not TARGET, return TEMP
3668 if it is constant (for efficiency),
3669 or if we really want the correct value. */
bbf6f052
RK
3670 if (!(target && GET_CODE (target) == REG
3671 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 3672 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 3673 && ! rtx_equal_p (temp, target)
709f5be1 3674 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
3675 dont_return_target = 1;
3676 }
3677
b258707c
RS
3678 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3679 the same as that of TARGET, adjust the constant. This is needed, for
3680 example, in case it is a CONST_DOUBLE and we want only a word-sized
3681 value. */
3682 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 3683 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
3684 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3685 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3686 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3687
7d384cc0 3688 if (current_function_check_memory_usage
921b3427
RK
3689 && GET_CODE (target) == MEM
3690 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3691 {
3692 if (GET_CODE (temp) == MEM)
3693 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
6a9c4aed
MK
3694 XEXP (target, 0), Pmode,
3695 XEXP (temp, 0), Pmode,
921b3427
RK
3696 expr_size (exp), TYPE_MODE (sizetype));
3697 else
3698 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 3699 XEXP (target, 0), Pmode,
921b3427 3700 expr_size (exp), TYPE_MODE (sizetype),
956d6950
JL
3701 GEN_INT (MEMORY_USE_WO),
3702 TYPE_MODE (integer_type_node));
921b3427
RK
3703 }
3704
bbf6f052
RK
3705 /* If value was not generated in the target, store it there.
3706	 Convert the value to TARGET's type first if necessary.  */
f3f2255a
R
3707 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3708 one or both of them are volatile memory refs, we have to distinguish
3709 two cases:
3710 - expand_expr has used TARGET. In this case, we must not generate
3711 another copy. This can be detected by TARGET being equal according
3712 to == .
3713 - expand_expr has not used TARGET - that means that the source just
3714 happens to have the same RTX form. Since temp will have been created
3715 by expand_expr, it will compare unequal according to == .
3716 We must generate a copy in this case, to reach the correct number
3717 of volatile memory references. */
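/* Editorial example of the distinction above: if TARGET is the volatile
   reference (mem/v:SI (reg 100)) and EXP happens to expand to an rtx of
   the very same shape, rtx_equal_p reports them equal, but only pointer
   identity (==) tells whether expand_expr actually stored through TARGET
   or merely built a lookalike rtx that must still be copied.  */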
bbf6f052 3718
6036acbb 3719 if ((! rtx_equal_p (temp, target)
f3f2255a
R
3720 || (temp != target && (side_effects_p (temp)
3721 || side_effects_p (target))))
6036acbb 3722 && TREE_CODE (exp) != ERROR_MARK)
bbf6f052
RK
3723 {
3724 target = protect_from_queue (target, 1);
3725 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 3726 && GET_MODE (temp) != VOIDmode)
bbf6f052
RK
3727 {
3728 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3729 if (dont_return_target)
3730 {
3731 /* In this case, we will return TEMP,
3732 so make sure it has the proper mode.
3733 But don't forget to store the value into TARGET. */
3734 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3735 emit_move_insn (target, temp);
3736 }
3737 else
3738 convert_move (target, temp, unsignedp);
3739 }
3740
3741 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3742 {
3743 /* Handle copying a string constant into an array.
3744 The string constant may be shorter than the array.
3745 So copy just the string's actual length, and clear the rest. */
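	  /* Editorial example: for `char buf[8] = "abc";', expr_size gives 8
	     while TREE_STRING_LENGTH gives 4 (the characters plus the
	     terminating null), so the code below block-copies 4 bytes and
	     clears the remaining 4.  */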
3746 rtx size;
22619c3f 3747 rtx addr;
bbf6f052 3748
e87b4f3f
RS
3749 /* Get the size of the data type of the string,
3750 which is actually the size of the target. */
3751 size = expr_size (exp);
3752 if (GET_CODE (size) == CONST_INT
3753 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3754 emit_block_move (target, temp, size,
3755 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3756 else
bbf6f052 3757 {
e87b4f3f
RS
3758 /* Compute the size of the data to copy from the string. */
3759 tree copy_size
c03b7665 3760 = size_binop (MIN_EXPR,
b50d17a1 3761 make_tree (sizetype, size),
c03b7665
RK
3762 convert (sizetype,
3763 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
906c4e36
RK
3764 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3765 VOIDmode, 0);
e87b4f3f
RS
3766 rtx label = 0;
3767
3768 /* Copy that much. */
3769 emit_block_move (target, temp, copy_size_rtx,
3770 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3771
88f63c77
RK
3772 /* Figure out how much is left in TARGET that we have to clear.
3773 Do all calculations in ptr_mode. */
3774
3775 addr = XEXP (target, 0);
3776 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3777
e87b4f3f
RS
3778 if (GET_CODE (copy_size_rtx) == CONST_INT)
3779 {
88f63c77 3780 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
22619c3f 3781 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
e87b4f3f
RS
3782 }
3783 else
3784 {
88f63c77
RK
3785 addr = force_reg (ptr_mode, addr);
3786 addr = expand_binop (ptr_mode, add_optab, addr,
906c4e36
RK
3787 copy_size_rtx, NULL_RTX, 0,
3788 OPTAB_LIB_WIDEN);
e87b4f3f 3789
88f63c77 3790 size = expand_binop (ptr_mode, sub_optab, size,
906c4e36
RK
3791 copy_size_rtx, NULL_RTX, 0,
3792 OPTAB_LIB_WIDEN);
e87b4f3f 3793
e87b4f3f 3794 label = gen_label_rtx ();
c5d5d461
JL
3795 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3796 GET_MODE (size), 0, 0, label);
e87b4f3f
RS
3797 }
3798
3799 if (size != const0_rtx)
3800 {
921b3427 3801 /* Be sure we can write on ADDR. */
7d384cc0 3802 if (current_function_check_memory_usage)
921b3427 3803 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 3804 addr, Pmode,
921b3427 3805 size, TYPE_MODE (sizetype),
956d6950
JL
3806 GEN_INT (MEMORY_USE_WO),
3807 TYPE_MODE (integer_type_node));
bbf6f052 3808#ifdef TARGET_MEM_FUNCTIONS
3b6f75e2 3809 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
86242483 3810 addr, ptr_mode,
3b6f75e2
JW
3811 const0_rtx, TYPE_MODE (integer_type_node),
3812 convert_to_mode (TYPE_MODE (sizetype),
3813 size,
3814 TREE_UNSIGNED (sizetype)),
3815 TYPE_MODE (sizetype));
bbf6f052 3816#else
d562e42e 3817 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
86242483 3818 addr, ptr_mode,
3b6f75e2
JW
3819 convert_to_mode (TYPE_MODE (integer_type_node),
3820 size,
3821 TREE_UNSIGNED (integer_type_node)),
3822 TYPE_MODE (integer_type_node));
bbf6f052 3823#endif
e87b4f3f 3824 }
22619c3f 3825
e87b4f3f
RS
3826 if (label)
3827 emit_label (label);
bbf6f052
RK
3828 }
3829 }
fffa9c1d
JW
3830 /* Handle calls that return values in multiple non-contiguous locations.
3831 The Irix 6 ABI has examples of this. */
3832 else if (GET_CODE (target) == PARALLEL)
aac5cc16
RH
3833 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3834 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
bbf6f052
RK
3835 else if (GET_MODE (temp) == BLKmode)
3836 emit_block_move (target, temp, expr_size (exp),
3837 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3838 else
3839 emit_move_insn (target, temp);
3840 }
709f5be1 3841
766f36c7
RK
3842 /* If we don't want a value, return NULL_RTX. */
3843 if (! want_value)
3844 return NULL_RTX;
3845
3846 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3847 ??? The latter test doesn't seem to make sense. */
3848 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 3849 return temp;
766f36c7
RK
3850
3851 /* Return TARGET itself if it is a hard register. */
3852 else if (want_value && GET_MODE (target) != BLKmode
3853 && ! (GET_CODE (target) == REG
3854 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 3855 return copy_to_reg (target);
766f36c7
RK
3856
3857 else
709f5be1 3858 return target;
bbf6f052
RK
3859}
3860\f
9de08200
RK
3861/* Return 1 if EXP just contains zeros. */
3862
3863static int
3864is_zeros_p (exp)
3865 tree exp;
3866{
3867 tree elt;
3868
3869 switch (TREE_CODE (exp))
3870 {
3871 case CONVERT_EXPR:
3872 case NOP_EXPR:
3873 case NON_LVALUE_EXPR:
3874 return is_zeros_p (TREE_OPERAND (exp, 0));
3875
3876 case INTEGER_CST:
3877 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3878
3879 case COMPLEX_CST:
3880 return
3881 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3882
3883 case REAL_CST:
41c9120b 3884 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200
RK
3885
3886 case CONSTRUCTOR:
e1a43f73
PB
3887 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3888 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
3889 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3890 if (! is_zeros_p (TREE_VALUE (elt)))
3891 return 0;
3892
3893 return 1;
e9a25f70
JL
3894
3895 default:
3896 return 0;
9de08200 3897 }
9de08200
RK
3898}
3899
3900/* Return 1 if EXP contains mostly (3/4) zeros. */
3901
3902static int
3903mostly_zeros_p (exp)
3904 tree exp;
3905{
9de08200
RK
3906 if (TREE_CODE (exp) == CONSTRUCTOR)
3907 {
e1a43f73
PB
3908 int elts = 0, zeros = 0;
3909 tree elt = CONSTRUCTOR_ELTS (exp);
3910 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3911 {
3912 /* If there are no ranges of true bits, it is all zero. */
3913 return elt == NULL_TREE;
3914 }
3915 for (; elt; elt = TREE_CHAIN (elt))
3916 {
3917 /* We do not handle the case where the index is a RANGE_EXPR,
3918 so the statistic will be somewhat inaccurate.
3919 We do make a more accurate count in store_constructor itself,
3920	 and since this function is only used for nested array elements,
0f41302f 3921 this should be close enough. */
e1a43f73
PB
3922 if (mostly_zeros_p (TREE_VALUE (elt)))
3923 zeros++;
3924 elts++;
3925 }
9de08200
RK
3926
3927 return 4 * zeros >= 3 * elts;
3928 }
3929
3930 return is_zeros_p (exp);
3931}
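/* Editorial worked example: for the constructor {0, 0, 0, 5} the loop
   above finds zeros == 3 and elts == 4, and 4*3 >= 3*4 holds, so the
   constructor counts as mostly zeros; for {0, 0, 5, 5} we get 4*2 < 3*4
   and it does not.  The threshold is thus "at least 75% zero elements".  */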
3932\f
e1a43f73
PB
3933/* Helper function for store_constructor.
3934 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3935 TYPE is the type of the CONSTRUCTOR, not the element type.
c5c76735 3936 ALIGN and CLEARED are as for store_constructor.
23ccec44
JW
3937
3938 This provides a recursive shortcut back to store_constructor when it isn't
3939 necessary to go through store_field. This is so that we can pass through
3940 the cleared field to let store_constructor know that we may not have to
3941 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
3942
3943static void
3944store_constructor_field (target, bitsize, bitpos,
c5c76735 3945 mode, exp, type, align, cleared)
e1a43f73
PB
3946 rtx target;
3947 int bitsize, bitpos;
3948 enum machine_mode mode;
3949 tree exp, type;
c5c76735 3950 int align;
e1a43f73
PB
3951 int cleared;
3952{
3953 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44
JW
3954 && bitpos % BITS_PER_UNIT == 0
3955 /* If we have a non-zero bitpos for a register target, then we just
3956 let store_field do the bitfield handling. This is unlikely to
3957	 generate unnecessary clear instructions anyway.  */
3958 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 3959 {
126e5b0d
JW
3960 if (bitpos != 0)
3961 target = change_address (target, VOIDmode,
3962 plus_constant (XEXP (target, 0),
3963 bitpos / BITS_PER_UNIT));
c5c76735 3964 store_constructor (exp, target, align, cleared);
e1a43f73
PB
3965 }
3966 else
c5c76735
JL
3967 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
3968 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
3969 int_size_in_bytes (type), cleared);
e1a43f73
PB
3970}
3971
bbf6f052 3972/* Store the value of constructor EXP into the rtx TARGET.
e1a43f73 3973 TARGET is either a REG or a MEM.
c5c76735 3974 ALIGN is the maximum known alignment for TARGET, in bits.
0f41302f 3975   CLEARED is true if TARGET is known to have been zeroed.  */
bbf6f052
RK
3976
3977static void
c5c76735 3978store_constructor (exp, target, align, cleared)
bbf6f052
RK
3979 tree exp;
3980 rtx target;
c5c76735 3981 int align;
e1a43f73 3982 int cleared;
bbf6f052 3983{
4af3895e 3984 tree type = TREE_TYPE (exp);
a5efcd63 3985#ifdef WORD_REGISTER_OPERATIONS
34c73909 3986 rtx exp_size = expr_size (exp);
a5efcd63 3987#endif
4af3895e 3988
bbf6f052
RK
3989 /* We know our target cannot conflict, since safe_from_p has been called. */
3990#if 0
3991 /* Don't try copying piece by piece into a hard register
3992 since that is vulnerable to being clobbered by EXP.
3993 Instead, construct in a pseudo register and then copy it all. */
3994 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3995 {
3996 rtx temp = gen_reg_rtx (GET_MODE (target));
e1a43f73 3997 store_constructor (exp, temp, 0);
bbf6f052
RK
3998 emit_move_insn (target, temp);
3999 return;
4000 }
4001#endif
4002
e44842fe
RK
4003 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4004 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
4005 {
4006 register tree elt;
4007
4af3895e 4008 /* Inform later passes that the whole union value is dead. */
e44842fe
RK
4009 if (TREE_CODE (type) == UNION_TYPE
4010 || TREE_CODE (type) == QUAL_UNION_TYPE)
38a448ca 4011 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4af3895e
JVA
4012
4013 /* If we are building a static constructor into a register,
4014 set the initial value as zero so we can fold the value into
67225c15
RK
4015 a constant. But if more than one register is involved,
4016 this probably loses. */
4017 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4018 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200
RK
4019 {
4020 if (! cleared)
e9a25f70 4021 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4af3895e 4022
9de08200
RK
4023 cleared = 1;
4024 }
4025
4026 /* If the constructor has fewer fields than the structure
4027 or if we are initializing the structure to mostly zeros,
bbf6f052 4028 clear the whole structure first. */
9de08200
RK
4029 else if ((list_length (CONSTRUCTOR_ELTS (exp))
4030 != list_length (TYPE_FIELDS (type)))
4031 || mostly_zeros_p (exp))
4032 {
4033 if (! cleared)
4034 clear_storage (target, expr_size (exp),
c5c76735 4035 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
9de08200
RK
4036
4037 cleared = 1;
4038 }
bbf6f052
RK
4039 else
4040 /* Inform later passes that the old value is dead. */
38a448ca 4041 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4042
4043 /* Store each element of the constructor into
4044 the corresponding field of TARGET. */
4045
4046 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4047 {
4048 register tree field = TREE_PURPOSE (elt);
c5c76735 4049#ifdef WORD_REGISTER_OPERATIONS
34c73909 4050 tree value = TREE_VALUE (elt);
c5c76735 4051#endif
bbf6f052
RK
4052 register enum machine_mode mode;
4053 int bitsize;
b50d17a1 4054 int bitpos = 0;
bbf6f052 4055 int unsignedp;
b50d17a1
RK
4056 tree pos, constant = 0, offset = 0;
4057 rtx to_rtx = target;
bbf6f052 4058
f32fd778
RS
4059 /* Just ignore missing fields.
4060 We cleared the whole structure, above,
4061 if any fields are missing. */
4062 if (field == 0)
4063 continue;
4064
e1a43f73
PB
4065 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4066 continue;
9de08200 4067
bbf6f052
RK
4068 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4069 unsignedp = TREE_UNSIGNED (field);
4070 mode = DECL_MODE (field);
4071 if (DECL_BIT_FIELD (field))
4072 mode = VOIDmode;
4073
b50d17a1
RK
4074 pos = DECL_FIELD_BITPOS (field);
4075 if (TREE_CODE (pos) == INTEGER_CST)
4076 constant = pos;
4077 else if (TREE_CODE (pos) == PLUS_EXPR
4078 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4079 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4080 else
4081 offset = pos;
4082
4083 if (constant)
cd11b87e 4084 bitpos = TREE_INT_CST_LOW (constant);
b50d17a1
RK
4085
4086 if (offset)
4087 {
4088 rtx offset_rtx;
4089
4090 if (contains_placeholder_p (offset))
4091 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 4092 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 4093
b50d17a1
RK
4094 offset = size_binop (FLOOR_DIV_EXPR, offset,
4095 size_int (BITS_PER_UNIT));
bbf6f052 4096
b50d17a1
RK
4097 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4098 if (GET_CODE (to_rtx) != MEM)
4099 abort ();
4100
bd070e1a
RH
4101 if (GET_MODE (offset_rtx) != ptr_mode)
4102 {
4103#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 4104 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
4105#else
4106 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4107#endif
4108 }
4109
b50d17a1
RK
4110 to_rtx
4111 = change_address (to_rtx, VOIDmode,
38a448ca 4112 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
c5c76735
JL
4113 force_reg (ptr_mode,
4114 offset_rtx)));
b50d17a1 4115 }
c5c76735 4116
cf04eb80
RK
4117 if (TREE_READONLY (field))
4118 {
9151b3bf 4119 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
4120 to_rtx = copy_rtx (to_rtx);
4121
cf04eb80
RK
4122 RTX_UNCHANGING_P (to_rtx) = 1;
4123 }
4124
34c73909
R
4125#ifdef WORD_REGISTER_OPERATIONS
4126 /* If this initializes a field that is smaller than a word, at the
4127 start of a word, try to widen it to a full word.
4128 This special case allows us to output C++ member function
4129 initializations in a form that the optimizers can understand. */
4130 if (constant
4131 && GET_CODE (target) == REG
4132 && bitsize < BITS_PER_WORD
4133 && bitpos % BITS_PER_WORD == 0
4134 && GET_MODE_CLASS (mode) == MODE_INT
4135 && TREE_CODE (value) == INTEGER_CST
4136 && GET_CODE (exp_size) == CONST_INT
4137 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4138 {
4139 tree type = TREE_TYPE (value);
4140 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4141 {
4142 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4143 value = convert (type, value);
4144 }
4145 if (BYTES_BIG_ENDIAN)
4146 value
4147 = fold (build (LSHIFT_EXPR, type, value,
4148 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4149 bitsize = BITS_PER_WORD;
4150 mode = word_mode;
4151 }
4152#endif
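/* Editorial note on the widening above: on a hypothetical 32-bit word
   target, a constant initializer for a 16-bit field at a word-aligned
   bit position in a register-held structure at least one word wide is
   rewritten as a full 32-bit word store (shifted into place first when
   BYTES_BIG_ENDIAN), which the RTL optimizers handle far better than a
   bit-field insertion.  */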
c5c76735
JL
4153 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4154 TREE_VALUE (elt), type,
4155 MIN (align,
4156 DECL_ALIGN (TREE_PURPOSE (elt))),
4157 cleared);
bbf6f052
RK
4158 }
4159 }
4af3895e 4160 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
4161 {
4162 register tree elt;
4163 register int i;
e1a43f73 4164 int need_to_clear;
4af3895e 4165 tree domain = TYPE_DOMAIN (type);
906c4e36
RK
4166 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4167 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 4168 tree elttype = TREE_TYPE (type);
bbf6f052 4169
e1a43f73 4170 /* If the constructor has fewer elements than the array,
38e01259 4171 clear the whole array first. Similarly if this is
e1a43f73
PB
4172	 a static constructor of a non-BLKmode object.  */
4173 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4174 need_to_clear = 1;
4175 else
4176 {
4177 HOST_WIDE_INT count = 0, zero_count = 0;
4178 need_to_clear = 0;
4179 /* This loop is a more accurate version of the loop in
4180 mostly_zeros_p (it handles RANGE_EXPR in an index).
4181 It is also needed to check for missing elements. */
4182 for (elt = CONSTRUCTOR_ELTS (exp);
4183 elt != NULL_TREE;
df0faff1 4184 elt = TREE_CHAIN (elt))
e1a43f73
PB
4185 {
4186 tree index = TREE_PURPOSE (elt);
4187 HOST_WIDE_INT this_node_count;
4188 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4189 {
4190 tree lo_index = TREE_OPERAND (index, 0);
4191 tree hi_index = TREE_OPERAND (index, 1);
4192 if (TREE_CODE (lo_index) != INTEGER_CST
4193 || TREE_CODE (hi_index) != INTEGER_CST)
4194 {
4195 need_to_clear = 1;
4196 break;
4197 }
4198 this_node_count = TREE_INT_CST_LOW (hi_index)
4199 - TREE_INT_CST_LOW (lo_index) + 1;
4200 }
4201 else
4202 this_node_count = 1;
4203 count += this_node_count;
4204 if (mostly_zeros_p (TREE_VALUE (elt)))
4205 zero_count += this_node_count;
4206 }
8e958f70 4207 /* Clear the entire array first if there are any missing elements,
0f41302f 4208 or if the incidence of zero elements is >= 75%. */
8e958f70
PB
4209 if (count < maxelt - minelt + 1
4210 || 4 * zero_count >= 3 * count)
e1a43f73
PB
4211 need_to_clear = 1;
4212 }
4213 if (need_to_clear)
9de08200
RK
4214 {
4215 if (! cleared)
4216 clear_storage (target, expr_size (exp),
c5c76735 4217 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
9de08200
RK
4218 cleared = 1;
4219 }
bbf6f052
RK
4220 else
4221 /* Inform later passes that the old value is dead. */
38a448ca 4222 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4223
4224 /* Store each element of the constructor into
4225 the corresponding element of TARGET, determined
4226 by counting the elements. */
4227 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4228 elt;
4229 elt = TREE_CHAIN (elt), i++)
4230 {
4231 register enum machine_mode mode;
4232 int bitsize;
4233 int bitpos;
4234 int unsignedp;
e1a43f73 4235 tree value = TREE_VALUE (elt);
c5c76735 4236 int align = TYPE_ALIGN (TREE_TYPE (value));
03dc44a6
RS
4237 tree index = TREE_PURPOSE (elt);
4238 rtx xtarget = target;
bbf6f052 4239
e1a43f73
PB
4240 if (cleared && is_zeros_p (value))
4241 continue;
9de08200 4242
bbf6f052
RK
4243 mode = TYPE_MODE (elttype);
4244 bitsize = GET_MODE_BITSIZE (mode);
4245 unsignedp = TREE_UNSIGNED (elttype);
4246
e1a43f73
PB
4247 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4248 {
4249 tree lo_index = TREE_OPERAND (index, 0);
4250 tree hi_index = TREE_OPERAND (index, 1);
4251 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4252 struct nesting *loop;
05c0b405
PB
4253 HOST_WIDE_INT lo, hi, count;
4254 tree position;
e1a43f73 4255
0f41302f 4256 /* If the range is constant and "small", unroll the loop. */
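/* Editorial note: with the constants below, "small" means at most two
   elements for any element size, or as many constant-size elements as
   fit in 40 bytes -- e.g. up to ten 32-bit elements, since
   32 * 10 <= 40 * 8; a non-MEM target is always unrolled.  */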
e1a43f73 4257 if (TREE_CODE (lo_index) == INTEGER_CST
05c0b405
PB
4258 && TREE_CODE (hi_index) == INTEGER_CST
4259 && (lo = TREE_INT_CST_LOW (lo_index),
4260 hi = TREE_INT_CST_LOW (hi_index),
4261 count = hi - lo + 1,
4262 (GET_CODE (target) != MEM
4263 || count <= 2
4264 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4265 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4266 <= 40 * 8))))
e1a43f73 4267 {
05c0b405
PB
4268 lo -= minelt; hi -= minelt;
4269 for (; lo <= hi; lo++)
e1a43f73 4270 {
05c0b405 4271 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
c5c76735
JL
4272 store_constructor_field (target, bitsize, bitpos, mode,
4273 value, type, align, cleared);
e1a43f73
PB
4274 }
4275 }
4276 else
4277 {
4278 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4279 loop_top = gen_label_rtx ();
4280 loop_end = gen_label_rtx ();
4281
4282 unsignedp = TREE_UNSIGNED (domain);
4283
4284 index = build_decl (VAR_DECL, NULL_TREE, domain);
4285
4286 DECL_RTL (index) = index_r
4287 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4288 &unsignedp, 0));
4289
4290 if (TREE_CODE (value) == SAVE_EXPR
4291 && SAVE_EXPR_RTL (value) == 0)
4292 {
0f41302f
MS
4293 /* Make sure value gets expanded once before the
4294 loop. */
e1a43f73
PB
4295 expand_expr (value, const0_rtx, VOIDmode, 0);
4296 emit_queue ();
4297 }
4298 store_expr (lo_index, index_r, 0);
4299 loop = expand_start_loop (0);
4300
0f41302f 4301 /* Assign value to element index. */
e1a43f73
PB
4302 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4303 size_int (BITS_PER_UNIT));
4304 position = size_binop (MULT_EXPR,
4305 size_binop (MINUS_EXPR, index,
4306 TYPE_MIN_VALUE (domain)),
4307 position);
4308 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4309 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
e1a43f73
PB
4310 xtarget = change_address (target, mode, addr);
4311 if (TREE_CODE (value) == CONSTRUCTOR)
c5c76735 4312 store_constructor (value, xtarget, align, cleared);
e1a43f73
PB
4313 else
4314 store_expr (value, xtarget, 0);
4315
4316 expand_exit_loop_if_false (loop,
4317 build (LT_EXPR, integer_type_node,
4318 index, hi_index));
4319
4320 expand_increment (build (PREINCREMENT_EXPR,
4321 TREE_TYPE (index),
7b8b9722 4322 index, integer_one_node), 0, 0);
e1a43f73
PB
4323 expand_end_loop ();
4324 emit_label (loop_end);
4325
4326	      /* Needed by stupid register allocation, to extend the
4327 lifetime of pseudo-regs used by target past the end
4328 of the loop. */
38a448ca 4329 emit_insn (gen_rtx_USE (GET_MODE (target), target));
e1a43f73
PB
4330 }
4331 }
4332 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
5b6c44ff 4333 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
03dc44a6 4334 {
e1a43f73 4335 rtx pos_rtx, addr;
03dc44a6
RS
4336 tree position;
4337
5b6c44ff
RK
4338 if (index == 0)
4339 index = size_int (i);
4340
e1a43f73
PB
4341 if (minelt)
4342 index = size_binop (MINUS_EXPR, index,
4343 TYPE_MIN_VALUE (domain));
5b6c44ff
RK
4344 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4345 size_int (BITS_PER_UNIT));
4346 position = size_binop (MULT_EXPR, index, position);
03dc44a6 4347 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4348 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
03dc44a6 4349 xtarget = change_address (target, mode, addr);
e1a43f73 4350 store_expr (value, xtarget, 0);
03dc44a6
RS
4351 }
4352 else
4353 {
4354 if (index != 0)
7c314719 4355 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
03dc44a6
RS
4356 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4357 else
4358 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
c5c76735
JL
4359 store_constructor_field (target, bitsize, bitpos, mode, value,
4360 type, align, cleared);
03dc44a6 4361 }
bbf6f052
RK
4362 }
4363 }
071a6595
PB
4364  /* Set constructor assignments.  */
4365 else if (TREE_CODE (type) == SET_TYPE)
4366 {
e1a43f73 4367 tree elt = CONSTRUCTOR_ELTS (exp);
e1a43f73 4368 int nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
4369 tree domain = TYPE_DOMAIN (type);
4370 tree domain_min, domain_max, bitlength;
4371
9faa82d8 4372 /* The default implementation strategy is to extract the constant
071a6595
PB
4373 parts of the constructor, use that to initialize the target,
4374 and then "or" in whatever non-constant ranges we need in addition.
4375
4376 If a large set is all zero or all ones, it is
4377 probably better to set it using memset (if available) or bzero.
4378 Also, if a large set has just a single range, it may also be
4379	 better to first clear the whole set (using
0f41302f 4380	 bzero/memset), and then set the bits we want.  */
071a6595 4381
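      /* Editorial example: a set declared over 0..255 and initialized with
	 the single range [32..95] is handled below by first clearing all
	 256 bits and then setting the 64 bits of the range -- via memset
	 when, as here, both endpoints are byte-aligned, or via the
	 __setbits library call otherwise.  */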
0f41302f 4382 /* Check for all zeros. */
e1a43f73 4383 if (elt == NULL_TREE)
071a6595 4384 {
e1a43f73
PB
4385 if (!cleared)
4386 clear_storage (target, expr_size (exp),
4387 TYPE_ALIGN (type) / BITS_PER_UNIT);
071a6595
PB
4388 return;
4389 }
4390
071a6595
PB
4391 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4392 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4393 bitlength = size_binop (PLUS_EXPR,
4394 size_binop (MINUS_EXPR, domain_max, domain_min),
4395 size_one_node);
4396
e1a43f73
PB
4397 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4398 abort ();
4399 nbits = TREE_INT_CST_LOW (bitlength);
4400
4401 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4402 are "complicated" (more than one range), initialize (the
4403 constant parts) by copying from a constant. */
4404 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4405 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 4406 {
b4ee5a72
PB
4407 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4408 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 4409 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72
PB
4410 HOST_WIDE_INT word = 0;
4411 int bit_pos = 0;
4412 int ibit = 0;
0f41302f 4413 int offset = 0; /* In bytes from beginning of set. */
e1a43f73 4414 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 4415 for (;;)
071a6595 4416 {
b4ee5a72
PB
4417 if (bit_buffer[ibit])
4418 {
b09f3348 4419 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
4420 word |= (1 << (set_word_size - 1 - bit_pos));
4421 else
4422 word |= 1 << bit_pos;
4423 }
4424 bit_pos++; ibit++;
4425 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 4426 {
e1a43f73
PB
4427 if (word != 0 || ! cleared)
4428 {
4429 rtx datum = GEN_INT (word);
4430 rtx to_rtx;
0f41302f
MS
4431 /* The assumption here is that it is safe to use
4432 XEXP if the set is multi-word, but not if
4433 it's single-word. */
e1a43f73
PB
4434 if (GET_CODE (target) == MEM)
4435 {
4436 to_rtx = plus_constant (XEXP (target, 0), offset);
4437 to_rtx = change_address (target, mode, to_rtx);
4438 }
4439 else if (offset == 0)
4440 to_rtx = target;
4441 else
4442 abort ();
4443 emit_move_insn (to_rtx, datum);
4444 }
b4ee5a72
PB
4445 if (ibit == nbits)
4446 break;
4447 word = 0;
4448 bit_pos = 0;
4449 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
4450 }
4451 }
071a6595 4452 }
e1a43f73
PB
4453 else if (!cleared)
4454 {
0f41302f 4455 /* Don't bother clearing storage if the set is all ones. */
e1a43f73
PB
4456 if (TREE_CHAIN (elt) != NULL_TREE
4457 || (TREE_PURPOSE (elt) == NULL_TREE
4458 ? nbits != 1
4459 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4460 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4461 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4462 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4463 != nbits))))
4464 clear_storage (target, expr_size (exp),
4465 TYPE_ALIGN (type) / BITS_PER_UNIT);
4466 }
4467
4468 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595
PB
4469 {
4470 /* start of range of element or NULL */
4471 tree startbit = TREE_PURPOSE (elt);
4472 /* end of range of element, or element value */
4473 tree endbit = TREE_VALUE (elt);
381127e8 4474#ifdef TARGET_MEM_FUNCTIONS
071a6595 4475 HOST_WIDE_INT startb, endb;
381127e8 4476#endif
071a6595
PB
4477 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4478
4479 bitlength_rtx = expand_expr (bitlength,
4480 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4481
4482 /* handle non-range tuple element like [ expr ] */
4483 if (startbit == NULL_TREE)
4484 {
4485 startbit = save_expr (endbit);
4486 endbit = startbit;
4487 }
4488 startbit = convert (sizetype, startbit);
4489 endbit = convert (sizetype, endbit);
4490 if (! integer_zerop (domain_min))
4491 {
4492 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4493 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4494 }
4495 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4496 EXPAND_CONST_ADDRESS);
4497 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4498 EXPAND_CONST_ADDRESS);
4499
4500 if (REG_P (target))
4501 {
4502 targetx = assign_stack_temp (GET_MODE (target),
4503 GET_MODE_SIZE (GET_MODE (target)),
4504 0);
4505 emit_move_insn (targetx, target);
4506 }
4507 else if (GET_CODE (target) == MEM)
4508 targetx = target;
4509 else
4510 abort ();
4511
4512#ifdef TARGET_MEM_FUNCTIONS
4513 /* Optimization: If startbit and endbit are
9faa82d8 4514 constants divisible by BITS_PER_UNIT,
0f41302f 4515 call memset instead. */
071a6595
PB
4516 if (TREE_CODE (startbit) == INTEGER_CST
4517 && TREE_CODE (endbit) == INTEGER_CST
4518 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 4519 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 4520 {
071a6595
PB
4521 emit_library_call (memset_libfunc, 0,
4522 VOIDmode, 3,
e1a43f73
PB
4523 plus_constant (XEXP (targetx, 0),
4524 startb / BITS_PER_UNIT),
071a6595 4525 Pmode,
3b6f75e2 4526 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 4527 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 4528 TYPE_MODE (sizetype));
071a6595
PB
4529 }
4530 else
4531#endif
4532 {
38a448ca 4533 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
071a6595
PB
4534 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4535 bitlength_rtx, TYPE_MODE (sizetype),
4536 startbit_rtx, TYPE_MODE (sizetype),
4537 endbit_rtx, TYPE_MODE (sizetype));
4538 }
4539 if (REG_P (target))
4540 emit_move_insn (target, targetx);
4541 }
4542 }
bbf6f052
RK
4543
4544 else
4545 abort ();
4546}
4547
4548/* Store the value of EXP (an expression tree)
4549 into a subfield of TARGET which has mode MODE and occupies
4550 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4551 If MODE is VOIDmode, it means that we are storing into a bit-field.
4552
4553 If VALUE_MODE is VOIDmode, return nothing in particular.
4554 UNSIGNEDP is not used in this case.
4555
4556 Otherwise, return an rtx for the value stored. This rtx
4557 has mode VALUE_MODE if that is convenient to do.
4558 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4559
4560 ALIGN is the alignment that TARGET is known to have, measured in bytes.
ece32014
MM
4561 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4562
4563 ALIAS_SET is the alias set for the destination. This value will
4564 (in general) be different from that for TARGET, since TARGET is a
4565 reference to the containing structure. */
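/* Editorial illustration: storing into a 3-bit bit-field that begins 10
   bits into TARGET corresponds to a call with BITSIZE == 3, BITPOS == 10
   and MODE == VOIDmode; the VOIDmode forces the bit-field path below,
   since no machine mode can address the field directly.  */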
bbf6f052
RK
4566
4567static rtx
4568store_field (target, bitsize, bitpos, mode, exp, value_mode,
ece32014 4569 unsignedp, align, total_size, alias_set)
bbf6f052
RK
4570 rtx target;
4571 int bitsize, bitpos;
4572 enum machine_mode mode;
4573 tree exp;
4574 enum machine_mode value_mode;
4575 int unsignedp;
4576 int align;
4577 int total_size;
ece32014 4578 int alias_set;
bbf6f052 4579{
906c4e36 4580 HOST_WIDE_INT width_mask = 0;
bbf6f052 4581
e9a25f70
JL
4582 if (TREE_CODE (exp) == ERROR_MARK)
4583 return const0_rtx;
4584
906c4e36
RK
4585 if (bitsize < HOST_BITS_PER_WIDE_INT)
4586 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
4587
4588 /* If we are storing into an unaligned field of an aligned union that is
4589 in a register, we may have the mode of TARGET being an integer mode but
4590 MODE == BLKmode. In that case, get an aligned object whose size and
4591 alignment are the same as TARGET and store TARGET into it (we can avoid
4592 the store if the field being stored is the entire width of TARGET). Then
4593 call ourselves recursively to store the field into a BLKmode version of
4594 that object. Finally, load from the object into TARGET. This is not
4595 very efficient in general, but should only be slightly more expensive
4596 than the otherwise-required unaligned accesses. Perhaps this can be
4597 cleaned up later. */
4598
4599 if (mode == BLKmode
4600 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4601 {
4602 rtx object = assign_stack_temp (GET_MODE (target),
4603 GET_MODE_SIZE (GET_MODE (target)), 0);
4604 rtx blk_object = copy_rtx (object);
4605
c6df88cb
MM
4606 MEM_SET_IN_STRUCT_P (object, 1);
4607 MEM_SET_IN_STRUCT_P (blk_object, 1);
bbf6f052
RK
4608 PUT_MODE (blk_object, BLKmode);
4609
4610 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4611 emit_move_insn (object, target);
4612
4613 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
ece32014 4614 align, total_size, alias_set);
bbf6f052 4615
46093b97
RS
4616 /* Even though we aren't returning target, we need to
4617 give it the updated value. */
bbf6f052
RK
4618 emit_move_insn (target, object);
4619
46093b97 4620 return blk_object;
bbf6f052
RK
4621 }
4622
4623 /* If the structure is in a register or if the component
4624 is a bit field, we cannot use addressing to access it.
4625 Use bit-field techniques or SUBREG to store in it. */
4626
4fa52007 4627 if (mode == VOIDmode
6ab06cbb
JW
4628 || (mode != BLKmode && ! direct_store[(int) mode]
4629 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4630 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4fa52007 4631 || GET_CODE (target) == REG
c980ac49 4632 || GET_CODE (target) == SUBREG
ccc98036
RS
4633 /* If the field isn't aligned enough to store as an ordinary memref,
4634 store it as a bit field. */
c7a7ac46 4635 || (SLOW_UNALIGNED_ACCESS
ccc98036 4636 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
c7a7ac46 4637 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 4638 {
906c4e36 4639 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 4640
ef19912d
RK
4641 /* If BITSIZE is narrower than the size of the type of EXP
4642 we will be narrowing TEMP. Normally, what's wanted are the
4643 low-order bits. However, if EXP's type is a record and this is
4644	 a big-endian machine, we want the upper BITSIZE bits.  */
4645 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4646 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4647 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4648 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4649 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4650 - bitsize),
4651 temp, 1);
4652
bbd6cf73
RK
4653 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4654 MODE. */
4655 if (mode != VOIDmode && mode != BLKmode
4656 && mode != TYPE_MODE (TREE_TYPE (exp)))
4657 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4658
a281e72d
RK
4659 /* If the modes of TARGET and TEMP are both BLKmode, both
4660 must be in memory and BITPOS must be aligned on a byte
4661 boundary. If so, we simply do a block copy. */
4662 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4663 {
4664 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4665 || bitpos % BITS_PER_UNIT != 0)
4666 abort ();
4667
0086427c
RK
4668 target = change_address (target, VOIDmode,
4669 plus_constant (XEXP (target, 0),
a281e72d
RK
4670 bitpos / BITS_PER_UNIT));
4671
4672 emit_block_move (target, temp,
4673 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4674 / BITS_PER_UNIT),
4675 1);
4676
4677 return value_mode == VOIDmode ? const0_rtx : target;
4678 }
4679
bbf6f052
RK
4680 /* Store the value in the bitfield. */
4681 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4682 if (value_mode != VOIDmode)
4683 {
4684 /* The caller wants an rtx for the value. */
4685 /* If possible, avoid refetching from the bitfield itself. */
4686 if (width_mask != 0
4687 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 4688 {
9074de27 4689 tree count;
5c4d7cfb 4690 enum machine_mode tmode;
86a2c12a 4691
5c4d7cfb
RS
4692 if (unsignedp)
4693 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4694 tmode = GET_MODE (temp);
86a2c12a
RS
4695 if (tmode == VOIDmode)
4696 tmode = value_mode;
5c4d7cfb
RS
4697 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4698 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4699 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4700 }
bbf6f052 4701 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
4702 NULL_RTX, value_mode, 0, align,
4703 total_size);
bbf6f052
RK
4704 }
4705 return const0_rtx;
4706 }
4707 else
4708 {
4709 rtx addr = XEXP (target, 0);
4710 rtx to_rtx;
4711
4712 /* If a value is wanted, it must be the lhs;
4713 so make the address stable for multiple use. */
4714
4715 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4716 && ! CONSTANT_ADDRESS_P (addr)
4717 /* A frame-pointer reference is already stable. */
4718 && ! (GET_CODE (addr) == PLUS
4719 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4720 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4721 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4722 addr = copy_to_reg (addr);
4723
4724 /* Now build a reference to just the desired component. */
4725
effbcc6a
RK
4726 to_rtx = copy_rtx (change_address (target, mode,
4727 plus_constant (addr,
4728 (bitpos
4729 / BITS_PER_UNIT))));
c6df88cb 4730 MEM_SET_IN_STRUCT_P (to_rtx, 1);
ece32014 4731 MEM_ALIAS_SET (to_rtx) = alias_set;
bbf6f052
RK
4732
4733 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4734 }
4735}
4736\f
4737/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4738 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 4739 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
4740
4741 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4742 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
4743 If the position of the field is variable, we store a tree
4744 giving the variable offset (in units) in *POFFSET.
4745 This offset is in addition to the bit position.
4746 If the position is not variable, we store 0 in *POFFSET.
839c4796
RK
4747 We set *PALIGNMENT to the alignment in bytes of the address that will be
4748 computed. This is the alignment of the thing we return if *POFFSET
4749   is zero, but can be less strictly aligned if *POFFSET is nonzero.
bbf6f052
RK
4750
4751 If any of the extraction expressions is volatile,
4752 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4753
4754 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4755 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
4756 is redundant.
4757
4758 If the field describes a variable-sized object, *PMODE is set to
4759 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
839c4796 4760 this case, but the address of the object can be found. */
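/* Editorial illustration: for an access such as `a.b[i].c', this routine
   peels off the COMPONENT_REFs and the ARRAY_REF, accumulating the
   constant part of the position in *PBITPOS and the variable part (here
   the `i * element size' term) in *POFFSET, and finally returns the
   innermost object `a'.  */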
bbf6f052
RK
4761
4762tree
4969d05d 4763get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
839c4796 4764 punsignedp, pvolatilep, palignment)
bbf6f052
RK
4765 tree exp;
4766 int *pbitsize;
4767 int *pbitpos;
7bb0943f 4768 tree *poffset;
bbf6f052
RK
4769 enum machine_mode *pmode;
4770 int *punsignedp;
4771 int *pvolatilep;
839c4796 4772 int *palignment;
bbf6f052 4773{
b50d17a1 4774 tree orig_exp = exp;
bbf6f052
RK
4775 tree size_tree = 0;
4776 enum machine_mode mode = VOIDmode;
742920c7 4777 tree offset = integer_zero_node;
c84e2712 4778 unsigned int alignment = BIGGEST_ALIGNMENT;
bbf6f052
RK
4779
4780 if (TREE_CODE (exp) == COMPONENT_REF)
4781 {
4782 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4783 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4784 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4785 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4786 }
4787 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4788 {
4789 size_tree = TREE_OPERAND (exp, 1);
4790 *punsignedp = TREE_UNSIGNED (exp);
4791 }
4792 else
4793 {
4794 mode = TYPE_MODE (TREE_TYPE (exp));
ab87f8c8
JL
4795 if (mode == BLKmode)
4796 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4797
bbf6f052
RK
4798 *pbitsize = GET_MODE_BITSIZE (mode);
4799 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4800 }
4801
4802 if (size_tree)
4803 {
4804 if (TREE_CODE (size_tree) != INTEGER_CST)
e7c33f54
RK
4805 mode = BLKmode, *pbitsize = -1;
4806 else
4807 *pbitsize = TREE_INT_CST_LOW (size_tree);
bbf6f052
RK
4808 }
4809
4810 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4811 and find the ultimate containing object. */
4812
4813 *pbitpos = 0;
4814
4815 while (1)
4816 {
7bb0943f 4817 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 4818 {
7bb0943f
RS
4819 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4820 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4821 : TREE_OPERAND (exp, 2));
e6d8c385 4822 tree constant = integer_zero_node, var = pos;
bbf6f052 4823
e7f3c83f
RK
4824 /* If this field hasn't been filled in yet, don't go
4825 past it. This should only happen when folding expressions
4826 made during type construction. */
4827 if (pos == 0)
4828 break;
4829
e6d8c385
RK
4830 /* Assume here that the offset is a multiple of a unit.
4831 If not, there should be an explicitly added constant. */
4832 if (TREE_CODE (pos) == PLUS_EXPR
4833 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4834 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
7bb0943f 4835 else if (TREE_CODE (pos) == INTEGER_CST)
e6d8c385
RK
4836 constant = pos, var = integer_zero_node;
4837
4838 *pbitpos += TREE_INT_CST_LOW (constant);
8d8c9ba9
RK
4839 offset = size_binop (PLUS_EXPR, offset,
4840 size_binop (EXACT_DIV_EXPR, var,
4841 size_int (BITS_PER_UNIT)));
bbf6f052 4842 }
bbf6f052 4843
742920c7 4844 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 4845 {
742920c7
RK
4846 /* This code is based on the code in case ARRAY_REF in expand_expr
4847 below. We assume here that the size of an array element is
4848 always an integral multiple of BITS_PER_UNIT. */
4849
4850 tree index = TREE_OPERAND (exp, 1);
4851 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4852 tree low_bound
4853 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4854 tree index_type = TREE_TYPE (index);
ead17059 4855 tree xindex;
742920c7 4856
4c08eef0 4857 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
742920c7 4858 {
4c08eef0
RK
4859 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4860 index);
742920c7
RK
4861 index_type = TREE_TYPE (index);
4862 }
4863
74a4fbfc
DB
4864 /* Optimize the special-case of a zero lower bound.
4865
4866 We convert the low_bound to sizetype to avoid some problems
4867 with constant folding. (E.g. suppose the lower bound is 1,
4868 and its mode is QI. Without the conversion, (ARRAY
4869 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4870 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4871
4872 But sizetype isn't quite right either (especially if
4873 the lowbound is negative). FIXME */
4874
ca0f2220 4875 if (! integer_zerop (low_bound))
74a4fbfc
DB
4876 index = fold (build (MINUS_EXPR, index_type, index,
4877 convert (sizetype, low_bound)));
ca0f2220 4878
f8dac6eb
R
4879 if (TREE_CODE (index) == INTEGER_CST)
4880 {
4881 index = convert (sbitsizetype, index);
4882 index_type = TREE_TYPE (index);
4883 }
4884
ead17059
RH
4885 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4886 convert (sbitsizetype,
4887 TYPE_SIZE (TREE_TYPE (exp)))));
742920c7 4888
ead17059
RH
4889 if (TREE_CODE (xindex) == INTEGER_CST
4890 && TREE_INT_CST_HIGH (xindex) == 0)
4891 *pbitpos += TREE_INT_CST_LOW (xindex);
742920c7 4892 else
956d6950 4893 {
ead17059
RH
4894 /* Either the bit offset calculated above is not constant, or
4895 it overflowed. In either case, redo the multiplication
4896 against the size in units. This is especially important
4897 in the non-constant case to avoid a division at runtime. */
4898 xindex = fold (build (MULT_EXPR, ssizetype, index,
4899 convert (ssizetype,
4900 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4901
4902 if (contains_placeholder_p (xindex))
4903 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4904
4905 offset = size_binop (PLUS_EXPR, offset, xindex);
956d6950 4906 }
bbf6f052
RK
4907 }
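/* Editorial note (illustration only, not original source): for `a[i]'
   with 4-byte elements, a constant index I contributes I * 32 bits to
   *PBITPOS, computed in sbitsizetype; if that product is non-constant
   or overflows, it is redone in byte units (ssizetype) and added to
   OFFSET, which avoids a division at run time.  */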
4908 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4909 && ! ((TREE_CODE (exp) == NOP_EXPR
4910 || TREE_CODE (exp) == CONVERT_EXPR)
7f62854a
RK
4911 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4912 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4913 != UNION_TYPE))
bbf6f052
RK
4914 && (TYPE_MODE (TREE_TYPE (exp))
4915 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4916 break;
7bb0943f
RS
4917
4918 /* If any reference in the chain is volatile, the effect is volatile. */
4919 if (TREE_THIS_VOLATILE (exp))
4920 *pvolatilep = 1;
839c4796
RK
4921
4922 /* If the offset is non-constant already, then we can't assume any
4923 alignment more than the alignment here. */
4924 if (! integer_zerop (offset))
4925 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4926
bbf6f052
RK
4927 exp = TREE_OPERAND (exp, 0);
4928 }
4929
839c4796
RK
4930 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4931 alignment = MIN (alignment, DECL_ALIGN (exp));
9293498f 4932 else if (TREE_TYPE (exp) != 0)
839c4796
RK
4933 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4934
742920c7
RK
4935 if (integer_zerop (offset))
4936 offset = 0;
4937
b50d17a1
RK
4938 if (offset != 0 && contains_placeholder_p (offset))
4939 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4940
bbf6f052 4941 *pmode = mode;
7bb0943f 4942 *poffset = offset;
839c4796 4943 *palignment = alignment / BITS_PER_UNIT;
bbf6f052
RK
4944 return exp;
4945}
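/* Editorial sketch, not part of the original source: a minimal use of
   get_inner_reference (the function above), mirroring its caller in
   expand_expr's COMPONENT_REF case below.  The helper name
   `example_inner_ref' is hypothetical.  For a reference such as `s.f'
   it returns the containing object `s'; *PBITPOS and *PBITSIZE then
   locate the referenced bits, and *POFFSET carries any variable byte
   offset.  */

static tree
example_inner_ref (exp)
     tree exp;
{
  enum machine_mode mode1;
  int bitsize, bitpos, unsignedp, volatilep = 0, alignment;
  tree offset;

  /* All of the scalar arguments are out parameters filled in by
     get_inner_reference itself.  */
  return get_inner_reference (exp, &bitsize, &bitpos, &offset,
                              &mode1, &unsignedp, &volatilep, &alignment);
}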
921b3427
RK
4946
4947/* Subroutine of expand_expr: compute memory_usage from modifier. */
4948static enum memory_use_mode
4949get_memory_usage_from_modifier (modifier)
4950 enum expand_modifier modifier;
4951{
4952 switch (modifier)
4953 {
4954 case EXPAND_NORMAL:
e5e809f4 4955 case EXPAND_SUM:
921b3427
RK
4956 return MEMORY_USE_RO;
4957 break;
4958 case EXPAND_MEMORY_USE_WO:
4959 return MEMORY_USE_WO;
4960 break;
4961 case EXPAND_MEMORY_USE_RW:
4962 return MEMORY_USE_RW;
4963 break;
921b3427 4964 case EXPAND_MEMORY_USE_DONT:
e5e809f4
JL
4965 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4966 MEMORY_USE_DONT, because they are modifiers to a call of
4967 expand_expr in the ADDR_EXPR case of expand_expr. */
921b3427 4968 case EXPAND_CONST_ADDRESS:
e5e809f4 4969 case EXPAND_INITIALIZER:
921b3427
RK
4970 return MEMORY_USE_DONT;
4971 case EXPAND_MEMORY_USE_BAD:
4972 default:
4973 abort ();
4974 }
4975}
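/* Editorial note (not original source): the mapping implemented above,
   in table form for quick reference.

     EXPAND_NORMAL, EXPAND_SUM                        -> MEMORY_USE_RO
     EXPAND_MEMORY_USE_WO                             -> MEMORY_USE_WO
     EXPAND_MEMORY_USE_RW                             -> MEMORY_USE_RW
     EXPAND_MEMORY_USE_DONT, EXPAND_CONST_ADDRESS,
       EXPAND_INITIALIZER                             -> MEMORY_USE_DONT
     EXPAND_MEMORY_USE_BAD                            -> abort ()  */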
bbf6f052
RK
4976\f
4977/* Given an rtx VALUE that may contain additions and multiplications,
4978 return an equivalent value that just refers to a register or memory.
4979 This is done by generating instructions to perform the arithmetic
c45a13a6
RK
4980 and returning a pseudo-register containing the value.
4981
4982 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
4983
4984rtx
4985force_operand (value, target)
4986 rtx value, target;
4987{
4988 register optab binoptab = 0;
4989 /* Use a temporary to force order of execution of calls to
4990 `force_operand'. */
4991 rtx tmp;
4992 register rtx op2;
4993 /* Use subtarget as the target for operand 0 of a binary operation. */
4994 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4995
8b015896
RH
4996 /* Check for a PIC address load. */
4997 if (flag_pic
4998 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
4999 && XEXP (value, 0) == pic_offset_table_rtx
5000 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5001 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5002 || GET_CODE (XEXP (value, 1)) == CONST))
5003 {
5004 if (!subtarget)
5005 subtarget = gen_reg_rtx (GET_MODE (value));
5006 emit_move_insn (subtarget, value);
5007 return subtarget;
5008 }
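  /* Editorial note (an interpretation, not original source): on a PIC
     target such an address typically has the form
       (plus pic_offset_table_rtx (symbol_ref "var"))
     and is loaded with a single move above so that the target's special
     PIC load pattern can match it, rather than being split into a
     separate addition by the optab code below.  */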
5009
bbf6f052
RK
5010 if (GET_CODE (value) == PLUS)
5011 binoptab = add_optab;
5012 else if (GET_CODE (value) == MINUS)
5013 binoptab = sub_optab;
5014 else if (GET_CODE (value) == MULT)
5015 {
5016 op2 = XEXP (value, 1);
5017 if (!CONSTANT_P (op2)
5018 && !(GET_CODE (op2) == REG && op2 != subtarget))
5019 subtarget = 0;
5020 tmp = force_operand (XEXP (value, 0), subtarget);
5021 return expand_mult (GET_MODE (value), tmp,
906c4e36 5022 force_operand (op2, NULL_RTX),
bbf6f052
RK
5023 target, 0);
5024 }
5025
5026 if (binoptab)
5027 {
5028 op2 = XEXP (value, 1);
5029 if (!CONSTANT_P (op2)
5030 && !(GET_CODE (op2) == REG && op2 != subtarget))
5031 subtarget = 0;
5032 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5033 {
5034 binoptab = add_optab;
5035 op2 = negate_rtx (GET_MODE (value), op2);
5036 }
5037
5038 /* Check for an addition with OP2 a constant integer and our first
5039 operand a PLUS of a virtual register and something else. In that
5040 case, we want to emit the sum of the virtual register and the
5041 constant first and then add the other value. This allows virtual
5042 register instantiation to simply modify the constant rather than
5043 creating another one around this addition. */
5044 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5045 && GET_CODE (XEXP (value, 0)) == PLUS
5046 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5047 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5048 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5049 {
5050 rtx temp = expand_binop (GET_MODE (value), binoptab,
5051 XEXP (XEXP (value, 0), 0), op2,
5052 subtarget, 0, OPTAB_LIB_WIDEN);
5053 return expand_binop (GET_MODE (value), binoptab, temp,
5054 force_operand (XEXP (XEXP (value, 0), 1), 0),
5055 target, 0, OPTAB_LIB_WIDEN);
5056 }
5057
5058 tmp = force_operand (XEXP (value, 0), subtarget);
5059 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 5060 force_operand (op2, NULL_RTX),
bbf6f052 5061 target, 0, OPTAB_LIB_WIDEN);
8008b228 5062 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
5063 because the only operations we are expanding here are signed ones. */
5064 }
5065 return value;
5066}
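/* Editorial sketch, not part of the original source: a typical use of
   force_operand.  The helper name `example_force_sum' is hypothetical.
   Given the address-like rtx (plus (reg R) (const_int 4)), the call
   emits whatever add insns are needed and returns an operand that is
   just a register, memory or constant.  */

static rtx
example_force_sum (reg)
     rtx reg;
{
  rtx sum = gen_rtx_PLUS (SImode, reg, GEN_INT (4));

  /* Passing NULL_RTX as the target lets force_operand pick a fresh
     pseudo register for the result.  */
  return force_operand (sum, NULL_RTX);
}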
5067\f
5068/* Subroutine of expand_expr:
5069 save the non-copied parts (LIST) of an expr (LHS), and return a list
5070 which can restore these values to their previous values,
5071 should something modify their storage. */
5072
5073static tree
5074save_noncopied_parts (lhs, list)
5075 tree lhs;
5076 tree list;
5077{
5078 tree tail;
5079 tree parts = 0;
5080
5081 for (tail = list; tail; tail = TREE_CHAIN (tail))
5082 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5083 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5084 else
5085 {
5086 tree part = TREE_VALUE (tail);
5087 tree part_type = TREE_TYPE (part);
906c4e36 5088 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
06089a8b 5089 rtx target = assign_temp (part_type, 0, 1, 1);
bbf6f052 5090 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 5091 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 5092 parts = tree_cons (to_be_saved,
906c4e36
RK
5093 build (RTL_EXPR, part_type, NULL_TREE,
5094 (tree) target),
bbf6f052
RK
5095 parts);
5096 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5097 }
5098 return parts;
5099}
5100
5101/* Subroutine of expand_expr:
5102 record the non-copied parts (LIST) of an expr (LHS), and return a list
5103 which specifies the initial values of these parts. */
5104
5105static tree
5106init_noncopied_parts (lhs, list)
5107 tree lhs;
5108 tree list;
5109{
5110 tree tail;
5111 tree parts = 0;
5112
5113 for (tail = list; tail; tail = TREE_CHAIN (tail))
5114 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5115 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
c15398de 5116 else if (TREE_PURPOSE (tail))
bbf6f052
RK
5117 {
5118 tree part = TREE_VALUE (tail);
5119 tree part_type = TREE_TYPE (part);
906c4e36 5120 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
5121 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5122 }
5123 return parts;
5124}
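/* Editorial note (not original source): in the two routines above,
   save_noncopied_parts returns a list whose TREE_PURPOSE is a
   COMPONENT_REF of LHS and whose TREE_VALUE is an RTL_EXPR holding a
   temporary with that part's saved value, so the parts can be restored
   after a structure copy; init_noncopied_parts instead pairs each
   part's initial value (TREE_PURPOSE) with the COMPONENT_REF to be
   initialized (TREE_VALUE).  */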
5125
5126/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
5127 EXP can reference X, which is being modified. TOP_P is nonzero if this
5128 call is going to be used to determine whether we need a temporary
ff439b5f
CB
5129 for EXP, as opposed to a recursive call to this function.
5130
5131 It is always safe for this routine to return zero since it merely
5132 searches for optimization opportunities. */
bbf6f052
RK
5133
5134static int
e5e809f4 5135safe_from_p (x, exp, top_p)
bbf6f052
RK
5136 rtx x;
5137 tree exp;
e5e809f4 5138 int top_p;
bbf6f052
RK
5139{
5140 rtx exp_rtl = 0;
5141 int i, nops;
ff439b5f
CB
5142 static int save_expr_count;
5143 static int save_expr_size = 0;
5144 static tree *save_expr_rewritten;
5145 static tree save_expr_trees[256];
bbf6f052 5146
6676e72f
RK
5147 if (x == 0
5148 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
5149 have no way of allocating temporaries of variable size
5150 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5151 So we assume here that something at a higher level has prevented a
f4510f37 5152 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4
JL
5153 do this when X is BLKmode and when we are at the top level. */
5154 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
f4510f37 5155 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
5156 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5157 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5158 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5159 != INTEGER_CST)
f4510f37 5160 && GET_MODE (x) == BLKmode))
bbf6f052
RK
5161 return 1;
5162
ff439b5f
CB
5163 if (top_p && save_expr_size == 0)
5164 {
5165 int rtn;
5166
5167 save_expr_count = 0;
5168 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5169 save_expr_rewritten = &save_expr_trees[0];
5170
5171 rtn = safe_from_p (x, exp, 1);
5172
5173 for (i = 0; i < save_expr_count; ++i)
5174 {
5175 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5176 abort ();
5177 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5178 }
5179
5180 save_expr_size = 0;
5181
5182 return rtn;
5183 }
5184
bbf6f052
RK
5185 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5186 find the underlying pseudo. */
5187 if (GET_CODE (x) == SUBREG)
5188 {
5189 x = SUBREG_REG (x);
5190 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5191 return 0;
5192 }
5193
5194 /* If X is a location in the outgoing argument area, it is always safe. */
5195 if (GET_CODE (x) == MEM
5196 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5197 || (GET_CODE (XEXP (x, 0)) == PLUS
5198 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5199 return 1;
5200
5201 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5202 {
5203 case 'd':
5204 exp_rtl = DECL_RTL (exp);
5205 break;
5206
5207 case 'c':
5208 return 1;
5209
5210 case 'x':
5211 if (TREE_CODE (exp) == TREE_LIST)
f32fd778 5212 return ((TREE_VALUE (exp) == 0
e5e809f4 5213 || safe_from_p (x, TREE_VALUE (exp), 0))
bbf6f052 5214 && (TREE_CHAIN (exp) == 0
e5e809f4 5215 || safe_from_p (x, TREE_CHAIN (exp), 0)));
ff439b5f
CB
5216 else if (TREE_CODE (exp) == ERROR_MARK)
5217 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
5218 else
5219 return 0;
5220
5221 case '1':
e5e809f4 5222 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
5223
5224 case '2':
5225 case '<':
e5e809f4
JL
5226 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5227 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
bbf6f052
RK
5228
5229 case 'e':
5230 case 'r':
5231 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5232 the expression. If it is set, we conflict iff we are that rtx or
5233 both are in memory. Otherwise, we check all operands of the
5234 expression recursively. */
5235
5236 switch (TREE_CODE (exp))
5237 {
5238 case ADDR_EXPR:
e44842fe 5239 return (staticp (TREE_OPERAND (exp, 0))
e5e809f4
JL
5240 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5241 || TREE_STATIC (exp));
bbf6f052
RK
5242
5243 case INDIRECT_REF:
5244 if (GET_CODE (x) == MEM)
5245 return 0;
5246 break;
5247
5248 case CALL_EXPR:
5249 exp_rtl = CALL_EXPR_RTL (exp);
5250 if (exp_rtl == 0)
5251 {
5252 /* Assume that the call will clobber all hard registers and
5253 all of memory. */
5254 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5255 || GET_CODE (x) == MEM)
5256 return 0;
5257 }
5258
5259 break;
5260
5261 case RTL_EXPR:
3bb5826a
RK
5262 /* If a sequence exists, we would have to scan every instruction
5263 in the sequence to see if it was safe. This is probably not
5264 worthwhile. */
5265 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
5266 return 0;
5267
3bb5826a 5268 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
5269 break;
5270
5271 case WITH_CLEANUP_EXPR:
5272 exp_rtl = RTL_EXPR_RTL (exp);
5273 break;
5274
5dab5552 5275 case CLEANUP_POINT_EXPR:
e5e809f4 5276 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 5277
bbf6f052
RK
5278 case SAVE_EXPR:
5279 exp_rtl = SAVE_EXPR_RTL (exp);
ff439b5f
CB
5280 if (exp_rtl)
5281 break;
5282
5283 /* This SAVE_EXPR might appear many times in the top-level
5284 safe_from_p() expression, and if it has a complex
5285 subexpression, examining it multiple times could result
5286 in a combinatorial explosion. E.g. on an Alpha
5287 running at least 200MHz, a Fortran test case compiled with
5288 optimization took about 28 minutes to compile -- even though
5289 it was only a few lines long, and the complicated line causing
5290 so much time to be spent in the earlier version of safe_from_p()
5291 had only 293 or so unique nodes.
5292
5293 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5294 where it is so we can turn it back in the top-level safe_from_p()
5295 when we're done. */
5296
5297 /* For now, don't bother re-sizing the array. */
5298 if (save_expr_count >= save_expr_size)
5299 return 0;
5300 save_expr_rewritten[save_expr_count++] = exp;
ff439b5f
CB
5301
5302 nops = tree_code_length[(int) SAVE_EXPR];
5303 for (i = 0; i < nops; i++)
ff59bfe6
JM
5304 {
5305 tree operand = TREE_OPERAND (exp, i);
5306 if (operand == NULL_TREE)
5307 continue;
5308 TREE_SET_CODE (exp, ERROR_MARK);
5309 if (!safe_from_p (x, operand, 0))
5310 return 0;
5311 TREE_SET_CODE (exp, SAVE_EXPR);
5312 }
5313 TREE_SET_CODE (exp, ERROR_MARK);
ff439b5f 5314 return 1;
bbf6f052 5315
8129842c
RS
5316 case BIND_EXPR:
5317 /* The only operand we look at is operand 1. The rest aren't
5318 part of the expression. */
e5e809f4 5319 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 5320
bbf6f052 5321 case METHOD_CALL_EXPR:
0f41302f 5322 /* This takes an rtx argument, but shouldn't appear here. */
bbf6f052 5323 abort ();
e9a25f70
JL
5324
5325 default:
5326 break;
bbf6f052
RK
5327 }
5328
5329 /* If we have an rtx, we do not need to scan our operands. */
5330 if (exp_rtl)
5331 break;
5332
5333 nops = tree_code_length[(int) TREE_CODE (exp)];
5334 for (i = 0; i < nops; i++)
5335 if (TREE_OPERAND (exp, i) != 0
e5e809f4 5336 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052
RK
5337 return 0;
5338 }
5339
5340 /* If we have an rtl, find any enclosed object. Then see if we conflict
5341 with it. */
5342 if (exp_rtl)
5343 {
5344 if (GET_CODE (exp_rtl) == SUBREG)
5345 {
5346 exp_rtl = SUBREG_REG (exp_rtl);
5347 if (GET_CODE (exp_rtl) == REG
5348 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5349 return 0;
5350 }
5351
5352 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5353 are memory and EXP is not readonly. */
5354 return ! (rtx_equal_p (x, exp_rtl)
5355 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5356 && ! TREE_READONLY (exp)));
5357 }
5358
5359 /* If we reach here, it is safe. */
5360 return 1;
5361}
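/* Editorial sketch, not part of the original source: the usual calling
   pattern for safe_from_p, as in expand_expr's CONSTRUCTOR case below.
   If EXP might reference TARGET, the caller abandons TARGET and
   evaluates into a fresh pseudo instead.  */

#if 0 /* illustration only */
  if (target == 0 || ! safe_from_p (target, exp, 1))
    target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
#endif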
5362
5363/* Subroutine of expand_expr: return nonzero iff EXP is an
5364 expression whose type is statically determinable. */
5365
5366static int
5367fixed_type_p (exp)
5368 tree exp;
5369{
5370 if (TREE_CODE (exp) == PARM_DECL
5371 || TREE_CODE (exp) == VAR_DECL
5372 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5373 || TREE_CODE (exp) == COMPONENT_REF
5374 || TREE_CODE (exp) == ARRAY_REF)
5375 return 1;
5376 return 0;
5377}
01c8a7c8
RK
5378
5379/* Subroutine of expand_expr: return rtx if EXP is a
5380 variable or parameter; else return 0. */
5381
5382static rtx
5383var_rtx (exp)
5384 tree exp;
5385{
5386 STRIP_NOPS (exp);
5387 switch (TREE_CODE (exp))
5388 {
5389 case PARM_DECL:
5390 case VAR_DECL:
5391 return DECL_RTL (exp);
5392 default:
5393 return 0;
5394 }
5395}
dbecbbe4
JL
5396
5397#ifdef MAX_INTEGER_COMPUTATION_MODE
5398void
5399check_max_integer_computation_mode (exp)
5400 tree exp;
5401{
5f652c07 5402 enum tree_code code;
dbecbbe4
JL
5403 enum machine_mode mode;
5404
5f652c07
JM
5405 /* Strip any NOPs that don't change the mode. */
5406 STRIP_NOPS (exp);
5407 code = TREE_CODE (exp);
5408
71bca506
JL
5409 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5410 if (code == NOP_EXPR
5411 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5412 return;
5413
dbecbbe4
JL
5414 /* First check the type of the overall operation. We need only look at
5415 unary, binary and relational operations. */
5416 if (TREE_CODE_CLASS (code) == '1'
5417 || TREE_CODE_CLASS (code) == '2'
5418 || TREE_CODE_CLASS (code) == '<')
5419 {
5420 mode = TYPE_MODE (TREE_TYPE (exp));
5421 if (GET_MODE_CLASS (mode) == MODE_INT
5422 && mode > MAX_INTEGER_COMPUTATION_MODE)
5423 fatal ("unsupported wide integer operation");
5424 }
5425
5426 /* Check operand of a unary op. */
5427 if (TREE_CODE_CLASS (code) == '1')
5428 {
5429 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5430 if (GET_MODE_CLASS (mode) == MODE_INT
5431 && mode > MAX_INTEGER_COMPUTATION_MODE)
5432 fatal ("unsupported wide integer operation");
5433 }
5434
5435 /* Check operands of a binary/comparison op. */
5436 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5437 {
5438 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5439 if (GET_MODE_CLASS (mode) == MODE_INT
5440 && mode > MAX_INTEGER_COMPUTATION_MODE)
5441 fatal ("unsupported wide integer operation");
5442
5443 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5444 if (GET_MODE_CLASS (mode) == MODE_INT
5445 && mode > MAX_INTEGER_COMPUTATION_MODE)
5446 fatal ("unsupported wide integer operation");
5447 }
5448}
5449#endif
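/* Editorial note (not original source): on a target that defines
   MAX_INTEGER_COMPUTATION_MODE as SImode, for example, an operation
   whose type or operand types carry a wider integer mode (say DImode
   arithmetic) makes the checks above report "unsupported wide integer
   operation"; conversions of integer constants are explicitly
   exempted.  */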
5450
bbf6f052
RK
5451\f
5452/* expand_expr: generate code for computing expression EXP.
5453 An rtx for the computed value is returned. The value is never null.
5454 In the case of a void EXP, const0_rtx is returned.
5455
5456 The value may be stored in TARGET if TARGET is nonzero.
5457 TARGET is just a suggestion; callers must assume that
5458 the rtx returned may not be the same as TARGET.
5459
5460 If TARGET is CONST0_RTX, it means that the value will be ignored.
5461
5462 If TMODE is not VOIDmode, it suggests generating the
5463 result in mode TMODE. But this is done only when convenient.
5464 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5465 TMODE is just a suggestion; callers must assume that
5466 the rtx returned may not have mode TMODE.
5467
d6a5ac33
RK
5468 Note that TARGET may have neither TMODE nor MODE. In that case, it
5469 probably will not be used.
bbf6f052
RK
5470
5471 If MODIFIER is EXPAND_SUM then when EXP is an addition
5472 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5473 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5474 products as above, or REG or MEM, or constant.
5475 Ordinarily in such cases we would output mul or add instructions
5476 and then return a pseudo reg containing the sum.
5477
5478 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5479 it also marks a label as absolutely required (it can't be dead).
26fcb35a 5480 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
5481 This is used for outputting expressions used in initializers.
5482
5483 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5484 with a constant address even if that address is not normally legitimate.
5485 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
bbf6f052
RK
5486
5487rtx
5488expand_expr (exp, target, tmode, modifier)
5489 register tree exp;
5490 rtx target;
5491 enum machine_mode tmode;
5492 enum expand_modifier modifier;
5493{
b50d17a1
RK
5494 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5495 This is static so it will be accessible to our recursive callees. */
5496 static tree placeholder_list = 0;
bbf6f052
RK
5497 register rtx op0, op1, temp;
5498 tree type = TREE_TYPE (exp);
5499 int unsignedp = TREE_UNSIGNED (type);
68557e14 5500 register enum machine_mode mode;
bbf6f052
RK
5501 register enum tree_code code = TREE_CODE (exp);
5502 optab this_optab;
68557e14
ML
5503 rtx subtarget, original_target;
5504 int ignore;
bbf6f052 5505 tree context;
921b3427
RK
5506 /* Used by check-memory-usage to make modifier read only. */
5507 enum expand_modifier ro_modifier;
bbf6f052 5508
68557e14
ML
5509 /* Handle ERROR_MARK before anybody tries to access its type. */
5510 if (TREE_CODE (exp) == ERROR_MARK)
5511 {
5512 op0 = CONST0_RTX (tmode);
5513 if (op0 != 0)
5514 return op0;
5515 return const0_rtx;
5516 }
5517
5518 mode = TYPE_MODE (type);
5519 /* Use subtarget as the target for operand 0 of a binary operation. */
5520 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5521 original_target = target;
5522 ignore = (target == const0_rtx
5523 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5524 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5525 || code == COND_EXPR)
5526 && TREE_CODE (type) == VOID_TYPE));
5527
921b3427
RK
5528 /* Make a read-only version of the modifier. */
5529 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5530 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5531 ro_modifier = modifier;
5532 else
5533 ro_modifier = EXPAND_NORMAL;
ca695ac9 5534
bbf6f052
RK
5535 /* Don't use hard regs as subtargets, because the combiner
5536 can only handle pseudo regs. */
5537 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5538 subtarget = 0;
5539 /* Avoid subtargets inside loops,
5540 since they hide some invariant expressions. */
5541 if (preserve_subexpressions_p ())
5542 subtarget = 0;
5543
dd27116b
RK
5544 /* If we are going to ignore this result, we need only do something
5545 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
5546 is, short-circuit the most common cases here. Note that we must
5547 not call expand_expr with anything but const0_rtx in case this
5548 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 5549
dd27116b
RK
5550 if (ignore)
5551 {
5552 if (! TREE_SIDE_EFFECTS (exp))
5553 return const0_rtx;
5554
5555 /* Ensure we reference a volatile object even if value is ignored. */
5556 if (TREE_THIS_VOLATILE (exp)
5557 && TREE_CODE (exp) != FUNCTION_DECL
5558 && mode != VOIDmode && mode != BLKmode)
5559 {
921b3427 5560 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
dd27116b
RK
5561 if (GET_CODE (temp) == MEM)
5562 temp = copy_to_reg (temp);
5563 return const0_rtx;
5564 }
5565
5566 if (TREE_CODE_CLASS (code) == '1')
5567 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 5568 VOIDmode, ro_modifier);
dd27116b
RK
5569 else if (TREE_CODE_CLASS (code) == '2'
5570 || TREE_CODE_CLASS (code) == '<')
5571 {
921b3427
RK
5572 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5573 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
dd27116b
RK
5574 return const0_rtx;
5575 }
5576 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5577 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5578 /* If the second operand has no side effects, just evaluate
0f41302f 5579 the first. */
dd27116b 5580 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 5581 VOIDmode, ro_modifier);
dd27116b 5582
90764a87 5583 target = 0;
dd27116b 5584 }
bbf6f052 5585
dbecbbe4 5586#ifdef MAX_INTEGER_COMPUTATION_MODE
5f652c07
JM
5587 /* Only check stuff here if the mode we want is different from the mode
5588 of the expression; if it's the same, check_max_integer_computation_mode
5589 will handle it. Do we really need to check this stuff at all? */
5590
ce3c0b53 5591 if (target
5f652c07 5592 && GET_MODE (target) != mode
ce3c0b53
JL
5593 && TREE_CODE (exp) != INTEGER_CST
5594 && TREE_CODE (exp) != PARM_DECL
ee06cc21
JL
5595 && TREE_CODE (exp) != ARRAY_REF
5596 && TREE_CODE (exp) != COMPONENT_REF
5597 && TREE_CODE (exp) != BIT_FIELD_REF
5598 && TREE_CODE (exp) != INDIRECT_REF
6bcd94ae 5599 && TREE_CODE (exp) != CALL_EXPR
6ab46dff
GRK
5600 && TREE_CODE (exp) != VAR_DECL
5601 && TREE_CODE (exp) != RTL_EXPR)
dbecbbe4
JL
5602 {
5603 enum machine_mode mode = GET_MODE (target);
5604
5605 if (GET_MODE_CLASS (mode) == MODE_INT
5606 && mode > MAX_INTEGER_COMPUTATION_MODE)
5607 fatal ("unsupported wide integer operation");
5608 }
5609
5f652c07
JM
5610 if (tmode != mode
5611 && TREE_CODE (exp) != INTEGER_CST
ce3c0b53 5612 && TREE_CODE (exp) != PARM_DECL
ee06cc21
JL
5613 && TREE_CODE (exp) != ARRAY_REF
5614 && TREE_CODE (exp) != COMPONENT_REF
5615 && TREE_CODE (exp) != BIT_FIELD_REF
5616 && TREE_CODE (exp) != INDIRECT_REF
ce3c0b53 5617 && TREE_CODE (exp) != VAR_DECL
6bcd94ae 5618 && TREE_CODE (exp) != CALL_EXPR
6ab46dff 5619 && TREE_CODE (exp) != RTL_EXPR
71bca506 5620 && GET_MODE_CLASS (tmode) == MODE_INT
dbecbbe4
JL
5621 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5622 fatal ("unsupported wide integer operation");
5623
5624 check_max_integer_computation_mode (exp);
5625#endif
5626
e44842fe
RK
5627 /* If will do cse, generate all results into pseudo registers
5628 since 1) that allows cse to find more things
5629 and 2) otherwise cse could produce an insn the machine
5630 cannot support. */
5631
bbf6f052
RK
5632 if (! cse_not_expected && mode != BLKmode && target
5633 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5634 target = subtarget;
5635
bbf6f052
RK
5636 switch (code)
5637 {
5638 case LABEL_DECL:
b552441b
RS
5639 {
5640 tree function = decl_function_context (exp);
5641 /* Handle using a label in a containing function. */
d0977240
RK
5642 if (function != current_function_decl
5643 && function != inline_function_decl && function != 0)
b552441b
RS
5644 {
5645 struct function *p = find_function_data (function);
5646 /* Allocate in the memory associated with the function
5647 that the label is in. */
5648 push_obstacks (p->function_obstack,
5649 p->function_maybepermanent_obstack);
5650
49ad7cfa
BS
5651 p->expr->x_forced_labels
5652 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5653 p->expr->x_forced_labels);
b552441b
RS
5654 pop_obstacks ();
5655 }
ab87f8c8
JL
5656 else
5657 {
ab87f8c8
JL
5658 if (modifier == EXPAND_INITIALIZER)
5659 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5660 label_rtx (exp),
5661 forced_labels);
5662 }
c5c76735 5663
38a448ca
RH
5664 temp = gen_rtx_MEM (FUNCTION_MODE,
5665 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
d0977240
RK
5666 if (function != current_function_decl
5667 && function != inline_function_decl && function != 0)
26fcb35a
RS
5668 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5669 return temp;
b552441b 5670 }
bbf6f052
RK
5671
5672 case PARM_DECL:
5673 if (DECL_RTL (exp) == 0)
5674 {
5675 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 5676 return CONST0_RTX (mode);
bbf6f052
RK
5677 }
5678
0f41302f 5679 /* ... fall through ... */
d6a5ac33 5680
bbf6f052 5681 case VAR_DECL:
2dca20cd
RS
5682 /* If a static var's type was incomplete when the decl was written,
5683 but the type is complete now, lay out the decl now. */
5684 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5685 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5686 {
5687 push_obstacks_nochange ();
5688 end_temporary_allocation ();
5689 layout_decl (exp, 0);
5690 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5691 pop_obstacks ();
5692 }
d6a5ac33 5693
7d384cc0
KR
5694 /* Although static-storage variables start off initialized, according to
5695 ANSI C, a memcpy could overwrite them with uninitialized values. So
5696 we check them too. This also lets us check for read-only variables
5697 accessed via a non-const declaration, in case it won't be detected
5698 any other way (e.g., in an embedded system or OS kernel without
5699 memory protection).
5700
5701 Aggregates are not checked here; they're handled elsewhere. */
49ad7cfa
BS
5702 if (current_function && current_function_check_memory_usage
5703 && code == VAR_DECL
921b3427 5704 && GET_CODE (DECL_RTL (exp)) == MEM
921b3427
RK
5705 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5706 {
5707 enum memory_use_mode memory_usage;
5708 memory_usage = get_memory_usage_from_modifier (modifier);
5709
5710 if (memory_usage != MEMORY_USE_DONT)
5711 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 5712 XEXP (DECL_RTL (exp), 0), Pmode,
921b3427
RK
5713 GEN_INT (int_size_in_bytes (type)),
5714 TYPE_MODE (sizetype),
956d6950
JL
5715 GEN_INT (memory_usage),
5716 TYPE_MODE (integer_type_node));
921b3427
RK
5717 }
5718
0f41302f 5719 /* ... fall through ... */
d6a5ac33 5720
2dca20cd 5721 case FUNCTION_DECL:
bbf6f052
RK
5722 case RESULT_DECL:
5723 if (DECL_RTL (exp) == 0)
5724 abort ();
d6a5ac33 5725
e44842fe
RK
5726 /* Ensure variable marked as used even if it doesn't go through
5727 a parser. If it hasn't been used yet, write out an external
5728 definition. */
5729 if (! TREE_USED (exp))
5730 {
5731 assemble_external (exp);
5732 TREE_USED (exp) = 1;
5733 }
5734
dc6d66b3
RK
5735 /* Show we haven't gotten RTL for this yet. */
5736 temp = 0;
5737
bbf6f052
RK
5738 /* Handle variables inherited from containing functions. */
5739 context = decl_function_context (exp);
5740
5741 /* We treat inline_function_decl as an alias for the current function
5742 because that is the inline function whose vars, types, etc.
5743 are being merged into the current function.
5744 See expand_inline_function. */
d6a5ac33 5745
bbf6f052
RK
5746 if (context != 0 && context != current_function_decl
5747 && context != inline_function_decl
5748 /* If var is static, we don't need a static chain to access it. */
5749 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5750 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5751 {
5752 rtx addr;
5753
5754 /* Mark as non-local and addressable. */
81feeecb 5755 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
5756 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5757 abort ();
bbf6f052
RK
5758 mark_addressable (exp);
5759 if (GET_CODE (DECL_RTL (exp)) != MEM)
5760 abort ();
5761 addr = XEXP (DECL_RTL (exp), 0);
5762 if (GET_CODE (addr) == MEM)
38a448ca
RH
5763 addr = gen_rtx_MEM (Pmode,
5764 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
5765 else
5766 addr = fix_lexical_addr (addr, exp);
dc6d66b3 5767 temp = change_address (DECL_RTL (exp), mode, addr);
bbf6f052 5768 }
4af3895e 5769
bbf6f052
RK
5770 /* This is the case of an array whose size is to be determined
5771 from its initializer, while the initializer is still being parsed.
5772 See expand_decl. */
d6a5ac33 5773
dc6d66b3
RK
5774 else if (GET_CODE (DECL_RTL (exp)) == MEM
5775 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5776 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
bbf6f052 5777 XEXP (DECL_RTL (exp), 0));
d6a5ac33
RK
5778
5779 /* If DECL_RTL is memory, we are in the normal case and either
5780 the address is not valid or it is not a register and -fforce-addr
5781 is specified, get the address into a register. */
5782
dc6d66b3
RK
5783 else if (GET_CODE (DECL_RTL (exp)) == MEM
5784 && modifier != EXPAND_CONST_ADDRESS
5785 && modifier != EXPAND_SUM
5786 && modifier != EXPAND_INITIALIZER
5787 && (! memory_address_p (DECL_MODE (exp),
5788 XEXP (DECL_RTL (exp), 0))
5789 || (flag_force_addr
5790 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5791 temp = change_address (DECL_RTL (exp), VOIDmode,
d6a5ac33 5792 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 5793
dc6d66b3
RK
5794 /* If we got something, return it. But first, set the alignment
5795 if the address is a register. */
5796 if (temp != 0)
5797 {
5798 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5799 mark_reg_pointer (XEXP (temp, 0),
5800 DECL_ALIGN (exp) / BITS_PER_UNIT);
5801
5802 return temp;
5803 }
5804
1499e0a8
RK
5805 /* If the mode of DECL_RTL does not match that of the decl, it
5806 must be a promoted value. We return a SUBREG of the wanted mode,
5807 but mark it so that we know that it was already extended. */
5808
5809 if (GET_CODE (DECL_RTL (exp)) == REG
5810 && GET_MODE (DECL_RTL (exp)) != mode)
5811 {
1499e0a8
RK
5812 /* Get the signedness used for this variable. Ensure we get the
5813 same mode we got when the variable was declared. */
78911e8b
RK
5814 if (GET_MODE (DECL_RTL (exp))
5815 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
1499e0a8
RK
5816 abort ();
5817
38a448ca 5818 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
1499e0a8
RK
5819 SUBREG_PROMOTED_VAR_P (temp) = 1;
5820 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5821 return temp;
5822 }
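      /* Editorial note (illustration only): e.g. on a target that
         promotes QImode variables to SImode registers, DECL_RTL is an
         SImode REG while the decl's mode is QImode; the SUBREG built
         above re-narrows it, and the SUBREG_PROMOTED_* flags record
         that the wide register already holds a properly extended value,
         letting later code skip redundant extensions.  */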
5823
bbf6f052
RK
5824 return DECL_RTL (exp);
5825
5826 case INTEGER_CST:
5827 return immed_double_const (TREE_INT_CST_LOW (exp),
5828 TREE_INT_CST_HIGH (exp),
5829 mode);
5830
5831 case CONST_DECL:
921b3427
RK
5832 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5833 EXPAND_MEMORY_USE_BAD);
bbf6f052
RK
5834
5835 case REAL_CST:
5836 /* If optimized, generate immediate CONST_DOUBLE
5837 which will be turned into memory by reload if necessary.
5838
5839 We used to force a register so that loop.c could see it. But
5840 this does not allow gen_* patterns to perform optimizations with
5841 the constants. It also produces two insns in cases like "x = 1.0;".
5842 On most machines, floating-point constants are not permitted in
5843 many insns, so we'd end up copying it to a register in any case.
5844
5845 Now, we do the copying in expand_binop, if appropriate. */
5846 return immed_real_const (exp);
5847
5848 case COMPLEX_CST:
5849 case STRING_CST:
5850 if (! TREE_CST_RTL (exp))
5851 output_constant_def (exp);
5852
5853 /* TREE_CST_RTL probably contains a constant address.
5854 On RISC machines where a constant address isn't valid,
5855 make some insns to get that address into a register. */
5856 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5857 && modifier != EXPAND_CONST_ADDRESS
5858 && modifier != EXPAND_INITIALIZER
5859 && modifier != EXPAND_SUM
d6a5ac33
RK
5860 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5861 || (flag_force_addr
5862 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
bbf6f052
RK
5863 return change_address (TREE_CST_RTL (exp), VOIDmode,
5864 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5865 return TREE_CST_RTL (exp);
5866
bf1e5319 5867 case EXPR_WITH_FILE_LOCATION:
b24f65cd
APB
5868 {
5869 rtx to_return;
5870 char *saved_input_filename = input_filename;
5871 int saved_lineno = lineno;
5872 input_filename = EXPR_WFL_FILENAME (exp);
5873 lineno = EXPR_WFL_LINENO (exp);
5874 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5875 emit_line_note (input_filename, lineno);
5876 /* Possibly avoid switching back and forth here. */
5877 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5878 input_filename = saved_input_filename;
5879 lineno = saved_lineno;
5880 return to_return;
5881 }
bf1e5319 5882
bbf6f052
RK
5883 case SAVE_EXPR:
5884 context = decl_function_context (exp);
d6a5ac33 5885
d0977240
RK
5886 /* If this SAVE_EXPR was at global context, assume we are an
5887 initialization function and move it into our context. */
5888 if (context == 0)
5889 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5890
bbf6f052
RK
5891 /* We treat inline_function_decl as an alias for the current function
5892 because that is the inline function whose vars, types, etc.
5893 are being merged into the current function.
5894 See expand_inline_function. */
5895 if (context == current_function_decl || context == inline_function_decl)
5896 context = 0;
5897
5898 /* If this is non-local, handle it. */
5899 if (context)
5900 {
d0977240
RK
5901 /* The following call just exists to abort if the context is
5902 not of a containing function. */
5903 find_function_data (context);
5904
bbf6f052
RK
5905 temp = SAVE_EXPR_RTL (exp);
5906 if (temp && GET_CODE (temp) == REG)
5907 {
5908 put_var_into_stack (exp);
5909 temp = SAVE_EXPR_RTL (exp);
5910 }
5911 if (temp == 0 || GET_CODE (temp) != MEM)
5912 abort ();
5913 return change_address (temp, mode,
5914 fix_lexical_addr (XEXP (temp, 0), exp));
5915 }
5916 if (SAVE_EXPR_RTL (exp) == 0)
5917 {
06089a8b
RK
5918 if (mode == VOIDmode)
5919 temp = const0_rtx;
5920 else
e5e809f4 5921 temp = assign_temp (type, 3, 0, 0);
1499e0a8 5922
bbf6f052 5923 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 5924 if (!optimize && GET_CODE (temp) == REG)
38a448ca
RH
5925 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5926 save_expr_regs);
ff78f773
RK
5927
5928 /* If the mode of TEMP does not match that of the expression, it
5929 must be a promoted value. We pass store_expr a SUBREG of the
5930 wanted mode but mark it so that we know that it was already
5931 extended. Note that `unsignedp' was modified above in
5932 this case. */
5933
5934 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5935 {
38a448ca 5936 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
ff78f773
RK
5937 SUBREG_PROMOTED_VAR_P (temp) = 1;
5938 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5939 }
5940
4c7a0be9 5941 if (temp == const0_rtx)
921b3427
RK
5942 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5943 EXPAND_MEMORY_USE_BAD);
4c7a0be9
JW
5944 else
5945 store_expr (TREE_OPERAND (exp, 0), temp, 0);
e5e809f4
JL
5946
5947 TREE_USED (exp) = 1;
bbf6f052 5948 }
1499e0a8
RK
5949
5950 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5951 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 5952 but mark it so that we know that it was already extended. */
1499e0a8
RK
5953
5954 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5955 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5956 {
e70d22c8
RK
5957 /* Compute the signedness and make the proper SUBREG. */
5958 promote_mode (type, mode, &unsignedp, 0);
38a448ca 5959 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
1499e0a8
RK
5960 SUBREG_PROMOTED_VAR_P (temp) = 1;
5961 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5962 return temp;
5963 }
5964
bbf6f052
RK
5965 return SAVE_EXPR_RTL (exp);
5966
679163cf
MS
5967 case UNSAVE_EXPR:
5968 {
5969 rtx temp;
5970 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5971 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5972 return temp;
5973 }
5974
b50d17a1 5975 case PLACEHOLDER_EXPR:
e9a25f70
JL
5976 {
5977 tree placeholder_expr;
5978
5979 /* If there is an object on the head of the placeholder list,
e5e809f4 5980 see if some object in it is of type TYPE or a pointer to it. For
e9a25f70
JL
5981 further information, see tree.def. */
5982 for (placeholder_expr = placeholder_list;
5983 placeholder_expr != 0;
5984 placeholder_expr = TREE_CHAIN (placeholder_expr))
5985 {
5986 tree need_type = TYPE_MAIN_VARIANT (type);
5987 tree object = 0;
5988 tree old_list = placeholder_list;
5989 tree elt;
5990
e5e809f4
JL
5991 /* Find the outermost reference that is of the type we want.
5992 If none, see if any object has a type that is a pointer to
5993 the type we want. */
5994 for (elt = TREE_PURPOSE (placeholder_expr);
5995 elt != 0 && object == 0;
5996 elt
5997 = ((TREE_CODE (elt) == COMPOUND_EXPR
5998 || TREE_CODE (elt) == COND_EXPR)
5999 ? TREE_OPERAND (elt, 1)
6000 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6001 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6002 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6003 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6004 ? TREE_OPERAND (elt, 0) : 0))
6005 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6006 object = elt;
e9a25f70 6007
e9a25f70 6008 for (elt = TREE_PURPOSE (placeholder_expr);
e5e809f4
JL
6009 elt != 0 && object == 0;
6010 elt
6011 = ((TREE_CODE (elt) == COMPOUND_EXPR
6012 || TREE_CODE (elt) == COND_EXPR)
6013 ? TREE_OPERAND (elt, 1)
6014 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6015 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6016 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6017 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6018 ? TREE_OPERAND (elt, 0) : 0))
6019 if (POINTER_TYPE_P (TREE_TYPE (elt))
6020 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
e9a25f70 6021 == need_type))
e5e809f4 6022 object = build1 (INDIRECT_REF, need_type, elt);
dc500fa1 6023
e9a25f70 6024 if (object != 0)
2cde2255 6025 {
e9a25f70
JL
6026 /* Expand this object skipping the list entries before
6027 it was found in case it is also a PLACEHOLDER_EXPR.
6028 In that case, we want to translate it using subsequent
6029 entries. */
6030 placeholder_list = TREE_CHAIN (placeholder_expr);
6031 temp = expand_expr (object, original_target, tmode,
6032 ro_modifier);
6033 placeholder_list = old_list;
6034 return temp;
2cde2255 6035 }
e9a25f70
JL
6036 }
6037 }
b50d17a1
RK
6038
6039 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6040 abort ();
6041
6042 case WITH_RECORD_EXPR:
6043 /* Put the object on the placeholder list, expand our first operand,
6044 and pop the list. */
6045 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6046 placeholder_list);
6047 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
921b3427 6048 tmode, ro_modifier);
b50d17a1
RK
6049 placeholder_list = TREE_CHAIN (placeholder_list);
6050 return target;
6051
70e6ca43
APB
6052 case GOTO_EXPR:
6053 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6054 expand_goto (TREE_OPERAND (exp, 0));
6055 else
6056 expand_computed_goto (TREE_OPERAND (exp, 0));
6057 return const0_rtx;
6058
bbf6f052 6059 case EXIT_EXPR:
e44842fe
RK
6060 expand_exit_loop_if_false (NULL_PTR,
6061 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
6062 return const0_rtx;
6063
f42e28dd
APB
6064 case LABELED_BLOCK_EXPR:
6065 if (LABELED_BLOCK_BODY (exp))
6066 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6067 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6068 return const0_rtx;
6069
6070 case EXIT_BLOCK_EXPR:
6071 if (EXIT_BLOCK_RETURN (exp))
ab87f8c8 6072 sorry ("returned value in block_exit_expr");
f42e28dd
APB
6073 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6074 return const0_rtx;
6075
bbf6f052 6076 case LOOP_EXPR:
0088fcb1 6077 push_temp_slots ();
bbf6f052
RK
6078 expand_start_loop (1);
6079 expand_expr_stmt (TREE_OPERAND (exp, 0));
6080 expand_end_loop ();
0088fcb1 6081 pop_temp_slots ();
bbf6f052
RK
6082
6083 return const0_rtx;
6084
6085 case BIND_EXPR:
6086 {
6087 tree vars = TREE_OPERAND (exp, 0);
6088 int vars_need_expansion = 0;
6089
6090 /* Need to open a binding contour here because
e976b8b2 6091 if there are any cleanups they must be contained here. */
bbf6f052
RK
6092 expand_start_bindings (0);
6093
2df53c0b
RS
6094 /* Mark the corresponding BLOCK for output in its proper place. */
6095 if (TREE_OPERAND (exp, 2) != 0
6096 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6097 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
6098
6099 /* If VARS have not yet been expanded, expand them now. */
6100 while (vars)
6101 {
6102 if (DECL_RTL (vars) == 0)
6103 {
6104 vars_need_expansion = 1;
6105 expand_decl (vars);
6106 }
6107 expand_decl_init (vars);
6108 vars = TREE_CHAIN (vars);
6109 }
6110
921b3427 6111 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
bbf6f052
RK
6112
6113 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6114
6115 return temp;
6116 }
6117
6118 case RTL_EXPR:
83b853c9
JM
6119 if (RTL_EXPR_SEQUENCE (exp))
6120 {
6121 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6122 abort ();
6123 emit_insns (RTL_EXPR_SEQUENCE (exp));
6124 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6125 }
99310285 6126 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
ca814259 6127 free_temps_for_rtl_expr (exp);
bbf6f052
RK
6128 return RTL_EXPR_RTL (exp);
6129
6130 case CONSTRUCTOR:
dd27116b
RK
6131 /* If we don't need the result, just ensure we evaluate any
6132 subexpressions. */
6133 if (ignore)
6134 {
6135 tree elt;
6136 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
921b3427
RK
6137 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6138 EXPAND_MEMORY_USE_BAD);
dd27116b
RK
6139 return const0_rtx;
6140 }
3207b172 6141
4af3895e
JVA
6142 /* All elts simple constants => refer to a constant in memory. But
6143 if this is a non-BLKmode mode, let it store a field at a time
6144 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 6145 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
6146 store directly into the target unless the type is large enough
6147 that memcpy will be used. If we are making an initializer and
3207b172 6148 all operands are constant, put it in memory as well. */
dd27116b 6149 else if ((TREE_STATIC (exp)
3207b172 6150 && ((mode == BLKmode
e5e809f4 6151 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1
RK
6152 || TREE_ADDRESSABLE (exp)
6153 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
fbe1758d
AM
6154 && (!MOVE_BY_PIECES_P
6155 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6156 TYPE_ALIGN (type) / BITS_PER_UNIT))
9de08200 6157 && ! mostly_zeros_p (exp))))
dd27116b 6158 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052
RK
6159 {
6160 rtx constructor = output_constant_def (exp);
b552441b
RS
6161 if (modifier != EXPAND_CONST_ADDRESS
6162 && modifier != EXPAND_INITIALIZER
6163 && modifier != EXPAND_SUM
d6a5ac33
RK
6164 && (! memory_address_p (GET_MODE (constructor),
6165 XEXP (constructor, 0))
6166 || (flag_force_addr
6167 && GET_CODE (XEXP (constructor, 0)) != REG)))
bbf6f052
RK
6168 constructor = change_address (constructor, VOIDmode,
6169 XEXP (constructor, 0));
6170 return constructor;
6171 }
6172
bbf6f052
RK
6173 else
6174 {
e9ac02a6
JW
6175 /* Handle calls that pass values in multiple non-contiguous
6176 locations. The Irix 6 ABI has examples of this. */
e5e809f4 6177 if (target == 0 || ! safe_from_p (target, exp, 1)
e9ac02a6 6178 || GET_CODE (target) == PARALLEL)
06089a8b
RK
6179 {
6180 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6181 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6182 else
6183 target = assign_temp (type, 0, 1, 1);
6184 }
07604beb
RK
6185
6186 if (TREE_READONLY (exp))
6187 {
9151b3bf 6188 if (GET_CODE (target) == MEM)
effbcc6a
RK
6189 target = copy_rtx (target);
6190
07604beb
RK
6191 RTX_UNCHANGING_P (target) = 1;
6192 }
6193
c5c76735 6194 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0);
bbf6f052
RK
6195 return target;
6196 }
6197
6198 case INDIRECT_REF:
6199 {
6200 tree exp1 = TREE_OPERAND (exp, 0);
6201 tree exp2;
7581a30f
JW
6202 tree index;
6203 tree string = string_constant (exp1, &index);
6204 int i;
6205
06eaa86f 6206 /* Try to optimize reads from const strings. */
7581a30f
JW
6207 if (string
6208 && TREE_CODE (string) == STRING_CST
6209 && TREE_CODE (index) == INTEGER_CST
6210 && !TREE_INT_CST_HIGH (index)
6211 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6212 && GET_MODE_CLASS (mode) == MODE_INT
06eaa86f
JW
6213 && GET_MODE_SIZE (mode) == 1
6214 && modifier != EXPAND_MEMORY_USE_WO)
7581a30f 6215 return GEN_INT (TREE_STRING_POINTER (string)[i]);
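	/* Editorial note (illustration only): this catches reads such as
	   *("abc" + 1), yielding (const_int 98) for 'b' with no memory
	   reference at all.  EXPAND_MEMORY_USE_WO is excluded above
	   because a location about to be written cannot be folded to a
	   constant.  */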
bbf6f052 6216
405f0da6
JW
6217 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6218 op0 = memory_address (mode, op0);
8c8a8e34 6219
49ad7cfa
BS
6220 if (current_function && current_function_check_memory_usage
6221 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
921b3427
RK
6222 {
6223 enum memory_use_mode memory_usage;
6224 memory_usage = get_memory_usage_from_modifier (modifier);
6225
6226 if (memory_usage != MEMORY_USE_DONT)
c85f7c16
JL
6227 {
6228 in_check_memory_usage = 1;
6229 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 6230 op0, Pmode,
c85f7c16
JL
6231 GEN_INT (int_size_in_bytes (type)),
6232 TYPE_MODE (sizetype),
6233 GEN_INT (memory_usage),
6234 TYPE_MODE (integer_type_node));
6235 in_check_memory_usage = 0;
6236 }
921b3427
RK
6237 }
6238
38a448ca 6239 temp = gen_rtx_MEM (mode, op0);
8c8a8e34
JW
6240 /* If the address was computed by addition,
6241 mark this as an element of an aggregate. */
9ec36da5
JL
6242 if (TREE_CODE (exp1) == PLUS_EXPR
6243 || (TREE_CODE (exp1) == SAVE_EXPR
6244 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
05e3bdb9 6245 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
8c8a8e34
JW
6246 || (TREE_CODE (exp1) == ADDR_EXPR
6247 && (exp2 = TREE_OPERAND (exp1, 0))
b5f88157 6248 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
c6df88cb 6249 MEM_SET_IN_STRUCT_P (temp, 1);
b5f88157 6250
2c4c436a 6251 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
41472af8 6252 MEM_ALIAS_SET (temp) = get_alias_set (exp);
1125706f
RK
6253
6254 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6255 here, because, in C and C++, the fact that a location is accessed
6256 through a pointer to const does not mean that the value there can
6257 never change. Languages where it can never change should
6258 also set TREE_STATIC. */
5cb7a25a 6259 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
8c8a8e34
JW
6260 return temp;
6261 }
bbf6f052
RK
6262
6263 case ARRAY_REF:
742920c7
RK
6264 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6265 abort ();
bbf6f052 6266
bbf6f052 6267 {
742920c7
RK
6268 tree array = TREE_OPERAND (exp, 0);
6269 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6270 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6271 tree index = TREE_OPERAND (exp, 1);
6272 tree index_type = TREE_TYPE (index);
08293add 6273 HOST_WIDE_INT i;
b50d17a1 6274
d4c89139
PB
6275 /* Optimize the special case of a zero lower bound.
6276
6277 We convert the low_bound to sizetype to avoid some problems
6278 with constant folding. (E.g. suppose the lower bound is 1,
6279 and its mode is QI. Without the conversion, (ARRAY
6280 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6281 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6282
6283 But sizetype isn't quite right either (especially if
6284 the lowbound is negative). FIXME */
6285
742920c7 6286 if (! integer_zerop (low_bound))
d4c89139
PB
6287 index = fold (build (MINUS_EXPR, index_type, index,
6288 convert (sizetype, low_bound)));
742920c7 6289
742920c7 6290 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
6291 This is not done in fold so it won't happen inside &.
6292 Don't fold if this is for wide characters since it's too
6293 difficult to do correctly and this is a very rare case. */
742920c7
RK
6294
6295 if (TREE_CODE (array) == STRING_CST
6296 && TREE_CODE (index) == INTEGER_CST
6297 && !TREE_INT_CST_HIGH (index)
307b821c 6298 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
ad2e7dd0
RK
6299 && GET_MODE_CLASS (mode) == MODE_INT
6300 && GET_MODE_SIZE (mode) == 1)
307b821c 6301 return GEN_INT (TREE_STRING_POINTER (array)[i]);
bbf6f052 6302
742920c7
RK
6303 /* If this is a constant index into a constant array,
6304 just get the value from the array. Handle both the cases when
6305 we have an explicit constructor and when our operand is a variable
6306 that was declared const. */
4af3895e 6307
742920c7
RK
6308 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6309 {
6310 if (TREE_CODE (index) == INTEGER_CST
6311 && TREE_INT_CST_HIGH (index) == 0)
6312 {
6313 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6314
6315 i = TREE_INT_CST_LOW (index);
6316 while (elem && i--)
6317 elem = TREE_CHAIN (elem);
6318 if (elem)
6319 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 6320 tmode, ro_modifier);
742920c7
RK
6321 }
6322 }
4af3895e 6323
742920c7
RK
6324 else if (optimize >= 1
6325 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6326 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6327 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6328 {
08293add 6329 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
6330 {
6331 tree init = DECL_INITIAL (array);
6332
6333 i = TREE_INT_CST_LOW (index);
6334 if (TREE_CODE (init) == CONSTRUCTOR)
6335 {
6336 tree elem = CONSTRUCTOR_ELTS (init);
6337
03dc44a6
RS
6338 while (elem
6339 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
742920c7
RK
6340 elem = TREE_CHAIN (elem);
6341 if (elem)
6342 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 6343 tmode, ro_modifier);
742920c7
RK
6344 }
6345 else if (TREE_CODE (init) == STRING_CST
08293add
RK
6346 && TREE_INT_CST_HIGH (index) == 0
6347 && (TREE_INT_CST_LOW (index)
6348 < TREE_STRING_LENGTH (init)))
6349 return (GEN_INT
6350 (TREE_STRING_POINTER
6351 (init)[TREE_INT_CST_LOW (index)]));
742920c7
RK
6352 }
6353 }
6354 }
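	/* Editorial note (illustration only): the two branches above fold
	   e.g. `arr[2]' at compile time, either when ARR is a CONSTRUCTOR
	   without side effects or when ARR is a const-qualified VAR_DECL
	   whose DECL_INITIAL is a CONSTRUCTOR or STRING_CST, by walking
	   the constructor elements to the matching index.  */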
8c8a8e34 6355
08293add 6356 /* ... fall through ... */
bbf6f052
RK
6357
6358 case COMPONENT_REF:
6359 case BIT_FIELD_REF:
4af3895e 6360 /* If the operand is a CONSTRUCTOR, we can just extract the
7a0b7b9a
RK
6361 appropriate field if it is present. Don't do this if we have
6362 already written the data since we want to refer to that copy
6363 and varasm.c assumes that's what we'll do. */
4af3895e 6364 if (code != ARRAY_REF
7a0b7b9a
RK
6365 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6366 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4af3895e
JVA
6367 {
6368 tree elt;
6369
6370 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6371 elt = TREE_CHAIN (elt))
86b5812c
RK
6372 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6373 /* We can normally use the value of the field in the
6374 CONSTRUCTOR. However, if this is a bitfield in
6375 an integral mode that we can fit in a HOST_WIDE_INT,
6376 we must mask only the number of bits in the bitfield,
6377 since this is done implicitly by the constructor. If
6378 the bitfield does not meet either of those conditions,
6379 we can't do this optimization. */
6380 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6381 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6382 == MODE_INT)
6383 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6384 <= HOST_BITS_PER_WIDE_INT))))
6385 {
6386 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6387 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6388 {
6389 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
86b5812c
RK
6390
6391 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6392 {
6393 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6394 op0 = expand_and (op0, op1, target);
6395 }
6396 else
6397 {
e5e809f4
JL
6398 enum machine_mode imode
6399 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c 6400 tree count
e5e809f4
JL
6401 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6402 0);
86b5812c
RK
6403
6404 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6405 target, 0);
6406 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6407 target, 0);
6408 }
6409 }
6410
6411 return op0;
6412 }
4af3895e
JVA
6413 }
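      /* Editorial note (illustration only): for a bitfield such as
	 `struct { unsigned f : 3; } x = { 9 };' the CONSTRUCTOR value
	 may carry more bits than the field, so the code above masks an
	 unsigned field to its width (9 & 7 == 1) and, for a signed
	 field, shifts left then arithmetic-right to reproduce the
	 sign-extended value that storing and reloading the field would
	 give.  */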
6414
bbf6f052
RK
6415 {
6416 enum machine_mode mode1;
6417 int bitsize;
6418 int bitpos;
7bb0943f 6419 tree offset;
bbf6f052 6420 int volatilep = 0;
034f9101 6421 int alignment;
839c4796
RK
6422 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6423 &mode1, &unsignedp, &volatilep,
6424 &alignment);
bbf6f052 6425
e7f3c83f
RK
6426 /* If we got back the original object, something is wrong. Perhaps
6427 we are evaluating an expression too early. In any event, don't
6428 infinitely recurse. */
6429 if (tem == exp)
6430 abort ();
6431
3d27140a 6432 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
6433 computation, since it will need a temporary and TARGET is known
6434 to have to do. This occurs in unchecked conversion in Ada. */
6435
6436 op0 = expand_expr (tem,
6437 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6438 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6439 != INTEGER_CST)
6440 ? target : NULL_RTX),
6441 VOIDmode,
6442 modifier == EXPAND_INITIALIZER
6443 ? modifier : EXPAND_NORMAL);
6444
6445 /* If this is a constant, put it into a register if it is a
6446 legitimate constant and memory if it isn't. */
6447 if (CONSTANT_P (op0))
6448 {
6449 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
6450 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6451 op0 = force_reg (mode, op0);
6452 else
6453 op0 = validize_mem (force_const_mem (mode, op0));
6454 }
6455
6456 if (offset != 0)
6457 {
6458 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6459
6460 if (GET_CODE (op0) != MEM)
6461 abort ();
6462
6463 if (GET_MODE (offset_rtx) != ptr_mode)
6464 {
6465#ifdef POINTERS_EXTEND_UNSIGNED
6466 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
6467#else
6468 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
6469#endif
6470 }
6471
6472 /* A constant address in TO_RTX can have VOIDmode; we must not
6473 call force_reg in that case. */
6474 if (GET_CODE (op0) == MEM
6475 && GET_MODE (op0) == BLKmode
6476 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6477 && bitsize
6478 && (bitpos % bitsize) == 0
6479 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6480 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6481 {
6482 rtx temp = change_address (op0, mode1,
6483 plus_constant (XEXP (op0, 0),
6484 (bitpos /
6485 BITS_PER_UNIT)));
6486 if (GET_CODE (XEXP (temp, 0)) == REG)
6487 op0 = temp;
6488 else
6489 op0 = change_address (op0, mode1,
6490 force_reg (GET_MODE (XEXP (temp, 0)),
6491 XEXP (temp, 0)));
6492 bitpos = 0;
6493 }
6494
6495
6496 op0 = change_address (op0, VOIDmode,
6497 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6498 force_reg (ptr_mode,
6499 offset_rtx)));
6500 }
6501
6502 /* Don't forget about volatility even if this is a bitfield. */
6503 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6504 {
6505 op0 = copy_rtx (op0);
6506 MEM_VOLATILE_P (op0) = 1;
6507 }
6508
6509 /* Check the access. */
6510 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
6511 {
6512 enum memory_use_mode memory_usage;
6513 memory_usage = get_memory_usage_from_modifier (modifier);
6514
6515 if (memory_usage != MEMORY_USE_DONT)
6516 {
6517 rtx to;
6518 int size;
6519
6520 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6521 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6522
6523 /* Check the access right of the pointer. */
6524 if (size > BITS_PER_UNIT)
6525 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6526 to, Pmode,
6527 GEN_INT (size / BITS_PER_UNIT),
6528 TYPE_MODE (sizetype),
6529 GEN_INT (memory_usage),
6530 TYPE_MODE (integer_type_node));
6531 }
6532 }
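/* The byte count passed to the checker rounds the bit range out to
   whole bytes.  E.g. with BITS_PER_UNIT == 8, bitpos == 13 and
   bitsize == 5: TO points at byte 13 / 8 == 1, and
   size = (13 % 8) + 5 + 8 - 1 = 17 bits, so 17 / 8 == 2 bytes are
   checked, covering the straddled byte pair.  */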
6533
6534 /* In cases where an aligned union has an unaligned object
6535 as a field, we might be extracting a BLKmode value from
6536 an integer-mode (e.g., SImode) object. Handle this case
6537 by doing the extract into an object as wide as the field
6538 (which we know to be the width of a basic mode), then
6539 storing into memory, and changing the mode to BLKmode.
6540 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6541 EXPAND_INITIALIZER), then we must not copy to a temporary. */
6542 if (mode1 == VOIDmode
6543 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6544 || (modifier != EXPAND_CONST_ADDRESS
6545 && modifier != EXPAND_INITIALIZER
6546 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6547 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6548 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6549 /* If the field isn't aligned enough to fetch as a memref,
6550 fetch it as a bit field. */
6551 || (SLOW_UNALIGNED_ACCESS
6552 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
6553 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
6554 {
6555 enum machine_mode ext_mode = mode;
6556
6557 if (ext_mode == BLKmode)
6558 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6559
6560 if (ext_mode == BLKmode)
6561 {
6562 /* In this case, BITPOS must start at a byte boundary and
6563 TARGET, if specified, must be a MEM. */
6564 if (GET_CODE (op0) != MEM
6565 || (target != 0 && GET_CODE (target) != MEM)
6566 || bitpos % BITS_PER_UNIT != 0)
6567 abort ();
6568
6569 op0 = change_address (op0, VOIDmode,
6570 plus_constant (XEXP (op0, 0),
6571 bitpos / BITS_PER_UNIT));
6572 if (target == 0)
6573 target = assign_temp (type, 0, 1, 1);
6574
6575 emit_block_move (target, op0,
6576 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6577 / BITS_PER_UNIT),
6578 1);
6579
6580 return target;
6581 }
6582
6583 op0 = validize_mem (op0);
6584
6585 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6586 mark_reg_pointer (XEXP (op0, 0), alignment);
6587
6588 op0 = extract_bit_field (op0, bitsize, bitpos,
6589 unsignedp, target, ext_mode, ext_mode,
6590 alignment,
6591 int_size_in_bytes (TREE_TYPE (tem)));
6592
6593 /* If the result is a record type and BITSIZE is narrower than
6594 the mode of OP0, an integral mode, and this is a big endian
6595 machine, we must put the field into the high-order bits. */
6596 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6597 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6598 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6599 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6600 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6601 - bitsize),
6602 op0, 1);
6603
6604 if (mode == BLKmode)
6605 {
6606 rtx new = assign_stack_temp (ext_mode,
6607 bitsize / BITS_PER_UNIT, 0);
6608
6609 emit_move_insn (new, op0);
6610 op0 = copy_rtx (new);
6611 PUT_MODE (op0, BLKmode);
6612 MEM_SET_IN_STRUCT_P (op0, 1);
6613 }
6614
6615 return op0;
6616 }
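/* In C terms, the detour above amounts to fetching the containing
   word and isolating the field.  A hedged sketch; `word', `bitpos'
   and `bitsize' are hypothetical, little-endian bit numbering assumed:

     unsigned extract_field (unsigned word, int bitpos, int bitsize)
     {
       return (word >> bitpos) & ((1u << bitsize) - 1u);
     }

   extract_bit_field emits the equivalent shifts and masks as RTL, and
   the BLKmode path then spills the result to a stack temporary.  */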
6617
6618 /* If the result is BLKmode, use that to access the object
6619 now as well. */
6620 if (mode == BLKmode)
6621 mode1 = BLKmode;
6622
6623 /* Get a reference to just this component. */
6624 if (modifier == EXPAND_CONST_ADDRESS
6625 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6626 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6627 (bitpos / BITS_PER_UNIT)));
6628 else
6629 op0 = change_address (op0, mode1,
6630 plus_constant (XEXP (op0, 0),
6631 (bitpos / BITS_PER_UNIT)));
6632
6633 if (GET_CODE (op0) == MEM)
6634 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6635
6636 if (GET_CODE (XEXP (op0, 0)) == REG)
6637 mark_reg_pointer (XEXP (op0, 0), alignment);
6638
6639 MEM_SET_IN_STRUCT_P (op0, 1);
6640 MEM_VOLATILE_P (op0) |= volatilep;
6641 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
6642 || modifier == EXPAND_CONST_ADDRESS
6643 || modifier == EXPAND_INITIALIZER)
6644 return op0;
6645 else if (target == 0)
6646 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6647
6648 convert_move (target, op0, unsignedp);
6649 return target;
6650 }
6651
6652 /* Intended for a reference to a buffer of a file-object in Pascal.
6653 But it's not certain that a special tree code will really be
6654 necessary for these. INDIRECT_REF might work for them. */
6655 case BUFFER_REF:
6656 abort ();
6657
6658 case IN_EXPR:
6659 {
6660 /* Pascal set IN expression.
6661
6662 Algorithm:
6663 rlo = set_low - (set_low%bits_per_word);
6664 the_word = set [ (index - rlo)/bits_per_word ];
6665 bit_index = index % bits_per_word;
6666 bitmask = 1 << bit_index;
6667 return !!(the_word & bitmask); */
6668
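/* A literal C rendering of the algorithm above, assuming the set is
   addressed in 8-bit units as the expansion below does (the names
   `set', `index' and `set_low' are hypothetical):

     int pascal_in (unsigned char *set, int index, int set_low)
     {
       int rlo = set_low - (set_low % 8);
       unsigned char the_word = set[(index - rlo) / 8];
       int bit_index = index % 8;
       return (the_word & (1 << bit_index)) != 0;
     }

   The code below emits the same subtraction, division, modulus, shift
   and AND, bracketed by range checks against the set bounds.  */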
6669 tree set = TREE_OPERAND (exp, 0);
6670 tree index = TREE_OPERAND (exp, 1);
6671 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
6672 tree set_type = TREE_TYPE (set);
6673 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6674 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6675 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6676 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6677 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6678 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6679 rtx setaddr = XEXP (setval, 0);
6680 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6681 rtx rlow;
6682 rtx diff, quo, rem, addr, bit, result;
6683
6684 preexpand_calls (exp);
6685
6686 /* If domain is empty, answer is no. Likewise if index is constant
6687 and out of bounds. */
6688 if (((TREE_CODE (set_high_bound) == INTEGER_CST
6689 && TREE_CODE (set_low_bound) == INTEGER_CST
6690 && tree_int_cst_lt (set_high_bound, set_low_bound))
6691 || (TREE_CODE (index) == INTEGER_CST
6692 && TREE_CODE (set_low_bound) == INTEGER_CST
6693 && tree_int_cst_lt (index, set_low_bound))
6694 || (TREE_CODE (set_high_bound) == INTEGER_CST
6695 && TREE_CODE (index) == INTEGER_CST
6696 && tree_int_cst_lt (set_high_bound, index))))
6697 return const0_rtx;
6698
6699 if (target == 0)
6700 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6701
6702 /* If we get here, we have to generate the code for both cases
6703 (in range and out of range). */
6704
6705 op0 = gen_label_rtx ();
6706 op1 = gen_label_rtx ();
6707
6708 if (! (GET_CODE (index_val) == CONST_INT
6709 && GET_CODE (lo_r) == CONST_INT))
6710 {
6711 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6712 GET_MODE (index_val), iunsignedp, 0, op1);
6713 }
6714
6715 if (! (GET_CODE (index_val) == CONST_INT
6716 && GET_CODE (hi_r) == CONST_INT))
6717 {
6718 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6719 GET_MODE (index_val), iunsignedp, 0, op1);
6720 }
6721
6722 /* Calculate the element number of bit zero in the first word
6723 of the set. */
6724 if (GET_CODE (lo_r) == CONST_INT)
6725 rlow = GEN_INT (INTVAL (lo_r)
6726 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
6727 else
6728 rlow = expand_binop (index_mode, and_optab, lo_r,
6729 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
6730 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6731
6732 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6733 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6734
6735 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
6736 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6737 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6738 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6739
6740 addr = memory_address (byte_mode,
6741 expand_binop (index_mode, add_optab, diff,
6742 setaddr, NULL_RTX, iunsignedp,
6743 OPTAB_LIB_WIDEN));
6744
6745 /* Extract the bit we want to examine. */
6746 bit = expand_shift (RSHIFT_EXPR, byte_mode,
6747 gen_rtx_MEM (byte_mode, addr),
6748 make_tree (TREE_TYPE (index), rem),
6749 NULL_RTX, 1);
6750 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6751 GET_MODE (target) == byte_mode ? target : 0,
6752 1, OPTAB_LIB_WIDEN);
6753
6754 if (result != target)
6755 convert_move (target, result, 1);
6756
6757 /* Output the code to handle the out-of-range case. */
6758 emit_jump (op0);
6759 emit_label (op1);
6760 emit_move_insn (target, const0_rtx);
6761 emit_label (op0);
6762 return target;
6763 }
6764
bbf6f052
RK
6765 case WITH_CLEANUP_EXPR:
6766 if (RTL_EXPR_RTL (exp) == 0)
6767 {
6768 RTL_EXPR_RTL (exp)
6769 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6770 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6771
6772 /* That's it for this cleanup. */
6773 TREE_OPERAND (exp, 2) = 0;
6774 }
6775 return RTL_EXPR_RTL (exp);
6776
6777 case CLEANUP_POINT_EXPR:
6778 {
6779 /* Start a new binding layer that will keep track of all cleanup
6780 actions to be performed. */
6781 expand_start_bindings (0);
6782
6783 target_temp_slot_level = temp_slot_level;
6784
6785 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6786 /* If we're going to use this value, load it up now. */
6787 if (! ignore)
6788 op0 = force_not_mem (op0);
6789 preserve_temp_slots (op0);
6790 expand_end_bindings (NULL_TREE, 0, 0);
6791 }
6792 return op0;
6793
6794 case CALL_EXPR:
6795 /* Check for a built-in function. */
6796 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6797 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6798 == FUNCTION_DECL)
6799 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6800 return expand_builtin (exp, target, subtarget, tmode, ignore);
6801
6802 /* If this call was expanded already by preexpand_calls,
6803 just return the result we got. */
6804 if (CALL_EXPR_RTL (exp) != 0)
6805 return CALL_EXPR_RTL (exp);
6806
6807 return expand_call (exp, target, ignore);
6808
6809 case NON_LVALUE_EXPR:
6810 case NOP_EXPR:
6811 case CONVERT_EXPR:
6812 case REFERENCE_EXPR:
6813 if (TREE_CODE (type) == UNION_TYPE)
6814 {
6815 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6816 if (target == 0)
6817 {
6818 if (mode != BLKmode)
6819 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6820 else
6821 target = assign_temp (type, 0, 1, 1);
6822 }
6823
6824 if (GET_CODE (target) == MEM)
6825 /* Store data into beginning of memory target. */
6826 store_expr (TREE_OPERAND (exp, 0),
6827 change_address (target, TYPE_MODE (valtype), 0), 0);
6828
6829 else if (GET_CODE (target) == REG)
6830 /* Store this field into a union of the proper type. */
6831 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6832 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6833 VOIDmode, 0, 1,
6834 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6835 0);
6836 else
6837 abort ();
6838
6839 /* Return the entire union. */
6840 return target;
6841 }
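/* In effect, a conversion to a union type is expanded as a store into
   the union's storage and a read of the whole union, roughly as if the
   source had said (a sketch; the names are hypothetical):

     union u { int i; } tmp;
     tmp.i = operand;
     result = tmp;

   done through memory with store_expr, or with store_field when the
   union lives in a register.  */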
6842
6843 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6844 {
6845 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
6846 ro_modifier);
6847
6848 /* If the signedness of the conversion differs and OP0 is
6849 a promoted SUBREG, clear that indication since we now
6850 have to do the proper extension. */
6851 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6852 && GET_CODE (op0) == SUBREG)
6853 SUBREG_PROMOTED_VAR_P (op0) = 0;
6854
6855 return op0;
6856 }
6857
6858 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6859 if (GET_MODE (op0) == mode)
6860 return op0;
6861
6862 /* If OP0 is a constant, just convert it into the proper mode. */
6863 if (CONSTANT_P (op0))
6864 return
6865 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6866 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6867
6868 if (modifier == EXPAND_INITIALIZER)
6869 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
6870
6871 if (target == 0)
6872 return
6873 convert_to_mode (mode, op0,
6874 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6875 else
6876 convert_move (target, op0,
6877 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6878 return target;
6879
6880 case PLUS_EXPR:
6881 /* We come here from MINUS_EXPR when the second operand is a
6882 constant. */
6883 plus_expr:
6884 this_optab = add_optab;
6885
6886 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6887 something else, make sure we add the register to the constant and
6888 then to the other thing. This case can occur during strength
6889 reduction and doing it this way will produce better code if the
6890 frame pointer or argument pointer is eliminated.
6891
6892 fold-const.c will ensure that the constant is always in the inner
6893 PLUS_EXPR, so the only case we need to do anything about is if
6894 sp, ap, or fp is our second argument, in which case we must swap
6895 the innermost first argument and our second argument. */
6896
6897 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6898 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6899 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6900 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6901 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6902 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6903 {
6904 tree t = TREE_OPERAND (exp, 1);
6905
6906 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6907 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6908 }
6909
88f63c77 6910 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
6911 something, we might be forming a constant. So try to use
6912 plus_constant. If it produces a sum and we can't accept it,
6913 use force_operand. This allows P = &ARR[const] to generate
6914 efficient code on machines where a SYMBOL_REF is not a valid
6915 address.
6916
6917 If this is an EXPAND_SUM call, always return the sum. */
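/* Concretely: for P = &ARR[3] with 4-byte elements the tree is
   (PLUS_EXPR (ADDR_EXPR ARR) 12), and expanding with EXPAND_SUM lets
   plus_constant fold the sum to (plus (symbol_ref ARR) (const_int 12))
   even on machines where that is not a valid address; force_operand
   then legitimizes it only when the caller cannot accept a bare sum.  */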
6918 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
6919 || mode == ptr_mode)
6920 {
6921 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6922 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6923 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6924 {
6925 rtx constant_part;
6926
6927 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6928 EXPAND_SUM);
6929 /* Use immed_double_const to ensure that the constant is
6930 truncated according to the mode of OP1, then sign extended
6931 to a HOST_WIDE_INT. Using the constant directly can result
6932 in non-canonical RTL in a 64x32 cross compile. */
6933 constant_part
6934 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
6935 (HOST_WIDE_INT) 0,
6936 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
6937 op1 = plus_constant (op1, INTVAL (constant_part));
6938 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6939 op1 = force_operand (op1, target);
6940 return op1;
6941 }
6942
6943 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6944 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6945 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6946 {
6947 rtx constant_part;
6948
6949 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6950 EXPAND_SUM);
6951 if (! CONSTANT_P (op0))
6952 {
6953 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6954 VOIDmode, modifier);
6955 /* Don't go to both_summands if modifier
6956 says it's not right to return a PLUS. */
6957 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6958 goto binop2;
6959 goto both_summands;
6960 }
6961 /* Use immed_double_const to ensure that the constant is
6962 truncated according to the mode of OP1, then sign extended
6963 to a HOST_WIDE_INT. Using the constant directly can result
6964 in non-canonical RTL in a 64x32 cross compile. */
6965 constant_part
6966 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
6967 (HOST_WIDE_INT) 0,
6968 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
6969 op0 = plus_constant (op0, INTVAL (constant_part));
6970 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6971 op0 = force_operand (op0, target);
6972 return op0;
6973 }
6974 }
6975
6976 /* No sense saving up arithmetic to be done
6977 if it's all in the wrong mode to form part of an address.
6978 And force_operand won't know whether to sign-extend or
6979 zero-extend. */
6980 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6981 || mode != ptr_mode)
6982 goto binop;
6983
6984 preexpand_calls (exp);
6985 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6986 subtarget = 0;
6987
6988 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6989 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
bbf6f052 6990
6991 both_summands:
6992 /* Make sure any term that's a sum with a constant comes last. */
6993 if (GET_CODE (op0) == PLUS
6994 && CONSTANT_P (XEXP (op0, 1)))
6995 {
6996 temp = op0;
6997 op0 = op1;
6998 op1 = temp;
6999 }
7000 /* If adding to a sum including a constant,
7001 associate it to put the constant outside. */
7002 if (GET_CODE (op1) == PLUS
7003 && CONSTANT_P (XEXP (op1, 1)))
7004 {
7005 rtx constant_term = const0_rtx;
7006
7007 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7008 if (temp != 0)
7009 op0 = temp;
7010 /* Ensure that MULT comes first if there is one. */
7011 else if (GET_CODE (op0) == MULT)
7012 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
7013 else
7014 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
7015
7016 /* Let's also eliminate constants from op0 if possible. */
7017 op0 = eliminate_constant_term (op0, &constant_term);
7018
7019 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7020 their sum should be a constant. Form it into OP1, since the
7021 result we want will then be OP0 + OP1. */
7022
7023 temp = simplify_binary_operation (PLUS, mode, constant_term,
7024 XEXP (op1, 1));
7025 if (temp != 0)
7026 op1 = temp;
7027 else
7028 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7029 }
7030
7031 /* Put a constant term last and put a multiplication first. */
7032 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7033 temp = op1, op1 = op0, op0 = temp;
7034
7035 temp = simplify_binary_operation (PLUS, mode, op0, op1);
7036 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7037
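/* The net effect of both_summands on a symbolic sum: constants are
   reassociated outermost and a MULT is put first, e.g.

     (plus (plus (reg) (const_int 4)) (mult (reg) (const_int 8)))

   becomes

     (plus (plus (mult (reg) (const_int 8)) (reg)) (const_int 4))

   which is the base+index+displacement shape address recognition
   expects.  */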
7038 case MINUS_EXPR:
7039 /* For initializers, we are allowed to return a MINUS of two
7040 symbolic constants. Here we handle all cases when both operands
7041 are constant. */
7042 /* Handle difference of two symbolic constants,
7043 for the sake of an initializer. */
7044 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7045 && really_constant_p (TREE_OPERAND (exp, 0))
7046 && really_constant_p (TREE_OPERAND (exp, 1)))
7047 {
7048 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
7049 VOIDmode, ro_modifier);
7050 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7051 VOIDmode, ro_modifier);
7052
7053 /* If the last operand is a CONST_INT, use plus_constant of
7054 the negated constant. Else make the MINUS. */
7055 if (GET_CODE (op1) == CONST_INT)
7056 return plus_constant (op0, - INTVAL (op1));
7057 else
7058 return gen_rtx_MINUS (mode, op0, op1);
7059 }
7060 /* Convert A - const to A + (-const). */
7061 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7062 {
7063 tree negated = fold (build1 (NEGATE_EXPR, type,
7064 TREE_OPERAND (exp, 1)));
7065
7066 /* Deal with the case where we can't negate the constant
7067 in TYPE. */
7068 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7069 {
7070 tree newtype = signed_type (type);
7071 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
7072 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
7073 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
7074
7075 if (! TREE_OVERFLOW (newneg))
7076 return expand_expr (convert (type,
7077 build (PLUS_EXPR, newtype,
7078 newop0, newneg)),
7079 target, tmode, ro_modifier);
7080 }
7081 else
7082 {
7083 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7084 goto plus_expr;
7085 }
7086 }
7087 this_optab = sub_optab;
7088 goto binop;
7089
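/* So A - 5 is rewritten as A + (-5) and re-enters the PLUS_EXPR code
   above.  When the constant cannot be negated in TYPE (unsigned types,
   or an overflowing negation), the negation is redone in the matching
   signed type and the result converted back, roughly

     (unsigned int) ((int) A + (-5))

   as built by the convert/build calls above.  */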
7090 case MULT_EXPR:
7091 preexpand_calls (exp);
7092 /* If first operand is constant, swap them.
7093 Thus the following special case checks need only
7094 check the second operand. */
7095 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7096 {
7097 register tree t1 = TREE_OPERAND (exp, 0);
7098 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7099 TREE_OPERAND (exp, 1) = t1;
7100 }
7101
7102 /* Attempt to return something suitable for generating an
7103 indexed address, for machines that support that. */
7104
7105 if (modifier == EXPAND_SUM && mode == ptr_mode
7106 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7107 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
7108 {
7109 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7110 EXPAND_SUM);
7111
7112 /* Apply distributive law if OP0 is x+c. */
7113 if (GET_CODE (op0) == PLUS
7114 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7115 return
7116 gen_rtx_PLUS
7117 (mode,
7118 gen_rtx_MULT
7119 (mode, XEXP (op0, 0),
7120 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7121 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7122 * INTVAL (XEXP (op0, 1))));
7123
7124 if (GET_CODE (op0) != REG)
7125 op0 = force_operand (op0, NULL_RTX);
7126 if (GET_CODE (op0) != REG)
7127 op0 = copy_to_mode_reg (mode, op0);
7128
7129 return
7130 gen_rtx_MULT (mode, op0,
7131 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7132 }
7133
7134 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7135 subtarget = 0;
7136
7137 /* Check for multiplying things that have been extended
7138 from a narrower type. If this machine supports multiplying
7139 in that narrower type with a result in the desired type,
7140 do it that way, and avoid the explicit type-conversion. */
7141 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7142 && TREE_CODE (type) == INTEGER_TYPE
7143 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7144 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7145 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7146 && int_fits_type_p (TREE_OPERAND (exp, 1),
7147 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7148 /* Don't use a widening multiply if a shift will do. */
7149 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
7150 > HOST_BITS_PER_WIDE_INT)
7151 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7152 ||
7153 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7154 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7155 ==
7156 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7157 /* If both operands are extended, they must either both
7158 be zero-extended or both be sign-extended. */
7159 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7160 ==
7161 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7162 {
7163 enum machine_mode innermode
7164 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7165 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7166 ? smul_widen_optab : umul_widen_optab);
7167 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7168 ? umul_widen_optab : smul_widen_optab);
7169 if (mode == GET_MODE_WIDER_MODE (innermode))
7170 {
7171 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7172 {
7173 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7174 NULL_RTX, VOIDmode, 0);
7175 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7176 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7177 VOIDmode, 0);
7178 else
7179 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7180 NULL_RTX, VOIDmode, 0);
7181 goto binop2;
7182 }
7183 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7184 && innermode == word_mode)
7185 {
7186 rtx htem;
7187 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7188 NULL_RTX, VOIDmode, 0);
7189 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7190 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7191 VOIDmode, 0);
7192 else
7193 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7194 NULL_RTX, VOIDmode, 0);
7195 temp = expand_binop (mode, other_optab, op0, op1, target,
7196 unsignedp, OPTAB_LIB_WIDEN);
7197 htem = expand_mult_highpart_adjust (innermode,
7198 gen_highpart (innermode, temp),
7199 op0, op1,
7200 gen_highpart (innermode, temp),
7201 unsignedp);
7202 emit_move_insn (gen_highpart (innermode, temp), htem);
7203 return temp;
7204 }
7205 }
7206 }
7207 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7208 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7209 return expand_mult (mode, op0, op1, target, unsignedp);
7210
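/* The widening-multiply check above turns, for example,

     short a, b;
     int c = (int) a * (int) b;

   into one 16x16->32 signed multiply via smul_widen_optab instead of
   two sign extensions plus a full 32x32 multiply, when the target
   provides the widening pattern and both operands were extended with
   the same signedness.  */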
7211 case TRUNC_DIV_EXPR:
7212 case FLOOR_DIV_EXPR:
7213 case CEIL_DIV_EXPR:
7214 case ROUND_DIV_EXPR:
7215 case EXACT_DIV_EXPR:
7216 preexpand_calls (exp);
7217 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7218 subtarget = 0;
7219 /* Possible optimization: compute the dividend with EXPAND_SUM
7220 then if the divisor is constant can optimize the case
7221 where some terms of the dividend have coeffs divisible by it. */
7222 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7223 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7224 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7225
7226 case RDIV_EXPR:
7227 this_optab = flodiv_optab;
7228 goto binop;
7229
7230 case TRUNC_MOD_EXPR:
7231 case FLOOR_MOD_EXPR:
7232 case CEIL_MOD_EXPR:
7233 case ROUND_MOD_EXPR:
7234 preexpand_calls (exp);
7235 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7236 subtarget = 0;
7237 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7238 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7239 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7240
7241 case FIX_ROUND_EXPR:
7242 case FIX_FLOOR_EXPR:
7243 case FIX_CEIL_EXPR:
7244 abort (); /* Not used for C. */
7245
7246 case FIX_TRUNC_EXPR:
7247 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7248 if (target == 0)
7249 target = gen_reg_rtx (mode);
7250 expand_fix (target, op0, unsignedp);
7251 return target;
7252
7253 case FLOAT_EXPR:
7254 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7255 if (target == 0)
7256 target = gen_reg_rtx (mode);
7257 /* expand_float can't figure out what to do if FROM has VOIDmode.
7258 So give it the correct mode. With -O, cse will optimize this. */
7259 if (GET_MODE (op0) == VOIDmode)
7260 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7261 op0);
7262 expand_float (target, op0,
7263 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7264 return target;
7265
7266 case NEGATE_EXPR:
7267 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7268 temp = expand_unop (mode, neg_optab, op0, target, 0);
7269 if (temp == 0)
7270 abort ();
7271 return temp;
7272
7273 case ABS_EXPR:
7274 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7275
7276 /* Handle complex values specially. */
7277 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7278 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7279 return expand_complex_abs (mode, op0, target, unsignedp);
7280
7281 /* Unsigned abs is simply the operand. Testing here means we don't
7282 risk generating incorrect code below. */
7283 if (TREE_UNSIGNED (type))
7284 return op0;
7285
7286 return expand_abs (mode, op0, target,
7287 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7288
7289 case MAX_EXPR:
7290 case MIN_EXPR:
7291 target = original_target;
7292 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
7293 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
7294 || GET_MODE (target) != mode
7295 || (GET_CODE (target) == REG
7296 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7297 target = gen_reg_rtx (mode);
7298 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7299 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7300
7301 /* First try to do it with a special MIN or MAX instruction.
7302 If that does not win, use a conditional jump to select the proper
7303 value. */
7304 this_optab = (TREE_UNSIGNED (type)
7305 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7306 : (code == MIN_EXPR ? smin_optab : smax_optab));
7307
7308 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7309 OPTAB_WIDEN);
7310 if (temp != 0)
7311 return temp;
7312
7313 /* At this point, a MEM target is no longer useful; we will get better
7314 code without it. */
7315
7316 if (GET_CODE (target) == MEM)
7317 target = gen_reg_rtx (mode);
7318
7319 if (target != op0)
7320 emit_move_insn (target, op0);
7321
7322 op0 = gen_label_rtx ();
7323
7324 /* If this mode is an integer too wide to compare properly,
7325 compare word by word. Rely on cse to optimize constant cases. */
7326 if (GET_MODE_CLASS (mode) == MODE_INT && ! can_compare_p (mode))
7327 {
7328 if (code == MAX_EXPR)
7329 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7330 target, op1, NULL_RTX, op0);
7331 else
7332 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7333 op1, target, NULL_RTX, op0);
7334 }
7335 else
7336 {
7337 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7338 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7339 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7340 op0);
7341 }
7342 emit_move_insn (target, op1);
7343 emit_label (op0);
7344 return target;
7345
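/* When no min/max instruction exists, the fallback above is a plain
   compare and branch; for MAX_EXPR it is equivalent to

     target = op0;
     if (target >= op1)
       goto done;
     target = op1;
   done:

   using a signed or unsigned comparison to match TYPE, and falling
   back to a word-by-word jump for integer modes too wide for
   can_compare_p.  */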
7346 case BIT_NOT_EXPR:
7347 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7348 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7349 if (temp == 0)
7350 abort ();
7351 return temp;
7352
7353 case FFS_EXPR:
7354 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7355 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7356 if (temp == 0)
7357 abort ();
7358 return temp;
7359
7360 /* ??? Can optimize bitwise operations with one arg constant.
7361 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7362 and (a bitwise1 b) bitwise2 b (etc)
7363 but that is probably not worth while. */
7364
7365 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7366 boolean values when we want in all cases to compute both of them. In
7367 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7368 as actual zero-or-1 values and then bitwise anding. In cases where
7369 there cannot be any side effects, better code would be made by
7370 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7371 how to recognize those cases. */
7372
7373 case TRUTH_AND_EXPR:
7374 case BIT_AND_EXPR:
7375 this_optab = and_optab;
7376 goto binop;
7377
7378 case TRUTH_OR_EXPR:
7379 case BIT_IOR_EXPR:
7380 this_optab = ior_optab;
7381 goto binop;
7382
7383 case TRUTH_XOR_EXPR:
7384 case BIT_XOR_EXPR:
7385 this_optab = xor_optab;
7386 goto binop;
7387
7388 case LSHIFT_EXPR:
7389 case RSHIFT_EXPR:
7390 case LROTATE_EXPR:
7391 case RROTATE_EXPR:
7392 preexpand_calls (exp);
7393 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7394 subtarget = 0;
7395 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7396 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7397 unsignedp);
7398
7399 /* Could determine the answer when only additive constants differ. Also,
7400 the addition of one can be handled by changing the condition. */
7401 case LT_EXPR:
7402 case LE_EXPR:
7403 case GT_EXPR:
7404 case GE_EXPR:
7405 case EQ_EXPR:
7406 case NE_EXPR:
7407 preexpand_calls (exp);
7408 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7409 if (temp != 0)
7410 return temp;
7411
7412 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7413 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7414 && original_target
7415 && GET_CODE (original_target) == REG
7416 && (GET_MODE (original_target)
7417 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7418 {
7419 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7420 VOIDmode, 0);
7421
7422 if (temp != original_target)
7423 temp = copy_to_reg (temp);
7424
7425 op1 = gen_label_rtx ();
7426 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7427 GET_MODE (temp), unsignedp, 0, op1);
7428 emit_move_insn (temp, const1_rtx);
7429 emit_label (op1);
7430 return temp;
7431 }
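/* The insns just emitted implement, in C terms,

     temp = foo;
     if (temp == 0)
       goto around;
     temp = 1;
   around:

   reusing the loaded value as its own truth value and only patching it
   to 1 when nonzero, which needs no store-flag instruction.  */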
7432
7433 /* If no set-flag instruction, must generate a conditional
7434 store into a temporary variable. Drop through
7435 and handle this like && and ||. */
7436
7437 case TRUTH_ANDIF_EXPR:
7438 case TRUTH_ORIF_EXPR:
7439 if (! ignore
7440 && (target == 0 || ! safe_from_p (target, exp, 1)
7441 /* Make sure we don't have a hard reg (such as function's return
7442 value) live across basic blocks, if not optimizing. */
7443 || (!optimize && GET_CODE (target) == REG
7444 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
7445 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7446
7447 if (target)
7448 emit_clr_insn (target);
7449
7450 op1 = gen_label_rtx ();
7451 jumpifnot (exp, op1);
7452
7453 if (target)
7454 emit_0_to_1_insn (target);
7455
7456 emit_label (op1);
7457 return ignore ? const0_rtx : target;
7458
7459 case TRUTH_NOT_EXPR:
7460 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7461 /* The parser is careful to generate TRUTH_NOT_EXPR
7462 only with operands that are always zero or one. */
7463 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7464 target, 1, OPTAB_LIB_WIDEN);
7465 if (temp == 0)
7466 abort ();
7467 return temp;
7468
7469 case COMPOUND_EXPR:
7470 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7471 emit_queue ();
7472 return expand_expr (TREE_OPERAND (exp, 1),
7473 (ignore ? const0_rtx : target),
7474 VOIDmode, 0);
7475
7476 case COND_EXPR:
7477 /* If we would have a "singleton" (see below) were it not for a
7478 conversion in each arm, bring that conversion back out. */
7479 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7480 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7481 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7482 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7483 {
7484 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7485 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7486
7487 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7488 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7489 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7490 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7491 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7492 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7493 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7494 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7495 return expand_expr (build1 (NOP_EXPR, type,
7496 build (COND_EXPR, TREE_TYPE (true),
7497 TREE_OPERAND (exp, 0),
7498 true, false)),
7499 target, tmode, modifier);
7500 }
7501
7502 {
7503 /* Note that COND_EXPRs whose type is a structure or union
7504 are required to be constructed to contain assignments of
7505 a temporary variable, so that we can evaluate them here
7506 for side effect only. If type is void, we must do likewise. */
7507
7508 /* If an arm of the branch requires a cleanup,
7509 only that cleanup is performed. */
7510
7511 tree singleton = 0;
7512 tree binary_op = 0, unary_op = 0;
7513
7514 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7515 convert it to our mode, if necessary. */
7516 if (integer_onep (TREE_OPERAND (exp, 1))
7517 && integer_zerop (TREE_OPERAND (exp, 2))
7518 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7519 {
7520 if (ignore)
7521 {
7522 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
7523 ro_modifier);
7524 return const0_rtx;
7525 }
7526
7527 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7528 if (GET_MODE (op0) == mode)
7529 return op0;
7530
7531 if (target == 0)
7532 target = gen_reg_rtx (mode);
7533 convert_move (target, op0, unsignedp);
7534 return target;
7535 }
7536
7537 /* Check for X ? A + B : A. If we have this, we can copy A to the
7538 output and conditionally add B. Similarly for unary operations.
7539 Don't do this if X has side-effects because those side effects
7540 might affect A or B and the "?" operation is a sequence point in
7541 ANSI. (operand_equal_p tests for side effects.) */
7542
7543 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7544 && operand_equal_p (TREE_OPERAND (exp, 2),
7545 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7546 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7547 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7548 && operand_equal_p (TREE_OPERAND (exp, 1),
7549 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7550 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7551 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7552 && operand_equal_p (TREE_OPERAND (exp, 2),
7553 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7554 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7555 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7556 && operand_equal_p (TREE_OPERAND (exp, 1),
7557 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7558 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7559
7560 /* If we are not to produce a result, we have no target. Otherwise,
7561 if a target was specified use it; it will not be used as an
7562 intermediate target unless it is safe. If no target, use a
7563 temporary. */
7564
7565 if (ignore)
7566 temp = 0;
7567 else if (original_target
7568 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7569 || (singleton && GET_CODE (original_target) == REG
7570 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7571 && original_target == var_rtx (singleton)))
7572 && GET_MODE (original_target) == mode
7573#ifdef HAVE_conditional_move
7574 && (! can_conditionally_move_p (mode)
7575 || GET_CODE (original_target) == REG
7576 || TREE_ADDRESSABLE (type))
7577#endif
7578 && ! (GET_CODE (original_target) == MEM
7579 && MEM_VOLATILE_P (original_target)))
7580 temp = original_target;
7581 else if (TREE_ADDRESSABLE (type))
7582 abort ();
7583 else
7584 temp = assign_temp (type, 0, 0, 1);
7585
7586 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7587 do the test of X as a store-flag operation, do this as
7588 A + ((X != 0) << log C). Similarly for other simple binary
7589 operators. Only do for C == 1 if BRANCH_COST is low. */
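/* Worked instance: X ? A + 8 : A (with BRANCH_COST >= 3) becomes

     A + ((X != 0) << 3)

   since 8 == 1 << 3: the store-flag result, 0 or 1, is shifted into
   position and added, so no branch is emitted.  The same shape is used
   for MINUS, IOR and XOR with the matching optab.  */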
7590 if (temp && singleton && binary_op
7591 && (TREE_CODE (binary_op) == PLUS_EXPR
7592 || TREE_CODE (binary_op) == MINUS_EXPR
7593 || TREE_CODE (binary_op) == BIT_IOR_EXPR
7594 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7595 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7596 : integer_onep (TREE_OPERAND (binary_op, 1)))
7597 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7598 {
7599 rtx result;
7600 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7601 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7602 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
7603 : xor_optab);
7604
7605 /* If we had X ? A : A + 1, do this as A + (X == 0).
7606
7607 We have to invert the truth value here and then put it
7608 back later if do_store_flag fails. We cannot simply copy
7609 TREE_OPERAND (exp, 0) to another variable and modify that
7610 because invert_truthvalue can modify the tree pointed to
7611 by its argument. */
7612 if (singleton == TREE_OPERAND (exp, 1))
7613 TREE_OPERAND (exp, 0)
7614 = invert_truthvalue (TREE_OPERAND (exp, 0));
7615
7616 result = do_store_flag (TREE_OPERAND (exp, 0),
7617 (safe_from_p (temp, singleton, 1)
7618 ? temp : NULL_RTX),
7619 mode, BRANCH_COST <= 1);
7620
7621 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7622 result = expand_shift (LSHIFT_EXPR, mode, result,
7623 build_int_2 (tree_log2
7624 (TREE_OPERAND
7625 (binary_op, 1)),
7626 0),
7627 (safe_from_p (temp, singleton, 1)
7628 ? temp : NULL_RTX), 0);
7629
7630 if (result)
7631 {
7632 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7633 return expand_binop (mode, boptab, op1, result, temp,
7634 unsignedp, OPTAB_LIB_WIDEN);
7635 }
7636 else if (singleton == TREE_OPERAND (exp, 1))
7637 TREE_OPERAND (exp, 0)
7638 = invert_truthvalue (TREE_OPERAND (exp, 0));
7639 }
7640
7641 do_pending_stack_adjust ();
7642 NO_DEFER_POP;
7643 op0 = gen_label_rtx ();
7644
7645 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7646 {
7647 if (temp != 0)
7648 {
7649 /* If the target conflicts with the other operand of the
7650 binary op, we can't use it. Also, we can't use the target
7651 if it is a hard register, because evaluating the condition
7652 might clobber it. */
7653 if ((binary_op
7654 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7655 || (GET_CODE (temp) == REG
7656 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7657 temp = gen_reg_rtx (mode);
7658 store_expr (singleton, temp, 0);
7659 }
7660 else
7661 expand_expr (singleton,
7662 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7663 if (singleton == TREE_OPERAND (exp, 1))
7664 jumpif (TREE_OPERAND (exp, 0), op0);
7665 else
7666 jumpifnot (TREE_OPERAND (exp, 0), op0);
7667
7668 start_cleanup_deferral ();
7669 if (binary_op && temp == 0)
7670 /* Just touch the other operand. */
7671 expand_expr (TREE_OPERAND (binary_op, 1),
7672 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7673 else if (binary_op)
7674 store_expr (build (TREE_CODE (binary_op), type,
7675 make_tree (type, temp),
7676 TREE_OPERAND (binary_op, 1)),
7677 temp, 0);
7678 else
7679 store_expr (build1 (TREE_CODE (unary_op), type,
7680 make_tree (type, temp)),
7681 temp, 0);
7682 op1 = op0;
7683 }
7684 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7685 comparison operator. If we have one of these cases, set the
7686 output to A, branch on A (cse will merge these two references),
7687 then set the output to FOO. */
7688 else if (temp
7689 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7690 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7691 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7692 TREE_OPERAND (exp, 1), 0)
7693 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7694 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
7695 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7696 {
7697 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7698 temp = gen_reg_rtx (mode);
7699 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7700 jumpif (TREE_OPERAND (exp, 0), op0);
7701
7702 start_cleanup_deferral ();
7703 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7704 op1 = op0;
7705 }
7706 else if (temp
7707 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7708 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7709 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7710 TREE_OPERAND (exp, 2), 0)
7711 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7712 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
7713 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7714 {
7715 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7716 temp = gen_reg_rtx (mode);
7717 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7718 jumpifnot (TREE_OPERAND (exp, 0), op0);
7719
7720 start_cleanup_deferral ();
7721 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7722 op1 = op0;
7723 }
7724 else
7725 {
7726 op1 = gen_label_rtx ();
7727 jumpifnot (TREE_OPERAND (exp, 0), op0);
7728
7729 start_cleanup_deferral ();
7730
7731 /* One branch of the cond can be void, if it never returns. For
7732 example A ? throw : E */
7733 if (temp != 0
7734 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7735 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7736 else
7737 expand_expr (TREE_OPERAND (exp, 1),
7738 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7739 end_cleanup_deferral ();
7740 emit_queue ();
7741 emit_jump_insn (gen_jump (op1));
7742 emit_barrier ();
7743 emit_label (op0);
7744 start_cleanup_deferral ();
7745 if (temp != 0
7746 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7747 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7748 else
7749 expand_expr (TREE_OPERAND (exp, 2),
7750 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7751 }
7752
7753 end_cleanup_deferral ();
7754
7755 emit_queue ();
7756 emit_label (op1);
7757 OK_DEFER_POP;
7758
7759 return temp;
7760 }
7761
7762 case TARGET_EXPR:
7763 {
7764 /* Something needs to be initialized, but we didn't know
7765 where that thing was when building the tree. For example,
7766 it could be the return value of a function, or a parameter
7767 to a function which lays down in the stack, or a temporary
7768 variable which must be passed by reference.
7769
7770 We guarantee that the expression will either be constructed
7771 or copied into our original target. */
7772
7773 tree slot = TREE_OPERAND (exp, 0);
7774 tree cleanups = NULL_TREE;
7775 tree exp1;
7776
7777 if (TREE_CODE (slot) != VAR_DECL)
7778 abort ();
7779
7780 if (! ignore)
7781 target = original_target;
7782
7783 if (target == 0)
7784 {
7785 if (DECL_RTL (slot) != 0)
7786 {
7787 target = DECL_RTL (slot);
7788 /* If we have already expanded the slot, don't do
7789 it again. (mrs) */
7790 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7791 return target;
7792 }
7793 else
7794 {
7795 target = assign_temp (type, 2, 0, 1);
7796 /* All temp slots at this level must not conflict. */
7797 preserve_temp_slots (target);
7798 DECL_RTL (slot) = target;
7799 if (TREE_ADDRESSABLE (slot))
7800 {
7801 TREE_ADDRESSABLE (slot) = 0;
7802 mark_addressable (slot);
7803 }
bbf6f052 7804
e287fd6e
RK
7805 /* Since SLOT is not known to the called function
7806 to belong to its stack frame, we must build an explicit
7807 cleanup. This case occurs when we must build up a reference
7808 to pass the reference as an argument. In this case,
7809 it is very likely that such a reference need not be
7810 built here. */
7811
7812 if (TREE_OPERAND (exp, 2) == 0)
7813 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
2a888d4c 7814 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 7815 }
bbf6f052
RK
7816 }
7817 else
7818 {
7819 /* This case does occur, when expanding a parameter which
7820 needs to be constructed on the stack. The target
7821 is the actual stack address that we want to initialize.
7822 The function we call will perform the cleanup in this case. */
7823
7824 /* If we have already assigned it space, use that space,
7825 not target that we were passed in, as our target
7826 parameter is only a hint. */
7827 if (DECL_RTL (slot) != 0)
7828 {
7829 target = DECL_RTL (slot);
7830 /* If we have already expanded the slot, don't do
7831 it again. (mrs) */
7832 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7833 return target;
7834 }
7835 else
7836 {
7837 DECL_RTL (slot) = target;
7838 /* If we must have an addressable slot, then make sure that
7839 the RTL that we just stored in slot is OK. */
7840 if (TREE_ADDRESSABLE (slot))
7841 {
7842 TREE_ADDRESSABLE (slot) = 0;
7843 mark_addressable (slot);
7844 }
7845 }
7846 }
7847
7848 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7849 /* Mark it as expanded. */
7850 TREE_OPERAND (exp, 1) = NULL_TREE;
7851
7852 TREE_USED (slot) = 1;
7853 store_expr (exp1, target, 0);
7854
7855 expand_decl_cleanup (NULL_TREE, cleanups);
7856
7857 return target;
7858 }
7859
7860 case INIT_EXPR:
7861 {
7862 tree lhs = TREE_OPERAND (exp, 0);
7863 tree rhs = TREE_OPERAND (exp, 1);
7864 tree noncopied_parts = 0;
7865 tree lhs_type = TREE_TYPE (lhs);
7866
7867 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7868 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7869 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7870 TYPE_NONCOPIED_PARTS (lhs_type));
7871 while (noncopied_parts != 0)
7872 {
7873 expand_assignment (TREE_VALUE (noncopied_parts),
7874 TREE_PURPOSE (noncopied_parts), 0, 0);
7875 noncopied_parts = TREE_CHAIN (noncopied_parts);
7876 }
7877 return temp;
7878 }
7879
7880 case MODIFY_EXPR:
7881 {
7882 /* If lhs is complex, expand calls in rhs before computing it.
7883 That's so we don't compute a pointer and save it over a call.
7884 If lhs is simple, compute it first so we can give it as a
7885 target if the rhs is just a call. This avoids an extra temp and copy
7886 and that prevents a partial-subsumption which makes bad code.
7887 Actually we could treat component_ref's of vars like vars. */
7888
7889 tree lhs = TREE_OPERAND (exp, 0);
7890 tree rhs = TREE_OPERAND (exp, 1);
7891 tree noncopied_parts = 0;
7892 tree lhs_type = TREE_TYPE (lhs);
7893
7894 temp = 0;
7895
7896 if (TREE_CODE (lhs) != VAR_DECL
7897 && TREE_CODE (lhs) != RESULT_DECL
7898 && TREE_CODE (lhs) != PARM_DECL
7899 && ! (TREE_CODE (lhs) == INDIRECT_REF
7900 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7901 preexpand_calls (exp);
7902
7903 /* Check for |= or &= of a bitfield of size one into another bitfield
7904 of size 1. In this case, (unless we need the result of the
7905 assignment) we can do this more efficiently with a
7906 test followed by an assignment, if necessary.
7907
7908 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7909 things change so we do, this code should be enhanced to
7910 support it. */
7911 if (ignore
7912 && TREE_CODE (lhs) == COMPONENT_REF
7913 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7914 || TREE_CODE (rhs) == BIT_AND_EXPR)
7915 && TREE_OPERAND (rhs, 0) == lhs
7916 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7917 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7918 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7919 {
7920 rtx label = gen_label_rtx ();
7921
7922 do_jump (TREE_OPERAND (rhs, 1),
7923 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7924 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7925 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7926 (TREE_CODE (rhs) == BIT_IOR_EXPR
7927 ? integer_one_node
7928 : integer_zero_node)),
7929 0, 0);
e7c33f54 7930 do_pending_stack_adjust ();
bbf6f052
RK
7931 emit_label (label);
7932 return const0_rtx;
7933 }
7934
7935 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7936 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7937 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7938 TYPE_NONCOPIED_PARTS (lhs_type));
7939
7940 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7941 while (noncopied_parts != 0)
7942 {
7943 expand_assignment (TREE_PURPOSE (noncopied_parts),
7944 TREE_VALUE (noncopied_parts), 0, 0);
7945 noncopied_parts = TREE_CHAIN (noncopied_parts);
7946 }
7947 return temp;
7948 }
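      /* Added illustration, not original expr.c commentary: for one-bit
         fields declared as `struct { unsigned a : 1, b : 1; } s;', the
         special case above expands `s.a |= s.b;' (result ignored) as
             if (s.b) s.a = 1;
         and `s.a &= s.b;' as
             if (! s.b) s.a = 0;
         so no read-modify-write of the destination field is needed.  */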
7949
6e7f84a7
APB
7950 case RETURN_EXPR:
7951 if (!TREE_OPERAND (exp, 0))
7952 expand_null_return ();
7953 else
7954 expand_return (TREE_OPERAND (exp, 0));
7955 return const0_rtx;
7956
bbf6f052
RK
7957 case PREINCREMENT_EXPR:
7958 case PREDECREMENT_EXPR:
7b8b9722 7959 return expand_increment (exp, 0, ignore);
bbf6f052
RK
7960
7961 case POSTINCREMENT_EXPR:
7962 case POSTDECREMENT_EXPR:
7963 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 7964 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
7965
7966 case ADDR_EXPR:
987c71d9 7967 /* If nonzero, TEMP will be set to the address of something that might
0f41302f 7968 be a MEM corresponding to a stack slot. */
987c71d9
RK
7969 temp = 0;
7970
bbf6f052
RK
7971 /* Are we taking the address of a nested function? */
7972 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 7973 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
7974 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7975 && ! TREE_STATIC (exp))
bbf6f052
RK
7976 {
7977 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7978 op0 = force_operand (op0, target);
7979 }
682ba3a6
RK
7980 /* If we are taking the address of something erroneous, just
7981 return a zero. */
7982 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7983 return const0_rtx;
bbf6f052
RK
7984 else
7985 {
e287fd6e
RK
7986 /* We make sure to pass const0_rtx down if we came in with
7987 ignore set, to avoid doing the cleanups twice for something. */
7988 op0 = expand_expr (TREE_OPERAND (exp, 0),
7989 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
7990 (modifier == EXPAND_INITIALIZER
7991 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 7992
119af78a
RK
7993 /* If we are going to ignore the result, OP0 will have been set
7994 to const0_rtx, so just return it. Don't get confused and
7995 think we are taking the address of the constant. */
7996 if (ignore)
7997 return op0;
7998
3539e816
MS
7999 op0 = protect_from_queue (op0, 0);
8000
c5c76735
JL
8001 /* We would like the object in memory. If it is a constant, we can
8002 have it be statically allocated into memory. For a non-constant,
8003 we need to allocate some memory and store the value into it. */
896102d0
RK
8004
8005 if (CONSTANT_P (op0))
8006 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8007 op0);
987c71d9 8008 else if (GET_CODE (op0) == MEM)
af5b53ed
RK
8009 {
8010 mark_temp_addr_taken (op0);
8011 temp = XEXP (op0, 0);
8012 }
896102d0 8013
682ba3a6 8014 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6c8538cc 8015 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
896102d0
RK
8016 {
8017 /* If this object is in a register, it must not
0f41302f 8018 be BLKmode. */
896102d0 8019 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
06089a8b 8020 rtx memloc = assign_temp (inner_type, 1, 1, 1);
896102d0 8021
7a0b7b9a 8022 mark_temp_addr_taken (memloc);
896102d0
RK
8023 emit_move_insn (memloc, op0);
8024 op0 = memloc;
8025 }
8026
bbf6f052
RK
8027 if (GET_CODE (op0) != MEM)
8028 abort ();
8029
8030 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77
RK
8031 {
8032 temp = XEXP (op0, 0);
8033#ifdef POINTERS_EXTEND_UNSIGNED
8034 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8035 && mode == ptr_mode)
9fcfcce7 8036 temp = convert_memory_address (ptr_mode, temp);
88f63c77
RK
8037#endif
8038 return temp;
8039 }
987c71d9 8040
bbf6f052
RK
8041 op0 = force_operand (XEXP (op0, 0), target);
8042 }
987c71d9 8043
bbf6f052 8044 if (flag_force_addr && GET_CODE (op0) != REG)
987c71d9
RK
8045 op0 = force_reg (Pmode, op0);
8046
dc6d66b3
RK
8047 if (GET_CODE (op0) == REG
8048 && ! REG_USERVAR_P (op0))
8049 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
987c71d9
RK
8050
8051 /* If we might have had a temp slot, add an equivalent address
8052 for it. */
8053 if (temp != 0)
8054 update_temp_slot_address (temp, op0);
8055
88f63c77
RK
8056#ifdef POINTERS_EXTEND_UNSIGNED
8057 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8058 && mode == ptr_mode)
9fcfcce7 8059 op0 = convert_memory_address (ptr_mode, op0);
88f63c77
RK
8060#endif
8061
bbf6f052
RK
8062 return op0;
8063
8064 case ENTRY_VALUE_EXPR:
8065 abort ();
8066
7308a047
RS
8067 /* COMPLEX type for Extended Pascal & Fortran */
8068 case COMPLEX_EXPR:
8069 {
8070 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 8071 rtx insns;
7308a047
RS
8072
8073 /* Get the rtx code of the operands. */
8074 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8075 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8076
8077 if (! target)
8078 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8079
6551fa4d 8080 start_sequence ();
7308a047
RS
8081
8082 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
8083 emit_move_insn (gen_realpart (mode, target), op0);
8084 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 8085
6551fa4d
JW
8086 insns = get_insns ();
8087 end_sequence ();
8088
7308a047 8089 /* Complex construction should appear as a single unit. */
6551fa4d
JW
8090 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8091 each with a separate pseudo as destination.
8092 It's not correct for flow to treat them as a unit. */
6d6e61ce 8093 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8094 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8095 else
8096 emit_insns (insns);
7308a047
RS
8097
8098 return target;
8099 }
8100
8101 case REALPART_EXPR:
2d7050fd
RS
8102 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8103 return gen_realpart (mode, op0);
7308a047
RS
8104
8105 case IMAGPART_EXPR:
2d7050fd
RS
8106 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8107 return gen_imagpart (mode, op0);
7308a047
RS
8108
8109 case CONJ_EXPR:
8110 {
62acb978 8111 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 8112 rtx imag_t;
6551fa4d 8113 rtx insns;
7308a047
RS
8114
8115 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8116
8117 if (! target)
d6a5ac33 8118 target = gen_reg_rtx (mode);
7308a047 8119
6551fa4d 8120 start_sequence ();
7308a047
RS
8121
8122 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
8123 emit_move_insn (gen_realpart (partmode, target),
8124 gen_realpart (partmode, op0));
7308a047 8125
62acb978
RK
8126 imag_t = gen_imagpart (partmode, target);
8127 temp = expand_unop (partmode, neg_optab,
8128 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
8129 if (temp != imag_t)
8130 emit_move_insn (imag_t, temp);
8131
6551fa4d
JW
8132 insns = get_insns ();
8133 end_sequence ();
8134
d6a5ac33
RK
8135 /* Conjugate should appear as a single unit.
8136 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
8137 each with a separate pseudo as destination.
8138 It's not correct for flow to treat them as a unit. */
6d6e61ce 8139 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8140 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8141 else
8142 emit_insns (insns);
7308a047
RS
8143
8144 return target;
8145 }
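      /* Added illustration, not original commentary: the sequence above
         computes the complex conjugate, conj (a + b*i) = a - b*i, as
             realpart (target) = realpart (op0);
             imagpart (target) = - imagpart (op0);
         wrapped so later passes treat the two moves as one unit.  */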
8146
e976b8b2
MS
8147 case TRY_CATCH_EXPR:
8148 {
8149 tree handler = TREE_OPERAND (exp, 1);
8150
8151 expand_eh_region_start ();
8152
8153 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8154
8155 expand_eh_region_end (handler);
8156
8157 return op0;
8158 }
8159
b335b813
PB
8160 case TRY_FINALLY_EXPR:
8161 {
8162 tree try_block = TREE_OPERAND (exp, 0);
8163 tree finally_block = TREE_OPERAND (exp, 1);
8164 rtx finally_label = gen_label_rtx ();
8165 rtx done_label = gen_label_rtx ();
8166 rtx return_link = gen_reg_rtx (Pmode);
8167 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8168 (tree) finally_label, (tree) return_link);
8169 TREE_SIDE_EFFECTS (cleanup) = 1;
8170
8171 /* Start a new binding layer that will keep track of all cleanup
8172 actions to be performed. */
8173 expand_start_bindings (0);
8174
8175 target_temp_slot_level = temp_slot_level;
8176
8177 expand_decl_cleanup (NULL_TREE, cleanup);
8178 op0 = expand_expr (try_block, target, tmode, modifier);
8179
8180 preserve_temp_slots (op0);
8181 expand_end_bindings (NULL_TREE, 0, 0);
8182 emit_jump (done_label);
8183 emit_label (finally_label);
8184 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8185 emit_indirect_jump (return_link);
8186 emit_label (done_label);
8187 return op0;
8188 }
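      /* Added control-flow sketch with illustrative GNU C labels, not
         original commentary.  The code emitted above behaves like:

             ... try block ...
             return_link = &&resume; goto finally_label;   (the cleanup)
           resume:
             goto done_label;
           finally_label:
             ... finally block ...
             goto *return_link;                 (emit_indirect_jump)
           done_label:

         so the finally block is shared by the normal exit and every
         cleanup path, each supplying its own resume address.  */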
8189
8190 case GOTO_SUBROUTINE_EXPR:
8191 {
8192 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8193 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8194 rtx return_address = gen_label_rtx ();
8195 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8196 emit_jump (subr);
8197 emit_label (return_address);
8198 return const0_rtx;
8199 }
8200
e976b8b2
MS
8201 case POPDCC_EXPR:
8202 {
8203 rtx dcc = get_dynamic_cleanup_chain ();
38a448ca 8204 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
e976b8b2
MS
8205 return const0_rtx;
8206 }
8207
8208 case POPDHC_EXPR:
8209 {
8210 rtx dhc = get_dynamic_handler_chain ();
38a448ca 8211 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
e976b8b2
MS
8212 return const0_rtx;
8213 }
8214
d3707adb
RH
8215 case VA_ARG_EXPR:
8216 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8217
bbf6f052 8218 default:
90764a87 8219 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
8220 }
8221
8222 /* Here to do an ordinary binary operator, generating an instruction
8223 from the optab already placed in `this_optab'. */
8224 binop:
8225 preexpand_calls (exp);
e5e809f4 8226 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
8227 subtarget = 0;
8228 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 8229 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8230 binop2:
8231 temp = expand_binop (mode, this_optab, op0, op1, target,
8232 unsignedp, OPTAB_LIB_WIDEN);
8233 if (temp == 0)
8234 abort ();
8235 return temp;
8236}
b93a436e
JL
8237\f
8238/* Return the tree node and offset if a given argument corresponds to
8239 a string constant. */
8240
28f4ec01 8241tree
b93a436e
JL
8242string_constant (arg, ptr_offset)
8243 tree arg;
8244 tree *ptr_offset;
8245{
8246 STRIP_NOPS (arg);
8247
8248 if (TREE_CODE (arg) == ADDR_EXPR
8249 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8250 {
8251 *ptr_offset = integer_zero_node;
8252 return TREE_OPERAND (arg, 0);
8253 }
8254 else if (TREE_CODE (arg) == PLUS_EXPR)
8255 {
8256 tree arg0 = TREE_OPERAND (arg, 0);
8257 tree arg1 = TREE_OPERAND (arg, 1);
8258
8259 STRIP_NOPS (arg0);
8260 STRIP_NOPS (arg1);
8261
8262 if (TREE_CODE (arg0) == ADDR_EXPR
8263 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 8264 {
b93a436e
JL
8265 *ptr_offset = arg1;
8266 return TREE_OPERAND (arg0, 0);
bbf6f052 8267 }
b93a436e
JL
8268 else if (TREE_CODE (arg1) == ADDR_EXPR
8269 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 8270 {
b93a436e
JL
8271 *ptr_offset = arg0;
8272 return TREE_OPERAND (arg1, 0);
bbf6f052 8273 }
b93a436e 8274 }
ca695ac9 8275
b93a436e
JL
8276 return 0;
8277}
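
/* Usage sketch, added as an example; `c_string_arg' is hypothetical and
   not part of expr.c.  For an argument tree such as "hello" + 2,
   string_constant returns the STRING_CST for "hello" and sets
   *PTR_OFFSET to the tree for 2; otherwise it returns 0.  */

static const char *
c_string_arg (arg)
     tree arg;
{
  tree offset;
  tree str = string_constant (arg, &offset);

  if (str == 0 || TREE_CODE (offset) != INTEGER_CST)
    return 0;
  return TREE_STRING_POINTER (str) + TREE_INT_CST_LOW (offset);
}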
ca695ac9 8278\f
b93a436e
JL
8279/* Expand code for a post- or pre- increment or decrement
8280 and return the RTX for the result.
8281 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
1499e0a8 8282
b93a436e
JL
8283static rtx
8284expand_increment (exp, post, ignore)
8285 register tree exp;
8286 int post, ignore;
ca695ac9 8287{
b93a436e
JL
8288 register rtx op0, op1;
8289 register rtx temp, value;
8290 register tree incremented = TREE_OPERAND (exp, 0);
8291 optab this_optab = add_optab;
8292 int icode;
8293 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8294 int op0_is_copy = 0;
8295 int single_insn = 0;
8296 /* 1 means we can't store into OP0 directly,
8297 because it is a subreg narrower than a word,
8298 and we don't dare clobber the rest of the word. */
8299 int bad_subreg = 0;
1499e0a8 8300
b93a436e
JL
8301 /* Stabilize any component ref that might need to be
8302 evaluated more than once below. */
8303 if (!post
8304 || TREE_CODE (incremented) == BIT_FIELD_REF
8305 || (TREE_CODE (incremented) == COMPONENT_REF
8306 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8307 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8308 incremented = stabilize_reference (incremented);
8309 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8310 ones into save exprs so that they don't accidentally get evaluated
8311 more than once by the code below. */
8312 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8313 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8314 incremented = save_expr (incremented);
e9a25f70 8315
b93a436e
JL
8316 /* Compute the operands as RTX.
8317 Note whether OP0 is the actual lvalue or a copy of it:
8318 I believe it is a copy iff it is a register or subreg
8319 and insns were generated in computing it. */
e9a25f70 8320
b93a436e
JL
8321 temp = get_last_insn ();
8322 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
e9a25f70 8323
b93a436e
JL
8324 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8325 in place but instead must do sign- or zero-extension during assignment,
8326 so we copy it into a new register and let the code below use it as
8327 a copy.
e9a25f70 8328
b93a436e
JL
8329 Note that we can safely modify this SUBREG since it is known not to be
8330 shared (it was made by the expand_expr call above). */
8331
8332 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8333 {
8334 if (post)
8335 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8336 else
8337 bad_subreg = 1;
8338 }
8339 else if (GET_CODE (op0) == SUBREG
8340 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8341 {
8342 /* We cannot increment this SUBREG in place. If we are
8343 post-incrementing, get a copy of the old value. Otherwise,
8344 just mark that we cannot increment in place. */
8345 if (post)
8346 op0 = copy_to_reg (op0);
8347 else
8348 bad_subreg = 1;
e9a25f70
JL
8349 }
8350
b93a436e
JL
8351 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8352 && temp != get_last_insn ());
8353 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8354 EXPAND_MEMORY_USE_BAD);
1499e0a8 8355
b93a436e
JL
8356 /* Decide whether incrementing or decrementing. */
8357 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8358 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8359 this_optab = sub_optab;
8360
8361 /* Convert decrement by a constant into a negative increment. */
8362 if (this_optab == sub_optab
8363 && GET_CODE (op1) == CONST_INT)
ca695ac9 8364 {
b93a436e
JL
8365 op1 = GEN_INT (- INTVAL (op1));
8366 this_optab = add_optab;
ca695ac9 8367 }
1499e0a8 8368
b93a436e
JL
8369 /* For a preincrement, see if we can do this with a single instruction. */
8370 if (!post)
8371 {
8372 icode = (int) this_optab->handlers[(int) mode].insn_code;
8373 if (icode != (int) CODE_FOR_nothing
8374 /* Make sure that OP0 is valid for operands 0 and 1
8375 of the insn we want to queue. */
8376 && (*insn_operand_predicate[icode][0]) (op0, mode)
8377 && (*insn_operand_predicate[icode][1]) (op0, mode)
8378 && (*insn_operand_predicate[icode][2]) (op1, mode))
8379 single_insn = 1;
8380 }
bbf6f052 8381
b93a436e
JL
8382 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8383 then we cannot just increment OP0. We must therefore contrive to
8384 increment the original value. Then, for postincrement, we can return
8385 OP0 since it is a copy of the old value. For preincrement, expand here
8386 unless we can do it with a single insn.
bbf6f052 8387
b93a436e
JL
8388 Likewise if storing directly into OP0 would clobber high bits
8389 we need to preserve (bad_subreg). */
8390 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 8391 {
b93a436e
JL
8392 /* This is the easiest way to increment the value wherever it is.
8393 Problems with multiple evaluation of INCREMENTED are prevented
8394 because either (1) it is a component_ref or preincrement,
8395 in which case it was stabilized above, or (2) it is an array_ref
8396 with constant index in an array in a register, which is
8397 safe to reevaluate. */
8398 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8399 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8400 ? MINUS_EXPR : PLUS_EXPR),
8401 TREE_TYPE (exp),
8402 incremented,
8403 TREE_OPERAND (exp, 1));
a358cee0 8404
b93a436e
JL
8405 while (TREE_CODE (incremented) == NOP_EXPR
8406 || TREE_CODE (incremented) == CONVERT_EXPR)
8407 {
8408 newexp = convert (TREE_TYPE (incremented), newexp);
8409 incremented = TREE_OPERAND (incremented, 0);
8410 }
bbf6f052 8411
b93a436e
JL
8412 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8413 return post ? op0 : temp;
8414 }
bbf6f052 8415
b93a436e
JL
8416 if (post)
8417 {
8418 /* We have a true reference to the value in OP0.
8419 If there is an insn to add or subtract in this mode, queue it.
8420 Queueing the increment insn avoids the register shuffling
8421 that often results if we must increment now and first save
8422 the old value for subsequent use. */
bbf6f052 8423
b93a436e
JL
8424#if 0 /* Turned off to avoid making extra insn for indexed memref. */
8425 op0 = stabilize (op0);
8426#endif
41dfd40c 8427
b93a436e
JL
8428 icode = (int) this_optab->handlers[(int) mode].insn_code;
8429 if (icode != (int) CODE_FOR_nothing
8430 /* Make sure that OP0 is valid for operands 0 and 1
8431 of the insn we want to queue. */
8432 && (*insn_operand_predicate[icode][0]) (op0, mode)
8433 && (*insn_operand_predicate[icode][1]) (op0, mode))
8434 {
8435 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8436 op1 = force_reg (mode, op1);
bbf6f052 8437
b93a436e
JL
8438 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8439 }
8440 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8441 {
8442 rtx addr = (general_operand (XEXP (op0, 0), mode)
8443 ? force_reg (Pmode, XEXP (op0, 0))
8444 : copy_to_reg (XEXP (op0, 0)));
8445 rtx temp, result;
ca695ac9 8446
b93a436e
JL
8447 op0 = change_address (op0, VOIDmode, addr);
8448 temp = force_reg (GET_MODE (op0), op0);
8449 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8450 op1 = force_reg (mode, op1);
ca695ac9 8451
b93a436e
JL
8452 /* The increment queue is LIFO, thus we have to `queue'
8453 the instructions in reverse order. */
8454 enqueue_insn (op0, gen_move_insn (op0, temp));
8455 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8456 return result;
bbf6f052
RK
8457 }
8458 }
ca695ac9 8459
b93a436e
JL
8460 /* Preincrement, or we can't increment with one simple insn. */
8461 if (post)
8462 /* Save a copy of the value before inc or dec, to return it later. */
8463 temp = value = copy_to_reg (op0);
8464 else
8465 /* Arrange to return the incremented value. */
8466 /* Copy the rtx because expand_binop will protect from the queue,
8467 and the results of that would be invalid for us to return
8468 if our caller does emit_queue before using our result. */
8469 temp = copy_rtx (value = op0);
bbf6f052 8470
b93a436e
JL
8471 /* Increment however we can. */
8472 op1 = expand_binop (mode, this_optab, value, op1,
7d384cc0 8473 current_function_check_memory_usage ? NULL_RTX : op0,
b93a436e
JL
8474 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8475 /* Make sure the value is stored into OP0. */
8476 if (op1 != op0)
8477 emit_move_insn (op0, op1);
5718612f 8478
b93a436e
JL
8479 return temp;
8480}
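
/* Semantic sketch, added as an example; `increment_sketch' is
   hypothetical and not part of expr.c.  This is what expand_increment
   arranges for x++ and ++x at the C level: POST nonzero yields the old
   value, zero yields the new one.  */

static int
increment_sketch (lvalue, delta, post)
     int *lvalue;
     int delta, post;
{
  int old = *lvalue;		/* the copy made for postincrement */

  *lvalue = old + delta;	/* the (possibly queued) add insn */
  return post ? old : *lvalue;
}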
8481\f
8482/* Expand all function calls contained within EXP, innermost ones first.
8483 But don't look within expressions that have sequence points.
8484 For each CALL_EXPR, record the rtx for its value
8485 in the CALL_EXPR_RTL field. */
5718612f 8486
b93a436e
JL
8487static void
8488preexpand_calls (exp)
8489 tree exp;
8490{
8491 register int nops, i;
8492 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5718612f 8493
b93a436e
JL
8494 if (! do_preexpand_calls)
8495 return;
5718612f 8496
b93a436e 8497 /* Only expressions and references can contain calls. */
bbf6f052 8498
b93a436e
JL
8499 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8500 return;
bbf6f052 8501
b93a436e
JL
8502 switch (TREE_CODE (exp))
8503 {
8504 case CALL_EXPR:
8505 /* Do nothing if already expanded. */
8506 if (CALL_EXPR_RTL (exp) != 0
8507 /* Do nothing if the call returns a variable-sized object. */
8508 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8509 /* Do nothing to built-in functions. */
8510 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
8511 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8512 == FUNCTION_DECL)
8513 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8514 return;
bbf6f052 8515
b93a436e
JL
8516 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8517 return;
bbf6f052 8518
b93a436e
JL
8519 case COMPOUND_EXPR:
8520 case COND_EXPR:
8521 case TRUTH_ANDIF_EXPR:
8522 case TRUTH_ORIF_EXPR:
8523 /* If we find one of these, then we can be sure
8524 the adjust will be done for it (since it makes jumps).
8525 Do it now, so that if this is inside an argument
8526 of a function, we don't get the stack adjustment
8527 after some other args have already been pushed. */
8528 do_pending_stack_adjust ();
8529 return;
bbf6f052 8530
b93a436e
JL
8531 case BLOCK:
8532 case RTL_EXPR:
8533 case WITH_CLEANUP_EXPR:
8534 case CLEANUP_POINT_EXPR:
8535 case TRY_CATCH_EXPR:
8536 return;
bbf6f052 8537
b93a436e
JL
8538 case SAVE_EXPR:
8539 if (SAVE_EXPR_RTL (exp) != 0)
8540 return;
8541
8542 default:
8543 break;
ca695ac9 8544 }
bbf6f052 8545
b93a436e
JL
8546 nops = tree_code_length[(int) TREE_CODE (exp)];
8547 for (i = 0; i < nops; i++)
8548 if (TREE_OPERAND (exp, i) != 0)
8549 {
8550 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8551 if (type == 'e' || type == '<' || type == '1' || type == '2'
8552 || type == 'r')
8553 preexpand_calls (TREE_OPERAND (exp, i));
8554 }
8555}
8556\f
8557/* At the start of a function, record that we have no previously-pushed
8558 arguments waiting to be popped. */
bbf6f052 8559
b93a436e
JL
8560void
8561init_pending_stack_adjust ()
8562{
8563 pending_stack_adjust = 0;
8564}
bbf6f052 8565
b93a436e 8566/* When exiting from a function, if safe, clear out any pending stack adjust
060fbabf
JL
8567 so the adjustment won't get done.
8568
8569 Note, if the current function calls alloca, then it must have a
8570 frame pointer regardless of the value of flag_omit_frame_pointer. */
bbf6f052 8571
b93a436e
JL
8572void
8573clear_pending_stack_adjust ()
8574{
8575#ifdef EXIT_IGNORE_STACK
8576 if (optimize > 0
060fbabf
JL
8577 && (! flag_omit_frame_pointer || current_function_calls_alloca)
8578 && EXIT_IGNORE_STACK
b93a436e
JL
8579 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8580 && ! flag_inline_functions)
8581 pending_stack_adjust = 0;
8582#endif
8583}
bbf6f052 8584
b93a436e
JL
8585/* Pop any previously-pushed arguments that have not been popped yet. */
8586
8587void
8588do_pending_stack_adjust ()
8589{
8590 if (inhibit_defer_pop == 0)
ca695ac9 8591 {
b93a436e
JL
8592 if (pending_stack_adjust != 0)
8593 adjust_stack (GEN_INT (pending_stack_adjust));
8594 pending_stack_adjust = 0;
bbf6f052 8595 }
bbf6f052
RK
8596}
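
/* Added sketch with hypothetical names, not expr.c's real state: the
   deferral that pending_stack_adjust implements.  Argument pops after
   calls are batched and performed as one adjustment.  */

static int pending_sketch;	/* bytes pushed but not yet popped */

static void
note_args_to_pop_sketch (argbytes)
     int argbytes;
{
  pending_sketch += argbytes;	/* defer the pop after a call */
}

static void
flush_adjust_sketch ()
{
  if (pending_sketch != 0)
    {
      /* A single adjustment covers all the deferred pops, as
	 adjust_stack (GEN_INT (pending_stack_adjust)) does above.  */
      pending_sketch = 0;
    }
}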
8597\f
b93a436e 8598/* Expand conditional expressions. */
bbf6f052 8599
b93a436e
JL
8600/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8601 LABEL is an rtx of code CODE_LABEL, in this function and all the
8602 functions here. */
bbf6f052 8603
b93a436e
JL
8604void
8605jumpifnot (exp, label)
ca695ac9 8606 tree exp;
b93a436e 8607 rtx label;
bbf6f052 8608{
b93a436e
JL
8609 do_jump (exp, label, NULL_RTX);
8610}
bbf6f052 8611
b93a436e 8612/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
ca695ac9 8613
b93a436e
JL
8614void
8615jumpif (exp, label)
8616 tree exp;
8617 rtx label;
8618{
8619 do_jump (exp, NULL_RTX, label);
8620}
ca695ac9 8621
b93a436e
JL
8622/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8623 the result is zero, or IF_TRUE_LABEL if the result is one.
8624 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8625 meaning fall through in that case.
ca695ac9 8626
b93a436e
JL
8627 do_jump always does any pending stack adjust except when it does not
8628 actually perform a jump. An example where there is no jump
8629 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
ca695ac9 8630
b93a436e
JL
8631 This function is responsible for optimizing cases such as
8632 &&, || and comparison operators in EXP. */
5718612f 8633
b93a436e
JL
8634void
8635do_jump (exp, if_false_label, if_true_label)
8636 tree exp;
8637 rtx if_false_label, if_true_label;
8638{
8639 register enum tree_code code = TREE_CODE (exp);
8640 /* Some cases need to create a label to jump to
8641 in order to properly fall through.
8642 These cases set DROP_THROUGH_LABEL nonzero. */
8643 rtx drop_through_label = 0;
8644 rtx temp;
b93a436e
JL
8645 int i;
8646 tree type;
8647 enum machine_mode mode;
ca695ac9 8648
dbecbbe4
JL
8649#ifdef MAX_INTEGER_COMPUTATION_MODE
8650 check_max_integer_computation_mode (exp);
8651#endif
8652
b93a436e 8653 emit_queue ();
ca695ac9 8654
b93a436e 8655 switch (code)
ca695ac9 8656 {
b93a436e 8657 case ERROR_MARK:
ca695ac9 8658 break;
bbf6f052 8659
b93a436e
JL
8660 case INTEGER_CST:
8661 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8662 if (temp)
8663 emit_jump (temp);
8664 break;
bbf6f052 8665
b93a436e
JL
8666#if 0
8667 /* This is not true with #pragma weak */
8668 case ADDR_EXPR:
8669 /* The address of something can never be zero. */
8670 if (if_true_label)
8671 emit_jump (if_true_label);
8672 break;
8673#endif
bbf6f052 8674
b93a436e
JL
8675 case NOP_EXPR:
8676 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8677 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8678 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8679 goto normal;
8680 case CONVERT_EXPR:
8681 /* If we are narrowing the operand, we have to do the compare in the
8682 narrower mode. */
8683 if ((TYPE_PRECISION (TREE_TYPE (exp))
8684 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8685 goto normal;
8686 case NON_LVALUE_EXPR:
8687 case REFERENCE_EXPR:
8688 case ABS_EXPR:
8689 case NEGATE_EXPR:
8690 case LROTATE_EXPR:
8691 case RROTATE_EXPR:
8692 /* These cannot change zero->non-zero or vice versa. */
8693 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8694 break;
bbf6f052 8695
b93a436e
JL
8696#if 0
8697 /* This is never less insns than evaluating the PLUS_EXPR followed by
8698 a test and can be longer if the test is eliminated. */
8699 case PLUS_EXPR:
8700 /* Reduce to minus. */
8701 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8702 TREE_OPERAND (exp, 0),
8703 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8704 TREE_OPERAND (exp, 1))));
8705 /* Process as MINUS. */
ca695ac9 8706#endif
bbf6f052 8707
b93a436e
JL
8708 case MINUS_EXPR:
8709 /* Non-zero iff operands of minus differ. */
b30f05db
BS
8710 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
8711 TREE_OPERAND (exp, 0),
8712 TREE_OPERAND (exp, 1)),
8713 NE, NE, if_false_label, if_true_label);
b93a436e 8714 break;
bbf6f052 8715
b93a436e
JL
8716 case BIT_AND_EXPR:
8717 /* If we are AND'ing with a small constant, do this comparison in the
8718 smallest type that fits. If the machine doesn't have comparisons
8719 that small, it will be converted back to the wider comparison.
8720 This helps if we are testing the sign bit of a narrower object.
8721 combine can't do this for us because it can't know whether a
8722 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
bbf6f052 8723
b93a436e
JL
8724 if (! SLOW_BYTE_ACCESS
8725 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8726 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8727 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8728 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8729 && (type = type_for_mode (mode, 1)) != 0
8730 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8731 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8732 != CODE_FOR_nothing))
8733 {
8734 do_jump (convert (type, exp), if_false_label, if_true_label);
8735 break;
8736 }
8737 goto normal;
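      /* Added example: for `x & 0x80' with 32-bit int x, I above is 7,
         so the test is done as an 8-bit (QImode) comparison when the
         machine has one -- in effect a sign-bit test of the low byte.  */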
bbf6f052 8738
b93a436e
JL
8739 case TRUTH_NOT_EXPR:
8740 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8741 break;
bbf6f052 8742
b93a436e
JL
8743 case TRUTH_ANDIF_EXPR:
8744 if (if_false_label == 0)
8745 if_false_label = drop_through_label = gen_label_rtx ();
8746 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8747 start_cleanup_deferral ();
8748 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8749 end_cleanup_deferral ();
8750 break;
bbf6f052 8751
b93a436e
JL
8752 case TRUTH_ORIF_EXPR:
8753 if (if_true_label == 0)
8754 if_true_label = drop_through_label = gen_label_rtx ();
8755 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8756 start_cleanup_deferral ();
8757 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8758 end_cleanup_deferral ();
8759 break;
bbf6f052 8760
b93a436e
JL
8761 case COMPOUND_EXPR:
8762 push_temp_slots ();
8763 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8764 preserve_temp_slots (NULL_RTX);
8765 free_temp_slots ();
8766 pop_temp_slots ();
8767 emit_queue ();
8768 do_pending_stack_adjust ();
8769 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8770 break;
bbf6f052 8771
b93a436e
JL
8772 case COMPONENT_REF:
8773 case BIT_FIELD_REF:
8774 case ARRAY_REF:
8775 {
8776 int bitsize, bitpos, unsignedp;
8777 enum machine_mode mode;
8778 tree type;
8779 tree offset;
8780 int volatilep = 0;
8781 int alignment;
bbf6f052 8782
b93a436e
JL
8783 /* Get description of this reference. We don't actually care
8784 about the underlying object here. */
8785 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8786 &mode, &unsignedp, &volatilep,
8787 &alignment);
bbf6f052 8788
b93a436e
JL
8789 type = type_for_size (bitsize, unsignedp);
8790 if (! SLOW_BYTE_ACCESS
8791 && type != 0 && bitsize >= 0
8792 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8793 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8794 != CODE_FOR_nothing))
8795 {
8796 do_jump (convert (type, exp), if_false_label, if_true_label);
8797 break;
8798 }
8799 goto normal;
8800 }
bbf6f052 8801
b93a436e
JL
8802 case COND_EXPR:
8803 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8804 if (integer_onep (TREE_OPERAND (exp, 1))
8805 && integer_zerop (TREE_OPERAND (exp, 2)))
8806 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
bbf6f052 8807
b93a436e
JL
8808 else if (integer_zerop (TREE_OPERAND (exp, 1))
8809 && integer_onep (TREE_OPERAND (exp, 2)))
8810 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
bbf6f052 8811
b93a436e
JL
8812 else
8813 {
8814 register rtx label1 = gen_label_rtx ();
8815 drop_through_label = gen_label_rtx ();
bbf6f052 8816
b93a436e 8817 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
bbf6f052 8818
b93a436e
JL
8819 start_cleanup_deferral ();
8820 /* Now the THEN-expression. */
8821 do_jump (TREE_OPERAND (exp, 1),
8822 if_false_label ? if_false_label : drop_through_label,
8823 if_true_label ? if_true_label : drop_through_label);
8824 /* In case the do_jump just above never jumps. */
8825 do_pending_stack_adjust ();
8826 emit_label (label1);
bbf6f052 8827
b93a436e
JL
8828 /* Now the ELSE-expression. */
8829 do_jump (TREE_OPERAND (exp, 2),
8830 if_false_label ? if_false_label : drop_through_label,
8831 if_true_label ? if_true_label : drop_through_label);
8832 end_cleanup_deferral ();
8833 }
8834 break;
bbf6f052 8835
b93a436e
JL
8836 case EQ_EXPR:
8837 {
8838 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 8839
9ec36da5
JL
8840 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
8841 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
8842 {
8843 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
8844 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
8845 do_jump
8846 (fold
8847 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
8848 fold (build (EQ_EXPR, TREE_TYPE (exp),
8849 fold (build1 (REALPART_EXPR,
8850 TREE_TYPE (inner_type),
8851 exp0)),
8852 fold (build1 (REALPART_EXPR,
8853 TREE_TYPE (inner_type),
8854 exp1)))),
8855 fold (build (EQ_EXPR, TREE_TYPE (exp),
8856 fold (build1 (IMAGPART_EXPR,
8857 TREE_TYPE (inner_type),
8858 exp0)),
8859 fold (build1 (IMAGPART_EXPR,
8860 TREE_TYPE (inner_type),
8861 exp1)))))),
8862 if_false_label, if_true_label);
8863 }
9ec36da5
JL
8864
8865 else if (integer_zerop (TREE_OPERAND (exp, 1)))
8866 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8867
b93a436e
JL
8868 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
8869 && !can_compare_p (TYPE_MODE (inner_type)))
8870 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8871 else
b30f05db 8872 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
b93a436e
JL
8873 break;
8874 }
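      /* Added note: for complex operands the code above folds
         `z1 == z2' into
             real (z1) == real (z2) && imag (z1) == imag (z2)
         and jumps on that; the NE_EXPR case below uses `||'.  */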
bbf6f052 8875
b93a436e
JL
8876 case NE_EXPR:
8877 {
8878 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 8879
9ec36da5
JL
8880 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
8881 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
8882 {
8883 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
8884 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
8885 do_jump
8886 (fold
8887 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
8888 fold (build (NE_EXPR, TREE_TYPE (exp),
8889 fold (build1 (REALPART_EXPR,
8890 TREE_TYPE (inner_type),
8891 exp0)),
8892 fold (build1 (REALPART_EXPR,
8893 TREE_TYPE (inner_type),
8894 exp1)))),
8895 fold (build (NE_EXPR, TREE_TYPE (exp),
8896 fold (build1 (IMAGPART_EXPR,
8897 TREE_TYPE (inner_type),
8898 exp0)),
8899 fold (build1 (IMAGPART_EXPR,
8900 TREE_TYPE (inner_type),
8901 exp1)))))),
8902 if_false_label, if_true_label);
8903 }
9ec36da5
JL
8904
8905 else if (integer_zerop (TREE_OPERAND (exp, 1)))
8906 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8907
b93a436e
JL
8908 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
8909 && !can_compare_p (TYPE_MODE (inner_type)))
8910 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8911 else
b30f05db 8912 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
b93a436e
JL
8913 break;
8914 }
bbf6f052 8915
b93a436e
JL
8916 case LT_EXPR:
8917 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8918 == MODE_INT)
8919 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8920 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8921 else
b30f05db 8922 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
b93a436e 8923 break;
bbf6f052 8924
b93a436e
JL
8925 case LE_EXPR:
8926 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8927 == MODE_INT)
8928 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8929 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8930 else
b30f05db 8931 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
b93a436e 8932 break;
bbf6f052 8933
b93a436e
JL
8934 case GT_EXPR:
8935 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8936 == MODE_INT)
8937 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8938 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8939 else
b30f05db 8940 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
b93a436e 8941 break;
bbf6f052 8942
b93a436e
JL
8943 case GE_EXPR:
8944 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8945 == MODE_INT)
8946 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8947 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8948 else
b30f05db 8949 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
b93a436e 8950 break;
bbf6f052 8951
b93a436e
JL
8952 default:
8953 normal:
8954 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8955#if 0
8956 /* This is not needed any more and causes poor code since it causes
8957 comparisons and tests from non-SI objects to have different code
8958 sequences. */
8959 /* Copy to register to avoid generating bad insns by cse
8960 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8961 if (!cse_not_expected && GET_CODE (temp) == MEM)
8962 temp = copy_to_reg (temp);
ca695ac9 8963#endif
b93a436e 8964 do_pending_stack_adjust ();
b30f05db
BS
8965 /* Do any postincrements in the expression that was tested. */
8966 emit_queue ();
8967
8968 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
8969 {
8970 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
8971 if (target)
8972 emit_jump (target);
8973 }
b93a436e 8974 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
b30f05db 8975 && ! can_compare_p (GET_MODE (temp)))
b93a436e
JL
8976 /* Note swapping the labels gives us not-equal. */
8977 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
8978 else if (GET_MODE (temp) != VOIDmode)
b30f05db
BS
8979 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
8980 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
8981 GET_MODE (temp), NULL_RTX, 0,
8982 if_false_label, if_true_label);
b93a436e
JL
8983 else
8984 abort ();
8985 }
bbf6f052 8986
b93a436e
JL
8987 if (drop_through_label)
8988 {
8989 /* If do_jump produces code that might be jumped around,
8990 do any stack adjusts from that code, before the place
8991 where control merges in. */
8992 do_pending_stack_adjust ();
8993 emit_label (drop_through_label);
8994 }
bbf6f052 8995}
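
/* Added sketch; `andif_sketch' is hypothetical and not from expr.c.
   It shows the jump structure the TRUTH_ANDIF_EXPR case above emits
   for `a && b': pure control flow, no boolean value materialized.  */

static void
andif_sketch (a, b)
     int a, b;
{
  if (a == 0)
    goto if_false;	/* do_jump (op0, if_false_label, NULL_RTX) */
  if (b == 0)
    goto if_false;	/* do_jump (op1, if_false_label, if_true_label) */
  /* ... true arm ... */
  return;

 if_false:
  /* ... false arm ... */
  return;
}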
b93a436e
JL
8996\f
8997/* Given a comparison expression EXP for values too wide to be compared
8998 with one insn, test the comparison and jump to the appropriate label.
8999 The code of EXP is ignored; we always test GT if SWAP is 0,
9000 and LT if SWAP is 1. */
bbf6f052 9001
b93a436e
JL
9002static void
9003do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
9004 tree exp;
9005 int swap;
9006 rtx if_false_label, if_true_label;
9007{
9008 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
9009 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
9010 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b93a436e 9011 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
bbf6f052 9012
b30f05db 9013 do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label);
f81497d9
RS
9014}
9015
b93a436e
JL
9016/* Compare OP0 with OP1, word at a time, in mode MODE.
9017 UNSIGNEDP says to do unsigned comparison.
9018 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
f81497d9 9019
b93a436e
JL
9020void
9021do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
9022 enum machine_mode mode;
9023 int unsignedp;
9024 rtx op0, op1;
9025 rtx if_false_label, if_true_label;
f81497d9 9026{
b93a436e
JL
9027 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9028 rtx drop_through_label = 0;
9029 int i;
f81497d9 9030
b93a436e
JL
9031 if (! if_true_label || ! if_false_label)
9032 drop_through_label = gen_label_rtx ();
9033 if (! if_true_label)
9034 if_true_label = drop_through_label;
9035 if (! if_false_label)
9036 if_false_label = drop_through_label;
f81497d9 9037
b93a436e
JL
9038 /* Compare a word at a time, high order first. */
9039 for (i = 0; i < nwords; i++)
9040 {
b93a436e 9041 rtx op0_word, op1_word;
bbf6f052 9042
b93a436e
JL
9043 if (WORDS_BIG_ENDIAN)
9044 {
9045 op0_word = operand_subword_force (op0, i, mode);
9046 op1_word = operand_subword_force (op1, i, mode);
9047 }
9048 else
9049 {
9050 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9051 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9052 }
bbf6f052 9053
b93a436e 9054 /* All but high-order word must be compared as unsigned. */
b30f05db
BS
9055 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9056 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9057 NULL_RTX, if_true_label);
bbf6f052 9058
b93a436e 9059 /* Consider lower words only if these are equal. */
b30f05db
BS
9060 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9061 NULL_RTX, 0, NULL_RTX, if_false_label);
b93a436e 9062 }
bbf6f052 9063
b93a436e
JL
9064 if (if_false_label)
9065 emit_jump (if_false_label);
9066 if (drop_through_label)
9067 emit_label (drop_through_label);
bbf6f052
RK
9068}
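
/* Added sketch; `wide_gtu_sketch' is hypothetical.  It restates the
   loop above as portable C over NWORDS words stored high-order first,
   for the all-unsigned case (the real code compares the high-order
   word signed when UNSIGNEDP is zero).  */

static int
wide_gtu_sketch (a, b, nwords)
     unsigned long *a, *b;
     int nwords;
{
  int i;

  for (i = 0; i < nwords; i++)
    {
      if (a[i] > b[i])
	return 1;	/* jump to if_true_label */
      if (a[i] != b[i])
	return 0;	/* a[i] < b[i]: jump to if_false_label */
      /* Equal so far: consider the next lower word.  */
    }
  return 0;		/* all words equal, so not greater */
}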
9069
b93a436e
JL
9070/* Given an EQ_EXPR expression EXP for values too wide to be compared
9071 with one insn, test the comparison and jump to the appropriate label. */
bbf6f052 9072
b93a436e
JL
9073static void
9074do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9075 tree exp;
9076 rtx if_false_label, if_true_label;
bbf6f052 9077{
b93a436e
JL
9078 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9079 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9080 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9081 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9082 int i;
9083 rtx drop_through_label = 0;
bbf6f052 9084
b93a436e
JL
9085 if (! if_false_label)
9086 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 9087
b93a436e 9088 for (i = 0; i < nwords; i++)
b30f05db
BS
9089 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9090 operand_subword_force (op1, i, mode),
9091 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9092 word_mode, NULL_RTX, 0, if_false_label,
9093 NULL_RTX);
bbf6f052 9094
b93a436e
JL
9095 if (if_true_label)
9096 emit_jump (if_true_label);
9097 if (drop_through_label)
9098 emit_label (drop_through_label);
bbf6f052 9099}
b93a436e
JL
9100\f
9101/* Jump according to whether OP0 is 0.
9102 We assume that OP0 has an integer mode that is too wide
9103 for the available compare insns. */
bbf6f052 9104
f5963e61 9105void
b93a436e
JL
9106do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9107 rtx op0;
9108 rtx if_false_label, if_true_label;
ca695ac9 9109{
b93a436e
JL
9110 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9111 rtx part;
9112 int i;
9113 rtx drop_through_label = 0;
bbf6f052 9114
b93a436e
JL
9115 /* The fastest way of doing this comparison on almost any machine is to
9116 "or" all the words and compare the result. If all have to be loaded
9117 from memory and this is a very wide item, it's possible this may
9118 be slower, but that's highly unlikely. */
bbf6f052 9119
b93a436e
JL
9120 part = gen_reg_rtx (word_mode);
9121 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9122 for (i = 1; i < nwords && part != 0; i++)
9123 part = expand_binop (word_mode, ior_optab, part,
9124 operand_subword_force (op0, i, GET_MODE (op0)),
9125 part, 1, OPTAB_WIDEN);
bbf6f052 9126
b93a436e
JL
9127 if (part != 0)
9128 {
b30f05db
BS
9129 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9130 NULL_RTX, 0, if_false_label, if_true_label);
bbf6f052 9131
b93a436e
JL
9132 return;
9133 }
bbf6f052 9134
b93a436e
JL
9135 /* If we couldn't do the "or" simply, do this with a series of compares. */
9136 if (! if_false_label)
9137 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 9138
b93a436e 9139 for (i = 0; i < nwords; i++)
b30f05db
BS
9140 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9141 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9142 if_false_label, NULL_RTX);
bbf6f052 9143
b93a436e
JL
9144 if (if_true_label)
9145 emit_jump (if_true_label);
0f41302f 9146
b93a436e
JL
9147 if (drop_through_label)
9148 emit_label (drop_through_label);
bbf6f052 9149}
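
/* Added sketch; `wide_is_zero_sketch' is hypothetical.  It shows the
   fast path above: OR all the words together and test the single
   result against zero.  */

static int
wide_is_zero_sketch (w, nwords)
     unsigned long *w;
     int nwords;
{
  unsigned long part = w[0];
  int i;

  for (i = 1; i < nwords; i++)
    part |= w[i];	/* the expand_binop with ior_optab above */
  return part == 0;
}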
b93a436e 9150\f
b30f05db 9151/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
b93a436e
JL
9152 (including code to compute the values to be compared)
9153 and set (CC0) according to the result.
b30f05db 9154 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 9155
b93a436e 9156 We force a stack adjustment unless there are currently
b30f05db 9157 things pushed on the stack that aren't yet used.
ca695ac9 9158
b30f05db
BS
9159 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9160 compared.
9161
9162 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9163 size of MODE should be used. */
9164
9165rtx
9166compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9167 register rtx op0, op1;
9168 enum rtx_code code;
9169 int unsignedp;
9170 enum machine_mode mode;
9171 rtx size;
9172 int align;
b93a436e 9173{
b30f05db 9174 rtx tem;
76bbe028 9175
b30f05db
BS
9176 /* If one operand is constant, make it the second one. Only do this
9177 if the other operand is not constant as well. */
ca695ac9 9178
b30f05db
BS
9179 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9180 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
bbf6f052 9181 {
b30f05db
BS
9182 tem = op0;
9183 op0 = op1;
9184 op1 = tem;
9185 code = swap_condition (code);
ca695ac9 9186 }
bbf6f052 9187
b30f05db 9188 if (flag_force_mem)
b93a436e 9189 {
b30f05db
BS
9190 op0 = force_not_mem (op0);
9191 op1 = force_not_mem (op1);
9192 }
bbf6f052 9193
b30f05db
BS
9194 do_pending_stack_adjust ();
9195
9196 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9197 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9198 return tem;
9199
9200#if 0
9201 /* There's no need to do this now that combine.c can eliminate lots of
9202 sign extensions. This can be less efficient in certain cases on other
9203 machines. */
9204
9205 /* If this is a signed equality comparison, we can do it as an
9206 unsigned comparison since zero-extension is cheaper than sign
9207 extension and comparisons with zero are done as unsigned. This is
9208 the case even on machines that can do fast sign extension, since
9209 zero-extension is easier to combine with other operations than
9210 sign-extension is. If we are comparing against a constant, we must
9211 convert it to what it would look like unsigned. */
9212 if ((code == EQ || code == NE) && ! unsignedp
9213 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9214 {
9215 if (GET_CODE (op1) == CONST_INT
9216 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9217 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9218 unsignedp = 1;
b93a436e
JL
9219 }
9220#endif
b30f05db
BS
9221
9222 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
0f41302f 9223
b30f05db 9224 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
ca695ac9 9225}
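
/* Added example, not original commentary: the canonicalization above
   turns `(const_int 5) LT (reg x)' into `(reg x) GT (const_int 5)' --
   swap the operands, then swap the condition -- so only the second
   operand of the emitted compare can be constant.  */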
bbf6f052 9226
b30f05db 9227/* Like do_compare_and_jump but expects the values to compare as two rtx's.
b93a436e 9228 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 9229
b93a436e
JL
9230 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9231 compared.
bbf6f052 9232
b93a436e
JL
9233 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9234 size of MODE should be used. */
ca695ac9 9235
b30f05db
BS
9236void
9237do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9238 if_false_label, if_true_label)
b93a436e
JL
9239 register rtx op0, op1;
9240 enum rtx_code code;
9241 int unsignedp;
9242 enum machine_mode mode;
9243 rtx size;
9244 int align;
b30f05db 9245 rtx if_false_label, if_true_label;
bbf6f052 9246{
b93a436e 9247 rtx tem;
b30f05db
BS
9248 int dummy_true_label = 0;
9249
9250 /* Reverse the comparison if that is safe and we want to jump if it is
9251 false. */
9252 if (! if_true_label && ! FLOAT_MODE_P (mode))
9253 {
9254 if_true_label = if_false_label;
9255 if_false_label = 0;
9256 code = reverse_condition (code);
9257 }
bbf6f052 9258
b93a436e
JL
9259 /* If one operand is constant, make it the second one. Only do this
9260 if the other operand is not constant as well. */
e7c33f54 9261
b93a436e
JL
9262 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9263 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
ca695ac9 9264 {
b93a436e
JL
9265 tem = op0;
9266 op0 = op1;
9267 op1 = tem;
9268 code = swap_condition (code);
9269 }
bbf6f052 9270
b93a436e
JL
9271 if (flag_force_mem)
9272 {
9273 op0 = force_not_mem (op0);
9274 op1 = force_not_mem (op1);
9275 }
bbf6f052 9276
b93a436e 9277 do_pending_stack_adjust ();
ca695ac9 9278
b93a436e
JL
9279 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9280 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
b30f05db
BS
9281 {
9282 if (tem == const_true_rtx)
9283 {
9284 if (if_true_label)
9285 emit_jump (if_true_label);
9286 }
9287 else
9288 {
9289 if (if_false_label)
9290 emit_jump (if_false_label);
9291 }
9292 return;
9293 }
ca695ac9 9294
b93a436e
JL
9295#if 0
9296 /* There's no need to do this now that combine.c can eliminate lots of
9297 sign extensions. This can be less efficient in certain cases on other
9298 machines. */
ca695ac9 9299
b93a436e
JL
9300 /* If this is a signed equality comparison, we can do it as an
9301 unsigned comparison since zero-extension is cheaper than sign
9302 extension and comparisons with zero are done as unsigned. This is
9303 the case even on machines that can do fast sign extension, since
9304 zero-extension is easier to combine with other operations than
9305 sign-extension is. If we are comparing against a constant, we must
9306 convert it to what it would look like unsigned. */
9307 if ((code == EQ || code == NE) && ! unsignedp
9308 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9309 {
9310 if (GET_CODE (op1) == CONST_INT
9311 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9312 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9313 unsignedp = 1;
9314 }
9315#endif
ca695ac9 9316
b30f05db
BS
9317 if (! if_true_label)
9318 {
9319 dummy_true_label = 1;
9320 if_true_label = gen_label_rtx ();
9321 }
9322
9323 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9324 if_true_label);
9325
9326 if (if_false_label)
9327 emit_jump (if_false_label);
9328 if (dummy_true_label)
9329 emit_label (if_true_label);
9330}
9331
9332/* Generate code for a comparison expression EXP (including code to compute
9333 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9334 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9335 generated code will drop through.
9336 SIGNED_CODE should be the rtx operation for this comparison for
9337 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9338
9339 We force a stack adjustment unless there are currently
9340 things pushed on the stack that aren't yet used. */
9341
9342static void
9343do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9344 if_true_label)
9345 register tree exp;
9346 enum rtx_code signed_code, unsigned_code;
9347 rtx if_false_label, if_true_label;
9348{
9349 register rtx op0, op1;
9350 register tree type;
9351 register enum machine_mode mode;
9352 int unsignedp;
9353 enum rtx_code code;
9354
9355 /* Don't crash if the comparison was erroneous. */
9356 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9357 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9358 return;
9359
9360 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9361 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9362 mode = TYPE_MODE (type);
9363 unsignedp = TREE_UNSIGNED (type);
9364 code = unsignedp ? unsigned_code : signed_code;
9365
9366#ifdef HAVE_canonicalize_funcptr_for_compare
9367 /* If function pointers need to be "canonicalized" before they can
9368 be reliably compared, then canonicalize them. */
9369 if (HAVE_canonicalize_funcptr_for_compare
9370 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9371 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9372 == FUNCTION_TYPE))
9373 {
9374 rtx new_op0 = gen_reg_rtx (mode);
9375
9376 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9377 op0 = new_op0;
9378 }
9379
9380 if (HAVE_canonicalize_funcptr_for_compare
9381 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9382 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9383 == FUNCTION_TYPE))
9384 {
9385 rtx new_op1 = gen_reg_rtx (mode);
9386
9387 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9388 op1 = new_op1;
9389 }
9390#endif
9391
9392 /* Do any postincrements in the expression that was tested. */
9393 emit_queue ();
9394
9395 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9396 ((mode == BLKmode)
9397 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9398 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT,
9399 if_false_label, if_true_label);
b93a436e
JL
9400}
9401\f
9402/* Generate code to calculate EXP using a store-flag instruction
9403 and return an rtx for the result. EXP is either a comparison
9404 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 9405
b93a436e 9406 If TARGET is nonzero, store the result there if convenient.
ca695ac9 9407
b93a436e
JL
9408 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9409 cheap.
ca695ac9 9410
b93a436e
JL
9411 Return zero if there is no suitable set-flag instruction
9412 available on this machine.
ca695ac9 9413
b93a436e
JL
9414 Once expand_expr has been called on the arguments of the comparison,
9415 we are committed to doing the store flag, since it is not safe to
9416 re-evaluate the expression. We emit the store-flag insn by calling
9417 emit_store_flag, but only expand the arguments if we have a reason
9418 to believe that emit_store_flag will be successful. If we think that
9419 it will, but it isn't, we have to simulate the store-flag with a
9420 set/jump/set sequence. */
ca695ac9 9421
b93a436e
JL
9422static rtx
9423do_store_flag (exp, target, mode, only_cheap)
9424 tree exp;
9425 rtx target;
9426 enum machine_mode mode;
9427 int only_cheap;
9428{
9429 enum rtx_code code;
9430 tree arg0, arg1, type;
9431 tree tem;
9432 enum machine_mode operand_mode;
9433 int invert = 0;
9434 int unsignedp;
9435 rtx op0, op1;
9436 enum insn_code icode;
9437 rtx subtarget = target;
381127e8 9438 rtx result, label;
ca695ac9 9439
b93a436e
JL
9440 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9441 result at the end. We can't simply invert the test since it would
9442 have already been inverted if it were valid. This case occurs for
9443 some floating-point comparisons. */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
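  /* For example, "x < 1" becomes "x <= 0" and "x >= 1" becomes "x > 0";
     for signed types, "x <= -1" becomes "x < 0" and "x > -1" becomes
     "x >= 0".  */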

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
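  /* For instance, "(x & 8) != 0" becomes "(x >> 3) & 1", and
     "(x & 8) == 0" becomes "((x >> 3) & 1) ^ 1"; when the tested bit
     is the most significant bit of the type, the final AND can be
     omitted.  */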

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
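      /* E.g. testing bit 1 of "x >> 2" is the same as testing bit 3
         of X itself, provided bit 3 is within the precision of the
         type.  */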

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if emit_store_flag does anything, it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
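  /* That sequence is, in effect:

         target = 1;
         if (op0 <cond> op1) goto label;
         target = 0;
       label:

     with the two constants interchanged when INVERT is set.  */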
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
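  /* For example, for a case range of 10 ... 15, INDEX has already had
     10 subtracted and RANGE is 5.  An original value of 9 yields an
     INDEX of (unsigned) -1, which compares greater than 5 just as an
     original value of 16 (INDEX 6) does, so both branch to
     DEFAULT_LABEL.  */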

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
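  /* The address formed below is

         (plus:Pmode (mult:Pmode INDEX (const_int ENTRY_SIZE))
                     (label_ref TABLE_LABEL))

     where ENTRY_SIZE stands for GET_MODE_SIZE (CASE_VECTOR_MODE),
     i.e. the start of the table plus the index scaled by the size
     of one table entry.  */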
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */