/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
#include "toplev.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))
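
/* Editorial note, not from the original source: CEIL is round-up
   integer division.  For example, CEIL (10, 4) == 3, so the number of
   words needed to hold SIZE bytes is CEIL (SIZE, UNITS_PER_WORD).  */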

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif
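
/* Editorial sketch, not from the original source: STACK_PUSH_CODE names
   the rtx code used to address a push.  On a downward-growing stack a
   push of a register is emitted roughly as

     (set (mem (pre_dec (reg sp))) (reg))

   i.e. the stack pointer is adjusted before the store; PRE_INC is the
   mirror image for upward-growing stacks.  */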

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;

static rtx get_push_address	PROTO ((int));

static rtx enqueue_insn		PROTO((rtx, rtx));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct move_by_pieces *));
static void clear_by_pieces	PROTO((rtx, int, int));
static void clear_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct clear_by_pieces *));
static int is_zeros_p		PROTO((tree));
static int mostly_zeros_p	PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					    tree, tree, int));
static void store_constructor	PROTO((tree, rtx, int));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int,
				       int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree, int));
static int fixed_type_p		PROTO((tree));
static rtx var_rtx		PROTO((tree));
static rtx expand_increment	PROTO((tree, int, int));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_compare_and_jump	PROTO((tree, enum rtx_code, enum rtx_code, rtx, rtx));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
                                       (SIZE, ALIGN) < MOVE_RATIO)
#endif
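
/* Editorial illustration, not from the original source: with the
   default MOVE_RATIO of 15 (optimizing for speed), a constant-size copy
   that move_by_pieces_ninsns says needs 14 or fewer scalar moves is
   done inline by move_by_pieces; a copy needing 15 or more falls back
   to a movstr pattern or a library call in emit_block_move.  */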

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx_REG (mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  end_sequence ();
  obfree (free_point);
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  current_function->expr
    = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
                                  var, NULL_RTX, NULL_RTX, body,
                                  pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

          RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
          MEM_COPY_ATTRIBUTES (new, x);
          MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (new));
              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }
          return new;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
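
/* Editorial sketch, not from the original source: a typical caller
   expanding something like "a = b++" protects B before using it and
   flushes the queue afterwards, e.g.

     rtx safe_b = protect_from_queue (b_rtx, 0);
     emit_move_insn (a_rtx, safe_b);
     emit_queue ();

   so the assignment sees the pre-increment value of B even though the
   increment insn itself is only emitted by emit_queue.  */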

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}
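
/* Editorial example, not from the original source: queued_subexp_p
   returns 1 for an address such as (mem (plus (queued ...) (const_int 4)))
   because a queued increment would alter it, and 0 for (reg N) or any
   expression built only from unqueued MEM/PLUS/MINUS/MULT operands.  */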

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
        {
          QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
          emit_insn (QUEUED_BODY (p));
        }
      else
        QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
        {
          /* Try converting directly if the insn is supported.  */
          if ((code = can_extend_p (to_mode, from_mode, 0))
              != CODE_FOR_nothing)
            {
              emit_unop_insn (code, to, from, UNKNOWN);
              return;
            }
        }

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        default:
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
                                       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
        {
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
        {
          from = convert_to_mode (QImode, from, unsignedp);
          from_mode = QImode;
        }
      else
        {
#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
            {
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpqiqi2 */
          abort ();
        }
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
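
/* Editorial sketch, not from the original source: sign-extending a
   HImode pseudo into an SImode pseudo reduces to

     rtx hi = gen_reg_rtx (HImode);
     rtx si = gen_reg_rtx (SImode);
     convert_move (si, hi, 0);

   where the 0 requests sign-extension; passing 1 would request
   zero-extension instead.  */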

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return GEN_INT (val);
        }

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
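
/* Editorial example, not from the original source: convert_modes can
   fold constants without emitting any insns.  Widening (const_int -1)
   from QImode to SImode as an unsigned value,

     rtx x = convert_modes (SImode, QImode, GEN_INT (-1), 1);

   yields (const_int 255), per the zero-extension case above.  */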
\f
/* This macro is used to determine the largest unit size that
   move_by_pieces can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
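
/* Editorial sketch, not from the original source: on a 32-bit target,
   a call such as

     move_by_pieces (to, from, 7, 4);

   (7 bytes, 4-byte alignment) typically emits one SImode move, one
   HImode move and one QImode move, working from the widest usable
   integer mode down.  */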

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx_MEM (mode, data->to_addr)
             : copy_rtx (change_address (data->to, mode,
                                         plus_constant (data->to_addr,
                                                        data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
        = (data->autinc_from
           ? gen_rtx_MEM (mode, data->from_addr)
           : copy_rtx (change_address (data->from, mode,
                                       plus_constant (data->from_addr,
                                                      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, from1));
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && ((GET_CODE (size) == CONST_INT
                   && ((unsigned HOST_WIDE_INT) INTVAL (size)
                       <= (GET_MODE_MASK (mode) >> 1)))
                  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
              && (insn_operand_predicate[(int) code][0] == 0
                  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
              && (insn_operand_predicate[(int) code][1] == 0
                  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
              && (insn_operand_predicate[(int) code][3] == 0
                  || (*insn_operand_predicate[(int) code][3]) (opalign,
                                                               VOIDmode)))
            {
              rtx op2;
              rtx last = get_last_insn ();
              rtx pat;

              op2 = convert_to_mode (mode, size, 1);
              if (insn_operand_predicate[(int) code][2] != 0
                  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  return 0;
                }
              else
                delete_insns_since (last);
            }
        }

      /* X, Y, or SIZE may have been passed through protect_from_queue.

         It is unsafe to save the value generated by protect_from_queue
         and reuse it later.  Consider what happens if emit_queue is
         called before the return value from protect_from_queue is used.

         Expansion of the CALL_EXPR below will call emit_queue before
         we are finished emitting RTL for argument setup.  So if we are
         not careful we could get the wrong value for an argument.

         To avoid this problem we go ahead and emit code to copy X, Y &
         SIZE into new pseudos.  We can then place those new pseudos
         into an RTL_EXPR and use them later, even after a call to
         emit_queue.

         Note this is not strictly needed for library calls since they
         do not call emit_queue before loading their arguments.  However,
         we may need to have library calls call emit_queue in the future
         since failing to do so could cause problems for targets which
         define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
                              TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
         memcpy in this context.

         This could be a user call to memcpy and the user may wish to
         examine the return value from memcpy.

         For targets where libcalls and normal calls have different
         conventions for returning pointers, we could end up generating
         incorrect code.

         So instead of using a libcall sequence we build up a suitable
         CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
        {
          tree fntype;

          /* This was copied from except.c, I don't know if all this is
             necessary in this context or not.  */
          fn = get_identifier ("memcpy");
          push_obstacks_nochange ();
          end_temporary_allocation ();
          fntype = build_pointer_type (void_type_node);
          fntype = build_function_type (fntype, NULL_TREE);
          fn = build_decl (FUNCTION_DECL, fn, fntype);
          DECL_EXTERNAL (fn) = 1;
          TREE_PUBLIC (fn) = 1;
          DECL_ARTIFICIAL (fn) = 1;
          make_decl_rtl (fn, NULL_PTR, 1);
          assemble_external (fn);
          pop_obstacks ();
        }

      /* We need to make an argument list for the function call.

         memcpy has three arguments, the first two are void * addresses and
         the last is a size_t byte count for the copy.  */
      arg_list
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
        = build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                         call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, y, Pmode, x, Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node), size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
\f
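/* Editorial sketch, not from the original source: a caller copying a
   32-byte BLKmode aggregate with known byte alignment ALIGN might write

     rtx ret = emit_block_move (target_mem, source_mem,
                                GEN_INT (32), align);

   small constant sizes are expanded inline via move_by_pieces; larger
   ones try the movstr patterns and finally the memcpy/bcopy call
   constructed above.  */
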
1721/* Copy all or part of a value X into registers starting at REGNO.
1722 The number of registers to be filled is NREGS. */
1723
1724void
1725move_block_to_reg (regno, x, nregs, mode)
1726 int regno;
1727 rtx x;
1728 int nregs;
1729 enum machine_mode mode;
1730{
1731 int i;
381127e8
RL
1732#ifdef HAVE_load_multiple
1733 rtx pat;
1734 rtx last;
1735#endif
bbf6f052 1736
72bb9717
RK
1737 if (nregs == 0)
1738 return;
1739
bbf6f052
RK
1740 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1741 x = validize_mem (force_const_mem (mode, x));
1742
1743 /* See if the machine can do this with a load multiple insn. */
1744#ifdef HAVE_load_multiple
c3a02afe 1745 if (HAVE_load_multiple)
bbf6f052 1746 {
c3a02afe 1747 last = get_last_insn ();
38a448ca 1748 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
c3a02afe
RK
1749 GEN_INT (nregs));
1750 if (pat)
1751 {
1752 emit_insn (pat);
1753 return;
1754 }
1755 else
1756 delete_insns_since (last);
bbf6f052 1757 }
bbf6f052
RK
1758#endif
1759
1760 for (i = 0; i < nregs; i++)
38a448ca 1761 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
bbf6f052
RK
1762 operand_subword_force (x, i, mode));
1763}
1764
1765/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1766   The number of registers to be filled is NREGS.  SIZE indicates the number
1767   of bytes in the object X.  */
1768
1769
1770void
1771move_block_from_reg (regno, x, nregs, size)
1772     int regno;
1773     rtx x;
1774     int nregs;
1775     int size;
1776{
1777  int i;
1778#ifdef HAVE_store_multiple
1779  rtx pat;
1780  rtx last;
1781#endif
1782  enum machine_mode mode;
1783
1784  /* If SIZE is that of a mode no bigger than a word, just use that
1785     mode's store operation.  */
1786  if (size <= UNITS_PER_WORD
1787      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1788    {
1789      emit_move_insn (change_address (x, mode, NULL),
1790		      gen_rtx_REG (mode, regno));
1791      return;
1792    }
1793
1794  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1795     to the left before storing to memory.  Note that the previous test
1796     doesn't handle all cases (e.g. SIZE == 3).  */
1797  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1798    {
1799      rtx tem = operand_subword (x, 0, 1, BLKmode);
1800      rtx shift;
1801
1802      if (tem == 0)
1803	abort ();
1804
1805      shift = expand_shift (LSHIFT_EXPR, word_mode,
1806			    gen_rtx_REG (word_mode, regno),
1807			    build_int_2 ((UNITS_PER_WORD - size)
1808					 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1809      emit_move_insn (tem, shift);
1810      return;
1811    }
1812
1813  /* See if the machine can do this with a store multiple insn.  */
1814#ifdef HAVE_store_multiple
1815  if (HAVE_store_multiple)
1816    {
1817      last = get_last_insn ();
1818      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1819				GEN_INT (nregs));
1820      if (pat)
1821	{
1822	  emit_insn (pat);
1823	  return;
1824	}
1825      else
1826	delete_insns_since (last);
1827    }
1828#endif
1829
1830  for (i = 0; i < nregs; i++)
1831    {
1832      rtx tem = operand_subword (x, i, 1, BLKmode);
1833
1834      if (tem == 0)
1835	abort ();
1836
1837      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1838    }
1839}
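/* Illustrative note (annotation, not part of the original source): with
   UNITS_PER_WORD == 4 on a big-endian target, storing a 3-byte value
   takes the shift path above: the register is shifted left by
   (4 - 3) * BITS_PER_UNIT == 8 bits so the three significant bytes land
   at the low memory addresses of the destination word.  */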
1840
1841/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1842   registers represented by a PARALLEL.  SSIZE represents the total size of
1843   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
1844   SRC in bits.  */
1845/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1846   the balance will be in what would be the low-order memory addresses, i.e.
1847   left justified for big endian, right justified for little endian.  This
1848   happens to be true for the targets currently using this support.  If this
1849   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1850   would be needed.  */
1851
1852void
1853emit_group_load (dst, orig_src, ssize, align)
1854     rtx dst, orig_src;
1855     int align, ssize;
1856{
1857  rtx *tmps, src;
1858  int start, i;
1859
1860  if (GET_CODE (dst) != PARALLEL)
1861    abort ();
1862
1863  /* Check for a NULL entry, used to indicate that the parameter goes
1864     both on the stack and in registers.  */
1865  if (XEXP (XVECEXP (dst, 0, 0), 0))
1866    start = 0;
1867  else
1868 start = 1;
1869
1870 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1871
1872 /* If we won't be loading directly from memory, protect the real source
1873 from strange tricks we might play. */
1874 src = orig_src;
1875 if (GET_CODE (src) != MEM)
1876 {
1877 src = gen_reg_rtx (GET_MODE (orig_src));
1878 emit_move_insn (src, orig_src);
1879 }
1880
1881 /* Process the pieces. */
1882 for (i = start; i < XVECLEN (dst, 0); i++)
1883 {
1884 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1885 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1886 int bytelen = GET_MODE_SIZE (mode);
1887 int shift = 0;
1888
1889 /* Handle trailing fragments that run over the size of the struct. */
1890 if (ssize >= 0 && bytepos + bytelen > ssize)
1891 {
1892 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1893 bytelen = ssize - bytepos;
1894 if (bytelen <= 0)
1895 abort();
1896 }
1897
1898 /* Optimize the access just a bit. */
1899 if (GET_CODE (src) == MEM
1900 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1901 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1902 && bytelen == GET_MODE_SIZE (mode))
1903 {
1904 tmps[i] = gen_reg_rtx (mode);
1905 emit_move_insn (tmps[i],
1906 change_address (src, mode,
1907 plus_constant (XEXP (src, 0),
1908 bytepos)));
1909	}
1910 else if (GET_CODE (src) == CONCAT)
1911 {
1912 if (bytepos == 0
1913 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1914 tmps[i] = XEXP (src, 0);
1915 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1916 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1917 tmps[i] = XEXP (src, 1);
1918 else
1919 abort ();
1920 }
1921      else
1922 {
1923 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1924 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1925 mode, mode, align, ssize);
1926 }
1927
1928 if (BYTES_BIG_ENDIAN && shift)
1929 {
1930 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1931 tmps[i], 0, OPTAB_WIDEN);
1932 }
1933    }
1934 emit_queue();
1935
1936 /* Copy the extracted pieces into the proper (probable) hard regs. */
1937 for (i = start; i < XVECLEN (dst, 0); i++)
1938 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1939}
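/* Illustrative note (annotation, not part of the original source): a DST
   PARALLEL describing a 16-byte value split between a float register and
   an integer register might look like

	(parallel [(expr_list (reg:DF 32) (const_int 0))
		   (expr_list (reg:DI 5) (const_int 8))])

   so the loop above extracts bytes 0-7 into register 32 and bytes 8-15
   into register 5.  The register numbers are invented for the example.  */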
1940
1941/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1942   registers represented by a PARALLEL.  SSIZE represents the total size of
1943   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */
1944
1945void
1946emit_group_store (orig_dst, src, ssize, align)
1947     rtx orig_dst, src;
1948     int ssize, align;
1949{
1950  rtx *tmps, dst;
1951  int start, i;
1952
1953  if (GET_CODE (src) != PARALLEL)
1954 abort ();
1955
1956 /* Check for a NULL entry, used to indicate that the parameter goes
1957 both on the stack and in registers. */
1958  if (XEXP (XVECEXP (src, 0, 0), 0))
1959    start = 0;
1960  else
1961    start = 1;
1962
1963  tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
1964
1965  /* Copy the (probable) hard regs into pseudos.  */
1966  for (i = start; i < XVECLEN (src, 0); i++)
1967    {
1968      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
1969      tmps[i] = gen_reg_rtx (GET_MODE (reg));
1970      emit_move_insn (tmps[i], reg);
1971    }
1972  emit_queue();
1973
1974  /* If we won't be storing directly into memory, protect the real destination
1975     from strange tricks we might play.  */
1976  dst = orig_dst;
1977 if (GET_CODE (dst) == PARALLEL)
1978 {
1979 rtx temp;
1980
1981 /* We can get a PARALLEL dst if there is a conditional expression in
1982 a return statement. In that case, the dst and src are the same,
1983 so no action is necessary. */
1984 if (rtx_equal_p (dst, src))
1985 return;
1986
1987 /* It is unclear if we can ever reach here, but we may as well handle
1988 it. Allocate a temporary, and split this into a store/load to/from
1989 the temporary. */
1990
1991 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
1992 emit_group_store (temp, src, ssize, align);
1993 emit_group_load (dst, temp, ssize, align);
1994 return;
1995 }
1996 else if (GET_CODE (dst) != MEM)
1997 {
1998 dst = gen_reg_rtx (GET_MODE (orig_dst));
1999 /* Make life a bit easier for combine. */
2000 emit_move_insn (dst, const0_rtx);
2001 }
2002 else if (! MEM_IN_STRUCT_P (dst))
2003 {
2004 /* store_bit_field requires that memory operations have
2005 mem_in_struct_p set; we might not. */
fffa9c1d 2006
2007      dst = copy_rtx (orig_dst);
2008      MEM_SET_IN_STRUCT_P (dst, 1);
2009 }
2010
2011 /* Process the pieces. */
2012 for (i = start; i < XVECLEN (src, 0); i++)
2013 {
2014 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2015 enum machine_mode mode = GET_MODE (tmps[i]);
2016 int bytelen = GET_MODE_SIZE (mode);
2017
2018 /* Handle trailing fragments that run over the size of the struct. */
2019 if (ssize >= 0 && bytepos + bytelen > ssize)
2020	{
2021	  if (BYTES_BIG_ENDIAN)
2022	    {
2023	      int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2024	      expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2025			    tmps[i], 0, OPTAB_WIDEN);
2026	    }
2027	  bytelen = ssize - bytepos;
2028	}
2029
2030 /* Optimize the access just a bit. */
2031 if (GET_CODE (dst) == MEM
2032 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2033 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2034 && bytelen == GET_MODE_SIZE (mode))
2035 {
2036 emit_move_insn (change_address (dst, mode,
2037 plus_constant (XEXP (dst, 0),
2038 bytepos)),
2039 tmps[i]);
2040 }
2041 else
2042 {
2043 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2044 mode, tmps[i], align, ssize);
2045 }
2046    }
2047 emit_queue();
2048
2049 /* Copy from the pseudo into the (probable) hard reg. */
2050 if (GET_CODE (dst) == REG)
2051 emit_move_insn (orig_dst, dst);
2052}
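/* Illustrative note (annotation, not part of the original source): this
   is the mirror image of emit_group_load; given the same style of SRC
   PARALLEL, each register is first copied into a pseudo, and each piece
   is then either moved with emit_move_insn when it is suitably aligned,
   or spilled through store_bit_field for the unaligned leftovers.  */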
2053
2054/* Generate code to copy a BLKmode object of TYPE out of a
2055 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2056 is null, a stack temporary is created. TGTBLK is returned.
2057
2058 The primary purpose of this routine is to handle functions
2059 that return BLKmode structures in registers. Some machines
2060 (the PA for example) want to return all small structures
2061 in registers regardless of the structure's alignment.
2062 */
2063
2064rtx
2065copy_blkmode_from_reg (tgtblk, srcreg, type)
2066 rtx tgtblk;
2067 rtx srcreg;
2068 tree type;
2069{
2070 int bytes = int_size_in_bytes (type);
2071 rtx src = NULL, dst = NULL;
2072  int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2073  int bitpos, xbitpos, big_endian_correction = 0;
2074
2075  if (tgtblk == 0)
2076    {
2077      tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2078      MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2079 preserve_temp_slots (tgtblk);
2080 }
2081
2082 /* This code assumes srcreg is at least a full word. If it isn't,
2083 copy it into a new pseudo which is a full word. */
2084 if (GET_MODE (srcreg) != BLKmode
2085 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2086 srcreg = convert_to_mode (word_mode, srcreg,
2087 TREE_UNSIGNED (type));
2088
2089 /* Structures whose size is not a multiple of a word are aligned
2090 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2091 machine, this means we must skip the empty high order bytes when
2092 calculating the bit offset. */
2093 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2094 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2095 * BITS_PER_UNIT));
2096
2097  /* Copy the structure BITSIZE bits at a time.
2098
2099 We could probably emit more efficient code for machines
2100 which do not use strict alignment, but it doesn't seem
2101 worth the effort at the current time. */
2102 for (bitpos = 0, xbitpos = big_endian_correction;
2103 bitpos < bytes * BITS_PER_UNIT;
2104 bitpos += bitsize, xbitpos += bitsize)
2105 {
2106
2107 /* We need a new source operand each time xbitpos is on a
2108 word boundary and when xbitpos == big_endian_correction
2109 (the first time through). */
2110 if (xbitpos % BITS_PER_WORD == 0
2111 || xbitpos == big_endian_correction)
2112 src = operand_subword_force (srcreg,
2113 xbitpos / BITS_PER_WORD,
2114 BLKmode);
2115
2116 /* We need a new destination operand each time bitpos is on
2117 a word boundary. */
2118 if (bitpos % BITS_PER_WORD == 0)
2119 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2120
2121      /* Use xbitpos for the source extraction (right justified) and
2122	 bitpos for the destination store (left justified).  */
2123 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2124 extract_bit_field (src, bitsize,
2125 xbitpos % BITS_PER_WORD, 1,
2126 NULL_RTX, word_mode,
2127 word_mode,
2128 bitsize / BITS_PER_UNIT,
2129 BITS_PER_WORD),
2130 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2131 }
2132 return tgtblk;
2133}
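/* Illustrative note (annotation, not part of the original source): for a
   6-byte structure with UNITS_PER_WORD == 4 and BITS_PER_WORD == 32 on a
   big-endian machine,

	big_endian_correction = 32 - (6 % 4) * 8 = 16,

   so extraction starts 16 bits into the first source word, skipping the
   empty high-order bytes, while the destination offsets still start at
   bit 0.  */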
2134
2135
2136/* Add a USE expression for REG to the (possibly empty) list pointed
2137   to by CALL_FUSAGE.  REG must denote a hard register.  */
2138
2139void
2140use_reg (call_fusage, reg)
2141     rtx *call_fusage, reg;
2142{
2143  if (GET_CODE (reg) != REG
2144      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2145    abort ();
2146
2147  *call_fusage
2148    = gen_rtx_EXPR_LIST (VOIDmode,
2149			 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2150}
2151
2152/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2153   starting at REGNO.  All of these registers must be hard registers.  */
2154
2155void
2156use_regs (call_fusage, regno, nregs)
2157     rtx *call_fusage;
2158     int regno;
2159     int nregs;
2160{
2161  int i;
2162
2163  if (regno + nregs > FIRST_PSEUDO_REGISTER)
2164    abort ();
2165
2166  for (i = 0; i < nregs; i++)
2167    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
2168}
2169
2170/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2171 PARALLEL REGS. This is for calls that pass values in multiple
2172 non-contiguous locations. The Irix 6 ABI has examples of this. */
2173
2174void
2175use_group_regs (call_fusage, regs)
2176 rtx *call_fusage;
2177 rtx regs;
2178{
2179 int i;
2180
2181  for (i = 0; i < XVECLEN (regs, 0); i++)
2182    {
2183      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
2184
2185      /* A NULL entry means the parameter goes both on the stack and in
2186	 registers.  This can also be a MEM for targets that pass values
2187	 partially on the stack and partially in registers.  */
2188      if (reg != 0 && GET_CODE (reg) == REG)
2189	use_reg (call_fusage, reg);
2190    }
2191}
2192\f
2193/* Generate several move instructions to clear LEN bytes of block TO.
2194 (A MEM rtx with BLKmode). The caller must pass TO through
2195 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
2196 we can assume. */
2197
2198static void
2199clear_by_pieces (to, len, align)
2200 rtx to;
2201 int len, align;
2202{
2203 struct clear_by_pieces data;
2204 rtx to_addr = XEXP (to, 0);
2205  int max_size = MOVE_MAX_PIECES + 1;
2206  enum machine_mode mode = VOIDmode, tmode;
2207  enum insn_code icode;
2208
2209 data.offset = 0;
2210 data.to_addr = to_addr;
2211 data.to = to;
2212 data.autinc_to
2213 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2214 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2215
2216 data.explicit_inc_to = 0;
2217 data.reverse
2218 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2219 if (data.reverse) data.offset = len;
2220 data.len = len;
2221
2222 data.to_struct = MEM_IN_STRUCT_P (to);
2223
2224 /* If copying requires more than two move insns,
2225 copy addresses to registers (to make displacements shorter)
2226 and use post-increment if available. */
2227 if (!data.autinc_to
2228 && move_by_pieces_ninsns (len, align) > 2)
2229 {
2230 /* Determine the main mode we'll be using */
2231 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2232 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2233 if (GET_MODE_SIZE (tmode) < max_size)
2234 mode = tmode;
2235
2236 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2237 {
2238 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2239 data.autinc_to = 1;
2240 data.explicit_inc_to = -1;
2241 }
2242      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2243 {
2244 data.to_addr = copy_addr_to_reg (to_addr);
2245 data.autinc_to = 1;
2246 data.explicit_inc_to = 1;
2247 }
2248 if (!data.autinc_to && CONSTANT_P (to_addr))
2249 data.to_addr = copy_addr_to_reg (to_addr);
2250 }
2251
2252 if (! SLOW_UNALIGNED_ACCESS
2253 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2254 align = MOVE_MAX;
2255
2256 /* First move what we can in the largest integer mode, then go to
2257 successively smaller modes. */
2258
2259 while (max_size > 1)
2260 {
2261 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2262 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2263 if (GET_MODE_SIZE (tmode) < max_size)
2264 mode = tmode;
2265
2266 if (mode == VOIDmode)
2267 break;
2268
2269 icode = mov_optab->handlers[(int) mode].insn_code;
2270 if (icode != CODE_FOR_nothing
2271 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2272 GET_MODE_SIZE (mode)))
2273 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2274
2275 max_size = GET_MODE_SIZE (mode);
2276 }
2277
2278 /* The code above should have handled everything. */
2279 if (data.len != 0)
2280 abort ();
2281}
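/* Illustrative note (annotation, not part of the original source): with
   MOVE_MAX_PIECES == 8 and a word-aligned TO, clear_by_pieces (to, 7, 4)
   would typically emit one SImode store of const0_rtx, then one HImode
   store, then one QImode store (4 + 2 + 1 bytes), one call to
   clear_by_pieces_1 per mode as MAX_SIZE shrinks.  */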
2282
2283/* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2284 with move instructions for mode MODE. GENFUN is the gen_... function
2285 to make a move insn for that mode. DATA has all the other info. */
2286
2287static void
2288clear_by_pieces_1 (genfun, mode, data)
2289     rtx (*genfun) PROTO ((rtx, ...));
2290 enum machine_mode mode;
2291 struct clear_by_pieces *data;
2292{
2293 register int size = GET_MODE_SIZE (mode);
2294 register rtx to1;
2295
2296 while (data->len >= size)
2297 {
2298 if (data->reverse) data->offset -= size;
2299
2300 to1 = (data->autinc_to
2301	     ? gen_rtx_MEM (mode, data->to_addr)
2302	     : copy_rtx (change_address (data->to, mode,
2303					 plus_constant (data->to_addr,
2304							data->offset))));
2305 MEM_IN_STRUCT_P (to1) = data->to_struct;
2306
2307      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
2308	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2309
2310 emit_insn ((*genfun) (to1, const0_rtx));
2311      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
2312	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2313
2314 if (! data->reverse) data->offset += size;
2315
2316 data->len -= size;
2317 }
2318}
2319\f
2320/* Write zeros through the storage of OBJECT.
2321   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
2322   the maximum alignment we can assume, measured in bytes.
2323
2324   If we call a function that returns the length of the block, return it.  */
2325
2326rtx
2327clear_storage (object, size, align)
2328     rtx object;
2329     rtx size;
2330     int align;
2331{
2332#ifdef TARGET_MEM_FUNCTIONS
2333 static tree fn;
2334 tree call_expr, arg_list;
2335#endif
2336 rtx retval = 0;
2337
2338 if (GET_MODE (object) == BLKmode)
2339 {
2340 object = protect_from_queue (object, 1);
2341 size = protect_from_queue (size, 0);
2342
2343 if (GET_CODE (size) == CONST_INT
2344	  && MOVE_BY_PIECES_P (INTVAL (size), align))
2345 clear_by_pieces (object, INTVAL (size), align);
2346
2347 else
2348 {
2349 /* Try the most limited insn first, because there's no point
2350 including more than one in the machine description unless
2351 the more limited one has some advantage. */
2352
2353 rtx opalign = GEN_INT (align);
2354 enum machine_mode mode;
2355
2356 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2357 mode = GET_MODE_WIDER_MODE (mode))
2358 {
2359 enum insn_code code = clrstr_optab[(int) mode];
2360
2361 if (code != CODE_FOR_nothing
2362 /* We don't need MODE to be narrower than
2363 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2364 the mode mask, as it is returned by the macro, it will
2365 definitely be less than the actual mode mask. */
2366 && ((GET_CODE (size) == CONST_INT
2367 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2368		       <= (GET_MODE_MASK (mode) >> 1)))
2369 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2370 && (insn_operand_predicate[(int) code][0] == 0
2371 || (*insn_operand_predicate[(int) code][0]) (object,
2372 BLKmode))
2373 && (insn_operand_predicate[(int) code][2] == 0
2374 || (*insn_operand_predicate[(int) code][2]) (opalign,
2375 VOIDmode)))
2376 {
2377 rtx op1;
2378 rtx last = get_last_insn ();
2379 rtx pat;
2380
2381 op1 = convert_to_mode (mode, size, 1);
2382 if (insn_operand_predicate[(int) code][1] != 0
2383 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2384 mode))
2385 op1 = copy_to_mode_reg (mode, op1);
2386
2387 pat = GEN_FCN ((int) code) (object, op1, opalign);
2388 if (pat)
2389 {
2390 emit_insn (pat);
2391		  return 0;
2392 }
2393 else
2394 delete_insns_since (last);
2395 }
2396 }
2397
2398	  /* OBJECT or SIZE may have been passed through protect_from_queue.
2399
2400 It is unsafe to save the value generated by protect_from_queue
2401 and reuse it later. Consider what happens if emit_queue is
2402 called before the return value from protect_from_queue is used.
2403
2404 Expansion of the CALL_EXPR below will call emit_queue before
2405 we are finished emitting RTL for argument setup. So if we are
2406 not careful we could get the wrong value for an argument.
2407
2408 To avoid this problem we go ahead and emit code to copy OBJECT
2409 and SIZE into new pseudos. We can then place those new pseudos
2410 into an RTL_EXPR and use them later, even after a call to
2411 emit_queue.
2412
2413 Note this is not strictly needed for library calls since they
2414 do not call emit_queue before loading their arguments. However,
2415 we may need to have library calls call emit_queue in the future
2416 since failing to do so could cause problems for targets which
2417 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2418 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
2419
2420#ifdef TARGET_MEM_FUNCTIONS
2421 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2422#else
2423 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2424 TREE_UNSIGNED (integer_type_node));
2425      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
2426#endif
2427
2428
2429#ifdef TARGET_MEM_FUNCTIONS
2430 /* It is incorrect to use the libcall calling conventions to call
2431 memset in this context.
2432
2433 This could be a user call to memset and the user may wish to
2434 examine the return value from memset.
2435
2436 For targets where libcalls and normal calls have different
2437 conventions for returning pointers, we could end up generating
2438 incorrect code.
2439
2440 So instead of using a libcall sequence we build up a suitable
2441 CALL_EXPR and expand the call in the normal fashion. */
2442 if (fn == NULL_TREE)
2443 {
2444 tree fntype;
2445
2446	  /* This was copied from except.c; I don't know whether all of this
2447	     is necessary in this context or not. */
2448 fn = get_identifier ("memset");
2449 push_obstacks_nochange ();
2450 end_temporary_allocation ();
2451 fntype = build_pointer_type (void_type_node);
2452 fntype = build_function_type (fntype, NULL_TREE);
2453 fn = build_decl (FUNCTION_DECL, fn, fntype);
2454 DECL_EXTERNAL (fn) = 1;
2455 TREE_PUBLIC (fn) = 1;
2456 DECL_ARTIFICIAL (fn) = 1;
2457 make_decl_rtl (fn, NULL_PTR, 1);
2458 assemble_external (fn);
2459 pop_obstacks ();
2460 }
2461
2462      /* We need to make an argument list for the function call.
2463
2464	 memset has three arguments: the first is a void * address, the
2465	 second an integer with the initialization value, and the last is a
2466	 size_t byte count for the copy. */
2467 arg_list
2468 = build_tree_list (NULL_TREE,
2469 make_tree (build_pointer_type (void_type_node),
2470 object));
2471 TREE_CHAIN (arg_list)
2472 = build_tree_list (NULL_TREE,
2473 make_tree (integer_type_node, const0_rtx));
2474 TREE_CHAIN (TREE_CHAIN (arg_list))
2475 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2476 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2477
2478 /* Now we have to build up the CALL_EXPR itself. */
2479 call_expr = build1 (ADDR_EXPR,
2480 build_pointer_type (TREE_TYPE (fn)), fn);
2481 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2482 call_expr, arg_list, NULL_TREE);
2483 TREE_SIDE_EFFECTS (call_expr) = 1;
2484
2485 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
2486#else
2487	  emit_library_call (bzero_libfunc, 0,
2488			     VOIDmode, 2, object, Pmode, size,
2489			     TYPE_MODE (integer_type_node));
2490#endif
2491	}
2492 }
2493 else
2494    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2495
2496 return retval;
2497}
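/* Illustrative note (annotation, not part of the original source): for a
   BLKmode OBJECT whose SIZE is a small CONST_INT, clear_storage reduces
   to clear_by_pieces; otherwise it first tries the target's clrstr
   patterns and only then falls back to calling memset (or bzero), going
   through the CALL_EXPR machinery above so that user calls to memset
   keep their normal calling conventions.  */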
2498
2499/* Generate code to copy Y into X.
2500 Both Y and X must have the same mode, except that
2501 Y can be a constant with VOIDmode.
2502 This mode cannot be BLKmode; use emit_block_move for that.
2503
2504 Return the last instruction emitted. */
2505
2506rtx
2507emit_move_insn (x, y)
2508 rtx x, y;
2509{
2510 enum machine_mode mode = GET_MODE (x);
2511
2512 x = protect_from_queue (x, 1);
2513 y = protect_from_queue (y, 0);
2514
2515 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2516 abort ();
2517
2518 /* Never force constant_p_rtx to memory. */
2519 if (GET_CODE (y) == CONSTANT_P_RTX)
2520 ;
2521 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2522 y = force_const_mem (mode, y);
2523
2524 /* If X or Y are memory references, verify that their addresses are valid
2525 for the machine. */
2526 if (GET_CODE (x) == MEM
2527 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2528 && ! push_operand (x, GET_MODE (x)))
2529 || (flag_force_addr
2530 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2531 x = change_address (x, VOIDmode, XEXP (x, 0));
2532
2533 if (GET_CODE (y) == MEM
2534 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2535 || (flag_force_addr
2536 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2537 y = change_address (y, VOIDmode, XEXP (y, 0));
2538
2539 if (mode == BLKmode)
2540 abort ();
2541
2542 return emit_move_insn_1 (x, y);
2543}
2544
2545/* Low level part of emit_move_insn.
2546 Called just like emit_move_insn, but assumes X and Y
2547 are basically valid. */
2548
2549rtx
2550emit_move_insn_1 (x, y)
2551 rtx x, y;
2552{
2553 enum machine_mode mode = GET_MODE (x);
2554 enum machine_mode submode;
2555 enum mode_class class = GET_MODE_CLASS (mode);
2556 int i;
2557
2558 if (mode >= MAX_MACHINE_MODE)
2559 abort ();
2560
2561 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2562 return
2563 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2564
2565  /* Expand complex moves by moving real part and imag part, if possible.  */
2566  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2567	   && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2568						    * BITS_PER_UNIT),
2569						   (class == MODE_COMPLEX_INT
2570						    ? MODE_INT : MODE_FLOAT),
2571						   0))
2572	   && (mov_optab->handlers[(int) submode].insn_code
2573	       != CODE_FOR_nothing))
2574    {
2575      /* Don't split destination if it is a stack push.  */
2576      int stack = push_operand (x, GET_MODE (x));
2577
2578      /* If this is a stack, push the highpart first, so it
2579	 will be in the argument order.
2580
2581	 In that case, change_address is used only to convert
2582	 the mode, not to change the address.  */
2583      if (stack)
2584	{
2585	  /* Note that the real part always precedes the imag part in memory
2586	     regardless of machine's endianness.  */
2587#ifdef STACK_GROWS_DOWNWARD
2588	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2589		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
2590		      gen_imagpart (submode, y)));
2591	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2592		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
2593		      gen_realpart (submode, y)));
2594#else
2595	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2596		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
2597		      gen_realpart (submode, y)));
2598	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2599		     (gen_rtx_MEM (submode, (XEXP (x, 0))),
2600		      gen_imagpart (submode, y)));
2601#endif
2602	}
2603      else
2604	{
2605	  /* Show the output dies here.  This is necessary for pseudos;
2606	     hard regs shouldn't appear here except as return values.
2607	     We never want to emit such a clobber after reload.  */
2608	  if (x != y
2609	      && ! (reload_in_progress || reload_completed))
2610	    {
2611	      emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2612	    }
2613
2614	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2615		     (gen_realpart (submode, x), gen_realpart (submode, y)));
2616	  emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
2617		     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
2618	}
2619
2620      return get_last_insn ();
2621    }
2622
2623  /* This will handle any multi-word mode that lacks a move_insn pattern.
2624     However, you will get better code if you define such patterns,
2625     even if they must turn into multiple assembler instructions.  */
2626  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2627    {
2628      rtx last_insn = 0;
2629
2630#ifdef PUSH_ROUNDING
2631
2632 /* If X is a push on the stack, do the push now and replace
2633 X with a reference to the stack pointer. */
2634 if (push_operand (x, GET_MODE (x)))
2635 {
2636 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2637 x = change_address (x, VOIDmode, stack_pointer_rtx);
2638 }
2639#endif
2640
2641      /* Show the output dies here.  This is necessary for pseudos;
2642	 hard regs shouldn't appear here except as return values.
2643	 We never want to emit such a clobber after reload.  */
2644      if (x != y
2645	  && ! (reload_in_progress || reload_completed))
2646	{
2647	  emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2648	}
2649
2650 for (i = 0;
2651 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2652 i++)
2653 {
2654 rtx xpart = operand_subword (x, i, 1, mode);
2655 rtx ypart = operand_subword (y, i, 1, mode);
2656
2657 /* If we can't get a part of Y, put Y into memory if it is a
2658 constant. Otherwise, force it into a register. If we still
2659 can't get a part of Y, abort. */
2660 if (ypart == 0 && CONSTANT_P (y))
2661 {
2662 y = force_const_mem (mode, y);
2663 ypart = operand_subword (y, i, 1, mode);
2664 }
2665 else if (ypart == 0)
2666 ypart = operand_subword_force (y, i, mode);
2667
2668 if (xpart == 0 || ypart == 0)
2669 abort ();
2670
2671 last_insn = emit_move_insn (xpart, ypart);
2672 }
2673
2674 return last_insn;
2675 }
2676 else
2677 abort ();
2678}
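/* Illustrative note (annotation, not part of the original source): if
   mov_optab has no DCmode pattern but does have a DFmode one, the
   complex-mode branch above conceptually splits a DCmode copy into

	(move (gen_realpart (DFmode, x)) (gen_realpart (DFmode, y)))
	(move (gen_imagpart (DFmode, x)) (gen_imagpart (DFmode, y)))

   emitted through the DFmode move pattern, plus the CLOBBER that marks
   where X dies.  */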
2679\f
2680/* Pushing data onto the stack. */
2681
2682/* Push a block of length SIZE (perhaps variable)
2683 and return an rtx to address the beginning of the block.
2684 Note that it is not possible for the value returned to be a QUEUED.
2685 The value may be virtual_outgoing_args_rtx.
2686
2687 EXTRA is the number of bytes of padding to push in addition to SIZE.
2688 BELOW nonzero means this padding comes at low addresses;
2689 otherwise, the padding comes at high addresses. */
2690
2691rtx
2692push_block (size, extra, below)
2693 rtx size;
2694 int extra, below;
2695{
2696 register rtx temp;
2697
2698 size = convert_modes (Pmode, ptr_mode, size, 1);
2699 if (CONSTANT_P (size))
2700 anti_adjust_stack (plus_constant (size, extra));
2701 else if (GET_CODE (size) == REG && extra == 0)
2702 anti_adjust_stack (size);
2703 else
2704 {
2705 rtx temp = copy_to_mode_reg (Pmode, size);
2706 if (extra != 0)
2707	temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2708 temp, 0, OPTAB_LIB_WIDEN);
2709 anti_adjust_stack (temp);
2710 }
2711
2712#if defined (STACK_GROWS_DOWNWARD) \
2713 || (defined (ARGS_GROW_DOWNWARD) \
2714 && !defined (ACCUMULATE_OUTGOING_ARGS))
2715
2716 /* Return the lowest stack address when STACK or ARGS grow downward and
2717   we are not accumulating outgoing arguments (the c4x port uses such
2718 conventions). */
2719 temp = virtual_outgoing_args_rtx;
2720 if (extra != 0 && below)
2721 temp = plus_constant (temp, extra);
2722#else
2723 if (GET_CODE (size) == CONST_INT)
2724 temp = plus_constant (virtual_outgoing_args_rtx,
2725 - INTVAL (size) - (below ? 0 : extra));
2726 else if (extra != 0 && !below)
2727    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2728 negate_rtx (Pmode, plus_constant (size, extra)));
2729 else
2730    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2731 negate_rtx (Pmode, size));
2732#endif
2733
2734 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2735}
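/* Illustrative note (annotation, not part of the original source): on a
   target that takes the #else branch above, push_block (GEN_INT (16), 8, 0)
   adjusts the stack by 24 bytes and returns an address 24 bytes below
   virtual_outgoing_args_rtx; with BELOW == 0 the 8 padding bytes sit at
   the high-address end of the block.  */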
2736
2737rtx
2738gen_push_operand ()
2739{
2740  return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2741}
2742
2743/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2744 block of SIZE bytes. */
2745
2746static rtx
2747get_push_address (size)
2748 int size;
2749{
2750 register rtx temp;
2751
2752 if (STACK_PUSH_CODE == POST_DEC)
2753    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2754  else if (STACK_PUSH_CODE == POST_INC)
2755    temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2756 else
2757 temp = stack_pointer_rtx;
2758
2759  return copy_to_reg (temp);
2760}
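/* Illustrative note (annotation, not part of the original source): with a
   post-modify push the stack pointer has already moved past the data by
   the time this runs, so the block lives at stack_pointer_rtx + SIZE
   (POST_DEC) or stack_pointer_rtx - SIZE (POST_INC); with a pre-modify
   push the stack pointer itself addresses the block.  */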
2761
2762/* Generate code to push X onto the stack, assuming it has mode MODE and
2763 type TYPE.
2764 MODE is redundant except when X is a CONST_INT (since they don't
2765 carry mode info).
2766 SIZE is an rtx for the size of data to be copied (in bytes),
2767 needed only if X is BLKmode.
2768
2769 ALIGN (in bytes) is maximum alignment we can assume.
2770
2771   If PARTIAL and REG are both nonzero, then copy that many of the first
2772   words of X into registers starting with REG, and push the rest of X.
2773   The amount of space pushed is decreased by PARTIAL words,
2774   rounded *down* to a multiple of PARM_BOUNDARY.
2775   REG must be a hard register in this case.
2776   If REG is zero but PARTIAL is not, take all other actions for an
2777   argument partially in registers, but do not actually load any
2778   registers.
2779
2780 EXTRA is the amount in bytes of extra space to leave next to this arg.
2781   This is ignored if an argument block has already been allocated.
2782
2783 On a machine that lacks real push insns, ARGS_ADDR is the address of
2784 the bottom of the argument block for this call. We use indexing off there
2785   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
2786   argument block has not been preallocated.
2787
e5e809f4
JL
2788 ARGS_SO_FAR is the size of args previously pushed for this call.
2789
2790 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2791 for arguments passed in registers. If nonzero, it will be the number
2792 of bytes required. */
2793
2794void
2795emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2796		args_addr, args_so_far, reg_parm_stack_space)
2797 register rtx x;
2798 enum machine_mode mode;
2799 tree type;
2800 rtx size;
2801 int align;
2802 int partial;
2803 rtx reg;
2804 int extra;
2805 rtx args_addr;
2806 rtx args_so_far;
2807     int reg_parm_stack_space;
2808{
2809 rtx xinner;
2810 enum direction stack_direction
2811#ifdef STACK_GROWS_DOWNWARD
2812 = downward;
2813#else
2814 = upward;
2815#endif
2816
2817 /* Decide where to pad the argument: `downward' for below,
2818 `upward' for above, or `none' for don't pad it.
2819 Default is below for small data on big-endian machines; else above. */
2820 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2821
2822 /* Invert direction if stack is post-update. */
2823 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2824 if (where_pad != none)
2825 where_pad = (where_pad == downward ? upward : downward);
2826
2827 xinner = x = protect_from_queue (x, 0);
2828
2829 if (mode == BLKmode)
2830 {
2831 /* Copy a block into the stack, entirely or partially. */
2832
2833 register rtx temp;
2834 int used = partial * UNITS_PER_WORD;
2835 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2836 int skip;
2837
2838 if (size == 0)
2839 abort ();
2840
2841 used -= offset;
2842
2843 /* USED is now the # of bytes we need not copy to the stack
2844 because registers will take care of them. */
2845
2846 if (partial != 0)
2847 xinner = change_address (xinner, BLKmode,
2848 plus_constant (XEXP (xinner, 0), used));
2849
2850 /* If the partial register-part of the arg counts in its stack size,
2851 skip the part of stack space corresponding to the registers.
2852 Otherwise, start copying to the beginning of the stack space,
2853 by setting SKIP to 0. */
2854      skip = (reg_parm_stack_space == 0) ? 0 : used;
2855
2856#ifdef PUSH_ROUNDING
2857 /* Do it with several push insns if that doesn't take lots of insns
2858 and if there is no difficulty with push insns that skip bytes
2859 on the stack for alignment purposes. */
2860 if (args_addr == 0
2861 && GET_CODE (size) == CONST_INT
2862 && skip == 0
2863	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2864 /* Here we avoid the case of a structure whose weak alignment
2865 forces many pushes of a small amount of data,
2866 and such small pushes do rounding that causes trouble. */
2867	  && ((! SLOW_UNALIGNED_ACCESS)
2868	      || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
2869	      || PUSH_ROUNDING (align) == align)
2870 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2871 {
2872 /* Push padding now if padding above and stack grows down,
2873 or if padding below and stack grows up.
2874 But if space already allocated, this has already been done. */
2875 if (extra && args_addr == 0
2876 && where_pad != none && where_pad != stack_direction)
2877	    anti_adjust_stack (GEN_INT (extra));
2878
2879	  move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
2880			  INTVAL (size) - used, align);
2881
2882	  if (current_function_check_memory_usage && ! in_check_memory_usage)
2883	    {
2884	      rtx temp;
2885
2886	      in_check_memory_usage = 1;
2887	      temp = get_push_address (INTVAL (size) - used);
2888	      if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2889		emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2890				   temp, Pmode,
2891				   XEXP (xinner, 0), Pmode,
2892				   GEN_INT (INTVAL (size) - used),
2893				   TYPE_MODE (sizetype));
2894	      else
2895		emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2896				   temp, Pmode,
2897				   GEN_INT (INTVAL (size) - used),
2898				   TYPE_MODE (sizetype),
2899				   GEN_INT (MEMORY_USE_RW),
2900				   TYPE_MODE (integer_type_node));
2901	      in_check_memory_usage = 0;
2902	    }
2903	}
2904 else
2905#endif /* PUSH_ROUNDING */
2906 {
2907 /* Otherwise make space on the stack and copy the data
2908 to the address of that space. */
2909
2910 /* Deduct words put into registers from the size we must copy. */
2911 if (partial != 0)
2912 {
2913 if (GET_CODE (size) == CONST_INT)
2914	      size = GEN_INT (INTVAL (size) - used);
2915 else
2916 size = expand_binop (GET_MODE (size), sub_optab, size,
2917 GEN_INT (used), NULL_RTX, 0,
2918 OPTAB_LIB_WIDEN);
2919 }
2920
2921 /* Get the address of the stack space.
2922 In this case, we do not deal with EXTRA separately.
2923 A single stack adjust will do. */
2924 if (! args_addr)
2925 {
2926 temp = push_block (size, extra, where_pad == downward);
2927 extra = 0;
2928 }
2929 else if (GET_CODE (args_so_far) == CONST_INT)
2930 temp = memory_address (BLKmode,
2931 plus_constant (args_addr,
2932 skip + INTVAL (args_so_far)));
2933 else
2934 temp = memory_address (BLKmode,
2935 plus_constant (gen_rtx_PLUS (Pmode,
2936 args_addr,
2937 args_so_far),
2938						     skip));
2939      if (current_function_check_memory_usage && ! in_check_memory_usage)
2940 {
2941 rtx target;
2942
2943	  in_check_memory_usage = 1;
2944	  target = copy_to_reg (temp);
2945	  if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
2946	    emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2947			       target, Pmode,
2948			       XEXP (xinner, 0), Pmode,
2949			       size, TYPE_MODE (sizetype));
2950	  else
2951	    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2952			       target, Pmode,
2953			       size, TYPE_MODE (sizetype),
2954			       GEN_INT (MEMORY_USE_RW),
2955			       TYPE_MODE (integer_type_node));
2956	  in_check_memory_usage = 0;
2957	}
2958
2959 /* TEMP is the address of the block. Copy the data there. */
2960 if (GET_CODE (size) == CONST_INT
2961	  && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
2962	{
2963	  move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
2964			  INTVAL (size), align);
2965 goto ret;
2966 }
2967      else
2968	{
2969	  rtx opalign = GEN_INT (align);
2970	  enum machine_mode mode;
2971	  rtx target = gen_rtx_MEM (BLKmode, temp);
2972
2973	  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2974	       mode != VOIDmode;
2975	       mode = GET_MODE_WIDER_MODE (mode))
2976	    {
2977 enum insn_code code = movstr_optab[(int) mode];
2978
2979 if (code != CODE_FOR_nothing
2980 && ((GET_CODE (size) == CONST_INT
2981 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2982 <= (GET_MODE_MASK (mode) >> 1)))
2983 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2984 && (insn_operand_predicate[(int) code][0] == 0
2985 || ((*insn_operand_predicate[(int) code][0])
2986 (target, BLKmode)))
2987 && (insn_operand_predicate[(int) code][1] == 0
2988 || ((*insn_operand_predicate[(int) code][1])
2989 (xinner, BLKmode)))
2990 && (insn_operand_predicate[(int) code][3] == 0
2991 || ((*insn_operand_predicate[(int) code][3])
2992 (opalign, VOIDmode))))
2993 {
2994 rtx op2 = convert_to_mode (mode, size, 1);
2995 rtx last = get_last_insn ();
2996 rtx pat;
2997
2998 if (insn_operand_predicate[(int) code][2] != 0
2999 && ! ((*insn_operand_predicate[(int) code][2])
3000 (op2, mode)))
3001 op2 = copy_to_mode_reg (mode, op2);
3002
3003 pat = GEN_FCN ((int) code) (target, xinner,
3004 op2, opalign);
3005 if (pat)
3006 {
3007 emit_insn (pat);
3008 goto ret;
3009 }
3010 else
3011 delete_insns_since (last);
3012 }
3013	    }
3014	}
3015
3016#ifndef ACCUMULATE_OUTGOING_ARGS
3017 /* If the source is referenced relative to the stack pointer,
3018 copy it to another register to stabilize it. We do not need
3019 to do this if we know that we won't be changing sp. */
3020
3021 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3022 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3023 temp = copy_to_reg (temp);
3024#endif
3025
3026 /* Make inhibit_defer_pop nonzero around the library call
3027 to force it to pop the bcopy-arguments right away. */
3028 NO_DEFER_POP;
3029#ifdef TARGET_MEM_FUNCTIONS
3030	  emit_library_call (memcpy_libfunc, 0,
3031			     VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3032 convert_to_mode (TYPE_MODE (sizetype),
3033 size, TREE_UNSIGNED (sizetype)),
3034			     TYPE_MODE (sizetype));
3035#else
3036	  emit_library_call (bcopy_libfunc, 0,
3037			     VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3038 convert_to_mode (TYPE_MODE (integer_type_node),
3039 size,
3040 TREE_UNSIGNED (integer_type_node)),
3041 TYPE_MODE (integer_type_node));
3042#endif
3043 OK_DEFER_POP;
3044 }
3045 }
3046 else if (partial > 0)
3047 {
3048 /* Scalar partly in registers. */
3049
3050 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3051 int i;
3052 int not_stack;
3053 /* # words of start of argument
3054 that we must make space for but need not store. */
3055 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3056 int args_offset = INTVAL (args_so_far);
3057 int skip;
3058
3059 /* Push padding now if padding above and stack grows down,
3060 or if padding below and stack grows up.
3061 But if space already allocated, this has already been done. */
3062 if (extra && args_addr == 0
3063 && where_pad != none && where_pad != stack_direction)
3064	anti_adjust_stack (GEN_INT (extra));
3065
3066 /* If we make space by pushing it, we might as well push
3067 the real data. Otherwise, we can leave OFFSET nonzero
3068 and leave the space uninitialized. */
3069 if (args_addr == 0)
3070 offset = 0;
3071
3072 /* Now NOT_STACK gets the number of words that we don't need to
3073 allocate on the stack. */
3074 not_stack = partial - offset;
3075
3076 /* If the partial register-part of the arg counts in its stack size,
3077 skip the part of stack space corresponding to the registers.
3078 Otherwise, start copying to the beginning of the stack space,
3079 by setting SKIP to 0. */
3080      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3081
3082 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3083 x = validize_mem (force_const_mem (mode, x));
3084
3085 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3086 SUBREGs of such registers are not allowed. */
3087 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3088 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3089 x = copy_to_reg (x);
3090
3091 /* Loop over all the words allocated on the stack for this arg. */
3092 /* We can do it by words, because any scalar bigger than a word
3093 has a size a multiple of a word. */
3094#ifndef PUSH_ARGS_REVERSED
3095 for (i = not_stack; i < size; i++)
3096#else
3097 for (i = size - 1; i >= not_stack; i--)
3098#endif
3099 if (i >= not_stack + offset)
3100 emit_push_insn (operand_subword_force (x, i, mode),
3101			  word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3102			  0, args_addr,
3103			  GEN_INT (args_offset + ((i - not_stack + skip)
3104						  * UNITS_PER_WORD)),
3105			  reg_parm_stack_space);
3106 }
3107 else
3108 {
3109 rtx addr;
3110      rtx target = NULL_RTX;
3111
3112 /* Push padding now if padding above and stack grows down,
3113 or if padding below and stack grows up.
3114 But if space already allocated, this has already been done. */
3115 if (extra && args_addr == 0
3116 && where_pad != none && where_pad != stack_direction)
3117	anti_adjust_stack (GEN_INT (extra));
3118
3119#ifdef PUSH_ROUNDING
3120 if (args_addr == 0)
3121 addr = gen_push_operand ();
3122 else
3123#endif
3124 {
3125 if (GET_CODE (args_so_far) == CONST_INT)
3126 addr
3127 = memory_address (mode,
3128 plus_constant (args_addr,
3129 INTVAL (args_so_far)));
3130 else
3131	    addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3132						       args_so_far));
3133 target = addr;
3134 }
3135
3136      emit_move_insn (gen_rtx_MEM (mode, addr), x);
3137
3138      if (current_function_check_memory_usage && ! in_check_memory_usage)
3139	{
3140	  in_check_memory_usage = 1;
3141 if (target == 0)
3142 target = get_push_address (GET_MODE_SIZE (mode));
3143
3144	  if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
3145	    emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3146			       target, Pmode,
3147			       XEXP (x, 0), Pmode,
3148			       GEN_INT (GET_MODE_SIZE (mode)),
3149			       TYPE_MODE (sizetype));
3150	  else
3151	    emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3152			       target, Pmode,
3153			       GEN_INT (GET_MODE_SIZE (mode)),
3154			       TYPE_MODE (sizetype),
3155			       GEN_INT (MEMORY_USE_RW),
3156			       TYPE_MODE (integer_type_node));
3157	  in_check_memory_usage = 0;
3158	}
3159 }
3160
3161 ret:
3162 /* If part should go in registers, copy that part
3163 into the appropriate registers. Do this now, at the end,
3164 since mem-to-mem copies above may do function calls. */
3165  if (partial > 0 && reg != 0)
3166 {
3167 /* Handle calls that pass values in multiple non-contiguous locations.
3168 The Irix 6 ABI has examples of this. */
3169 if (GET_CODE (reg) == PARALLEL)
3170	emit_group_load (reg, x, -1, align);  /* ??? size? */
3171 else
3172 move_block_to_reg (REGNO (reg), x, partial, mode);
3173 }
3174
3175 if (extra && args_addr == 0 && where_pad == stack_direction)
3176    anti_adjust_stack (GEN_INT (extra));
3177}
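/* Illustrative note (annotation, not part of the original source): for a
   scalar DImode argument with PARTIAL == 1 on a 32-bit target, the loop
   over words above pushes only word 1; the register-copy code at `ret'
   then loads word 0 into REG via move_block_to_reg, and the stack space
   reserved shrinks by one word (rounded down to PARM_BOUNDARY).  */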
3178\f
3179/* Expand an assignment that stores the value of FROM into TO.
3180 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3181 (This may contain a QUEUED rtx;
3182 if the value is constant, this rtx is a constant.)
3183 Otherwise, the returned value is NULL_RTX.
3184
3185 SUGGEST_REG is no longer actually used.
3186 It used to mean, copy the value through a register
3187 and return that register, if that is possible.
3188   We now use WANT_VALUE to decide whether to do this.  */
3189
3190rtx
3191expand_assignment (to, from, want_value, suggest_reg)
3192 tree to, from;
3193 int want_value;
3194 int suggest_reg;
3195{
3196 register rtx to_rtx = 0;
3197 rtx result;
3198
3199 /* Don't crash if the lhs of the assignment was erroneous. */
3200
3201 if (TREE_CODE (to) == ERROR_MARK)
3202 {
3203 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3204 return want_value ? result : NULL_RTX;
3205 }
3206
3207 /* Assignment of a structure component needs special treatment
3208 if the structure component's rtx is not simply a MEM.
3209   Assignment of an array element at a constant index, and assignment of
3210   an array element in an unaligned packed structure field, have the same
3211   problem.  */
bbf6f052 3212
3213 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3214 || TREE_CODE (to) == ARRAY_REF)
3215 {
3216 enum machine_mode mode1;
3217 int bitsize;
3218 int bitpos;
3219      tree offset;
3220 int unsignedp;
3221 int volatilep = 0;
3222      tree tem;
3223      int alignment;
3224
3225 push_temp_slots ();
3226 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3227 &unsignedp, &volatilep, &alignment);
3228
3229 /* If we are going to use store_bit_field and extract_bit_field,
3230 make sure to_rtx will be safe for multiple use. */
3231
3232 if (mode1 == VOIDmode && want_value)
3233 tem = stabilize_reference (tem);
3234
3235      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3236      if (offset != 0)
3237	{
3238	  rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3239
3240	  if (GET_CODE (to_rtx) != MEM)
3241	    abort ();
3242
3243	  if (GET_MODE (offset_rtx) != ptr_mode)
3244	    {
3245#ifdef POINTERS_EXTEND_UNSIGNED
3246	      offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3247#else
3248	      offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3249#endif
3250	    }
3251
3252	  /* A constant address in TO_RTX can have VOIDmode; we must not try
3253	     to call force_reg in that case.  Avoid it.  */
3254	  if (GET_CODE (to_rtx) == MEM
3255	      && GET_MODE (to_rtx) == BLKmode
3256	      && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3257	      && bitsize
3258	      && (bitpos % bitsize) == 0
3259	      && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3260	      && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3261	    {
3262	      rtx temp = change_address (to_rtx, mode1,
3263					 plus_constant (XEXP (to_rtx, 0),
3264							(bitpos /
3265							 BITS_PER_UNIT)));
3266	      if (GET_CODE (XEXP (temp, 0)) == REG)
3267		to_rtx = temp;
3268	      else
3269		to_rtx = change_address (to_rtx, mode1,
3270					 force_reg (GET_MODE (XEXP (temp, 0)),
3271						    XEXP (temp, 0)));
3272	      bitpos = 0;
3273	    }
3274
3275	  to_rtx = change_address (to_rtx, VOIDmode,
3276				   gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3277						 force_reg (ptr_mode, offset_rtx)));
3278	}
3279 if (volatilep)
3280 {
3281 if (GET_CODE (to_rtx) == MEM)
3282 {
3283 /* When the offset is zero, to_rtx is the address of the
3284 structure we are storing into, and hence may be shared.
3285 We must make a new MEM before setting the volatile bit. */
3286 if (offset == 0)
3287 to_rtx = copy_rtx (to_rtx);
3288
3289 MEM_VOLATILE_P (to_rtx) = 1;
3290 }
3291#if 0 /* This was turned off because, when a field is volatile
3292 in an object which is not volatile, the object may be in a register,
3293 and then we would abort over here. */
3294 else
3295 abort ();
3296#endif
3297 }
3298
3299 if (TREE_CODE (to) == COMPONENT_REF
3300 && TREE_READONLY (TREE_OPERAND (to, 1)))
3301 {
3302	  if (offset == 0)
3303 to_rtx = copy_rtx (to_rtx);
3304
3305 RTX_UNCHANGING_P (to_rtx) = 1;
3306 }
3307
3308      /* Check the access.  */
3309      if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
3310 {
3311 rtx to_addr;
3312 int size;
3313 int best_mode_size;
3314 enum machine_mode best_mode;
3315
3316 best_mode = get_best_mode (bitsize, bitpos,
3317 TYPE_ALIGN (TREE_TYPE (tem)),
3318 mode1, volatilep);
3319 if (best_mode == VOIDmode)
3320 best_mode = QImode;
3321
3322 best_mode_size = GET_MODE_BITSIZE (best_mode);
3323 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3324 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3325 size *= GET_MODE_SIZE (best_mode);
3326
3327 /* Check the access right of the pointer. */
3328 if (size)
3329 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3330			       to_addr, Pmode,
3331			       GEN_INT (size), TYPE_MODE (sizetype),
3332 GEN_INT (MEMORY_USE_WO),
3333 TYPE_MODE (integer_type_node));
3334 }
3335
3336 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3337 (want_value
3338 /* Spurious cast makes HPUX compiler happy. */
3339 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3340 : VOIDmode),
3341 unsignedp,
3342 /* Required alignment of containing datum. */
3343			    alignment,
3344 int_size_in_bytes (TREE_TYPE (tem)),
3345 get_alias_set (to));
3346 preserve_temp_slots (result);
3347 free_temp_slots ();
0088fcb1 3348 pop_temp_slots ();
3349
3350 /* If the value is meaningful, convert RESULT to the proper mode.
3351 Otherwise, return nothing. */
3352 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3353 TYPE_MODE (TREE_TYPE (from)),
3354 result,
3355 TREE_UNSIGNED (TREE_TYPE (to)))
3356	      : NULL_RTX);
3357 }
3358
3359 /* If the rhs is a function call and its value is not an aggregate,
3360 call the function before we start to compute the lhs.
3361 This is needed for correct code for cases such as
3362 val = setjmp (buf) on machines where reference to val
3363 requires loading up part of an address in a separate insn.
3364
3365 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3366 a promoted variable where the zero- or sign- extension needs to be done.
3367 Handling this in the normal way is safe because no computation is done
3368 before the call. */
3369 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
3370      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
3371      && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
3372    {
3373 rtx value;
3374
3375 push_temp_slots ();
3376 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
3377      if (to_rtx == 0)
3378	to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3379
3380 /* Handle calls that return values in multiple non-contiguous locations.
3381 The Irix 6 ABI has examples of this. */
3382 if (GET_CODE (to_rtx) == PARALLEL)
aac5cc16
RH
3383 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3384 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
fffa9c1d 3385 else if (GET_MODE (to_rtx) == BLKmode)
db3ec607 3386 emit_block_move (to_rtx, value, expr_size (from),
ff9b5bd8 3387 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
aaf87c45 3388 else
6419e5b0
DT
3389 {
3390#ifdef POINTERS_EXTEND_UNSIGNED
ab40f612
DT
3391 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3392 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
6419e5b0
DT
3393 value = convert_memory_address (GET_MODE (to_rtx), value);
3394#endif
3395 emit_move_insn (to_rtx, value);
3396 }
cd1db108
RS
3397 preserve_temp_slots (to_rtx);
3398 free_temp_slots ();
0088fcb1 3399 pop_temp_slots ();
709f5be1 3400 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3401 }

  /* Ordinary treatment.  Expand TO to get a REG or MEM rtx.
     Don't re-expand if it was expanded already (in COMPONENT_REF case).  */

  if (to_rtx == 0)
    {
      to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
      if (GET_CODE (to_rtx) == MEM)
        MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
    }

  /* Don't move directly into a return register.  */
  if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
    {
      rtx temp;

      push_temp_slots ();
      temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
      emit_move_insn (to_rtx, temp);
      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* In case we are returning the contents of an object which overlaps
     the place the value is being stored, use a safe function when copying
     a value through a pointer into a structure value return block.  */
  if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
      && current_function_returns_struct
      && !current_function_returns_pcc_struct)
    {
      rtx from_rtx, size;

      push_temp_slots ();
      size = expr_size (from);
      from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
                              EXPAND_MEMORY_USE_DONT);

      /* Copy the rights of the bitmap.  */
      if (current_function_check_memory_usage)
        emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
                           XEXP (to_rtx, 0), Pmode,
                           XEXP (from_rtx, 0), Pmode,
                           convert_to_mode (TYPE_MODE (sizetype),
                                            size, TREE_UNSIGNED (sizetype)),
                           TYPE_MODE (sizetype));

#ifdef TARGET_MEM_FUNCTIONS
      emit_library_call (memcpy_libfunc, 0,
                         VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
                         XEXP (from_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (sizetype),
                                          size, TREE_UNSIGNED (sizetype)),
                         TYPE_MODE (sizetype));
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
                         XEXP (to_rtx, 0), Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node),
                                          size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif

      preserve_temp_slots (to_rtx);
      free_temp_slots ();
      pop_temp_slots ();
      return want_value ? to_rtx : NULL_RTX;
    }

  /* Compute FROM and store the value in the rtx we got.  */

  push_temp_slots ();
  result = store_expr (from, to_rtx, want_value);
  preserve_temp_slots (result);
  free_temp_slots ();
  pop_temp_slots ();
  return want_value ? result : NULL_RTX;
}

/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */
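
/* Illustrative example (not part of the original comment): for a chained
   assignment such as `x = y = z;' the inner store is expanded with
   WANT_VALUE nonzero so that its result can feed the outer assignment,
   while a plain `y = z;' statement passes WANT_VALUE == 0 and ignores
   the return value.  */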

rtx
store_expr (exp, target, want_value)
     register tree exp;
     register rtx target;
     int want_value;
{
  register rtx temp;
  int dont_return_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value ? target : NULL_RTX;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
        {
          /* Expand EXP into a new pseudo.  */
          temp = gen_reg_rtx (GET_MODE (target));
          temp = expand_expr (exp, temp, GET_MODE (target), 0);
        }
      else
        temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);

      /* If target is volatile, ANSI requires accessing the value
         *from* the target, if it is accessed.  So make that happen.
         In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && want_value)
        dont_return_target = 1;
    }
  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
           && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
        temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      /* If we don't want a value, we can do the conversion inside EXP,
         which will often result in some optimizations.  Do the conversion
         in two steps: first change the signedness, if needed, then
         the extend.  But don't do this if the type of EXP is a subtype
         of something else since then the conversion might involve
         more than just converting modes.  */
      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
          && TREE_TYPE (TREE_TYPE (exp)) == 0)
        {
          if (TREE_UNSIGNED (TREE_TYPE (exp))
              != SUBREG_PROMOTED_UNSIGNED_P (target))
            exp
              = convert
                (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
                                          TREE_TYPE (exp)),
                 exp);

          exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
                                        SUBREG_PROMOTED_UNSIGNED_P (target)),
                         exp);
        }

      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If TEMP is a volatile MEM and we want a result value, make
         the access now so it gets done only once.  Likewise if
         it contains TARGET.  */
      if (GET_CODE (temp) == MEM && want_value
          && (MEM_VOLATILE_P (temp)
              || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
        temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
         sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
        temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                              TYPE_MODE (TREE_TYPE (exp)), temp,
                              SUBREG_PROMOTED_UNSIGNED_P (target));

      convert_move (SUBREG_REG (target), temp,
                    SUBREG_PROMOTED_UNSIGNED_P (target));

      /* If we promoted a constant, change the mode back down to match
         target.  Otherwise, the caller might get confused by a result whose
         mode is larger than expected.  */

      if (want_value && GET_MODE (temp) != GET_MODE (target)
          && GET_MODE (temp) != VOIDmode)
        {
          temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp)
            = SUBREG_PROMOTED_UNSIGNED_P (target);
        }

      return want_value ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* Return TARGET if it's a specified hardware register.
         If TARGET is a volatile mem ref, either return TARGET
         or return a reg copied *from* TARGET; ANSI requires this.

         Otherwise, if TEMP is not TARGET, return TEMP
         if it is constant (for efficiency),
         or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
            && REGNO (target) < FIRST_PSEUDO_REGISTER)
          && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          && ! rtx_equal_p (temp, target)
          && (CONSTANT_P (temp) || want_value))
        dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                          temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  if (current_function_check_memory_usage
      && GET_CODE (target) == MEM
      && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    {
      if (GET_CODE (temp) == MEM)
        emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
                           XEXP (target, 0), Pmode,
                           XEXP (temp, 0), Pmode,
                           expr_size (exp), TYPE_MODE (sizetype));
      else
        emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                           XEXP (target, 0), Pmode,
                           expr_size (exp), TYPE_MODE (sizetype),
                           GEN_INT (MEMORY_USE_WO),
                           TYPE_MODE (integer_type_node));
    }

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */
  /* If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
                              || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
          && GET_MODE (temp) != VOIDmode)
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
          if (dont_return_target)
            {
              /* In this case, we will return TEMP,
                 so make sure it has the proper mode.
                 But don't forget to store the value into TARGET.  */
              temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
              emit_move_insn (target, temp);
            }
          else
            convert_move (target, temp, unsignedp);
        }

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
        {
          /* Handle copying a string constant into an array.
             The string constant may be shorter than the array.
             So copy just the string's actual length, and clear the rest.  */
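          /* Illustrative example: for `char buf[8] = "abc";' the four
             bytes of the string, including its terminating null, are
             block-copied and the remaining four bytes are cleared.  */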
          rtx size;
          rtx addr;

          /* Get the size of the data type of the string,
             which is actually the size of the target.  */
          size = expr_size (exp);
          if (GET_CODE (size) == CONST_INT
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
            emit_block_move (target, temp, size,
                             TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
          else
            {
              /* Compute the size of the data to copy from the string.  */
              tree copy_size
                = size_binop (MIN_EXPR,
                              make_tree (sizetype, size),
                              convert (sizetype,
                                       build_int_2 (TREE_STRING_LENGTH (exp), 0)));
              rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
                                               VOIDmode, 0);
              rtx label = 0;

              /* Copy that much.  */
              emit_block_move (target, temp, copy_size_rtx,
                               TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

              /* Figure out how much is left in TARGET that we have to clear.
                 Do all calculations in ptr_mode.  */

              addr = XEXP (target, 0);
              addr = convert_modes (ptr_mode, Pmode, addr, 1);

              if (GET_CODE (copy_size_rtx) == CONST_INT)
                {
                  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
                  size = plus_constant (size, - TREE_STRING_LENGTH (exp));
                }
              else
                {
                  addr = force_reg (ptr_mode, addr);
                  addr = expand_binop (ptr_mode, add_optab, addr,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

                  size = expand_binop (ptr_mode, sub_optab, size,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

                  label = gen_label_rtx ();
                  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
                                           GET_MODE (size), 0, 0, label);
                }

              if (size != const0_rtx)
                {
                  /* Be sure we can write on ADDR.  */
                  if (current_function_check_memory_usage)
                    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                                       addr, Pmode,
                                       size, TYPE_MODE (sizetype),
                                       GEN_INT (MEMORY_USE_WO),
                                       TYPE_MODE (integer_type_node));
#ifdef TARGET_MEM_FUNCTIONS
                  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
                                     addr, ptr_mode,
                                     const0_rtx, TYPE_MODE (integer_type_node),
                                     convert_to_mode (TYPE_MODE (sizetype),
                                                      size,
                                                      TREE_UNSIGNED (sizetype)),
                                     TYPE_MODE (sizetype));
#else
                  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
                                     addr, ptr_mode,
                                     convert_to_mode (TYPE_MODE (integer_type_node),
                                                      size,
                                                      TREE_UNSIGNED (integer_type_node)),
                                     TYPE_MODE (integer_type_node));
#endif
                }

              if (label)
                emit_label (label);
            }
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
        emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
                         TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp),
                         TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else
        emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Otherwise return a pseudo copied from TARGET, unless TARGET is
     BLKmode or a hard register; in those cases return TARGET itself.  */
  else if (want_value && GET_MODE (target) != BLKmode
           && ! (GET_CODE (target) == REG
                 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}

/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;

    case COMPLEX_CST:
      return
        is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
        if (! is_zeros_p (TREE_VALUE (elt)))
          return 0;

      return 1;

    default:
      return 0;
    }
}

/* Return 1 if EXP contains mostly (3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        {
          /* If there are no ranges of true bits, it is all zero.  */
          return elt == NULL_TREE;
        }
      for (; elt; elt = TREE_CHAIN (elt))
        {
          /* We do not handle the case where the index is a RANGE_EXPR,
             so the statistic will be somewhat inaccurate.
             We do make a more accurate count in store_constructor itself,
             and since this function is only used for nested array elements,
             this should be close enough.  */
          if (mostly_zeros_p (TREE_VALUE (elt)))
            zeros++;
          elts++;
        }

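      /* The test below is the integer form of zeros/elts >= 3/4: e.g. a
         constructor with 8 elements qualifies once at least 6 of them
         are mostly zero, since 4 * 6 >= 3 * 8.  */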
      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}

/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (target, bitsize, bitpos,
                         mode, exp, type, cleared)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
         let store_field do the bitfield handling.  This is unlikely to
         generate unnecessary clear instructions anyway.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (bitpos != 0)
        target = change_address (target, VOIDmode,
                                 plus_constant (XEXP (target, 0),
                                                bitpos / BITS_PER_UNIT));
      store_constructor (exp, target, cleared);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp,
                 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
                 int_size_in_bytes (type), 0);
}

/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM.
   CLEARED is true if TARGET is known to have been zeroed.  */

static void
store_constructor (exp, target, cleared)
     tree exp;
     rtx target;
     int cleared;
{
  tree type = TREE_TYPE (exp);
#ifdef WORD_REGISTER_OPERATIONS
  rtx exp_size = expr_size (exp);
#endif

  /* We know our target cannot conflict, since safe_from_p has been called.  */
#if 0
  /* Don't try copying piece by piece into a hard register
     since that is vulnerable to being clobbered by EXP.
     Instead, construct in a pseudo register and then copy it all.  */
  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      rtx temp = gen_reg_rtx (GET_MODE (target));
      store_constructor (exp, temp, 0);
      emit_move_insn (target, temp);
      return;
    }
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      register tree elt;

      /* Inform later passes that the whole union value is dead.  */
      if (TREE_CODE (type) == UNION_TYPE
          || TREE_CODE (type) == QUAL_UNION_TYPE)
        emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* If we are building a static constructor into a register,
         set the initial value as zero so we can fold the value into
         a constant.  But if more than one register is involved,
         this probably loses.  */
      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
               && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
        {
          if (! cleared)
            emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

          cleared = 1;
        }

      /* If the constructor has fewer fields than the structure
         or if we are initializing the structure to mostly zeros,
         clear the whole structure first.  */
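      /* Illustrative example: for `struct { int a, b, c; } x = { 1 };'
         the constructor lists one field out of three, so the whole object
         is cleared first and only the field A is stored explicitly.  */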
      else if ((list_length (CONSTRUCTOR_ELTS (exp))
                != list_length (TYPE_FIELDS (type)))
               || mostly_zeros_p (exp))
        {
          if (! cleared)
            clear_storage (target, expr_size (exp),
                           TYPE_ALIGN (type) / BITS_PER_UNIT);

          cleared = 1;
        }
      else
        /* Inform later passes that the old value is dead.  */
        emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
         the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
        {
          register tree field = TREE_PURPOSE (elt);
          tree value = TREE_VALUE (elt);
          register enum machine_mode mode;
          int bitsize;
          int bitpos = 0;
          int unsignedp;
          tree pos, constant = 0, offset = 0;
          rtx to_rtx = target;

          /* Just ignore missing fields.
             We cleared the whole structure, above,
             if any fields are missing.  */
          if (field == 0)
            continue;

          if (cleared && is_zeros_p (TREE_VALUE (elt)))
            continue;

          bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
          unsignedp = TREE_UNSIGNED (field);
          mode = DECL_MODE (field);
          if (DECL_BIT_FIELD (field))
            mode = VOIDmode;

          pos = DECL_FIELD_BITPOS (field);
          if (TREE_CODE (pos) == INTEGER_CST)
            constant = pos;
          else if (TREE_CODE (pos) == PLUS_EXPR
                   && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
            constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
          else
            offset = pos;

          if (constant)
            bitpos = TREE_INT_CST_LOW (constant);

          if (offset)
            {
              rtx offset_rtx;

              if (contains_placeholder_p (offset))
                offset = build (WITH_RECORD_EXPR, sizetype,
                                offset, make_tree (TREE_TYPE (exp), target));

              offset = size_binop (FLOOR_DIV_EXPR, offset,
                                   size_int (BITS_PER_UNIT));

              offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
              if (GET_CODE (to_rtx) != MEM)
                abort ();

              if (GET_MODE (offset_rtx) != ptr_mode)
                {
#ifdef POINTERS_EXTEND_UNSIGNED
                  offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
                  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
                }

              to_rtx
                = change_address (to_rtx, VOIDmode,
                                  gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
                                                force_reg (ptr_mode, offset_rtx)));
            }
          if (TREE_READONLY (field))
            {
              if (GET_CODE (to_rtx) == MEM)
                to_rtx = copy_rtx (to_rtx);

              RTX_UNCHANGING_P (to_rtx) = 1;
            }

#ifdef WORD_REGISTER_OPERATIONS
          /* If this initializes a field that is smaller than a word, at the
             start of a word, try to widen it to a full word.
             This special case allows us to output C++ member function
             initializations in a form that the optimizers can understand.  */
          if (constant
              && GET_CODE (target) == REG
              && bitsize < BITS_PER_WORD
              && bitpos % BITS_PER_WORD == 0
              && GET_MODE_CLASS (mode) == MODE_INT
              && TREE_CODE (value) == INTEGER_CST
              && GET_CODE (exp_size) == CONST_INT
              && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
            {
              tree type = TREE_TYPE (value);
              if (TYPE_PRECISION (type) < BITS_PER_WORD)
                {
                  type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
                  value = convert (type, value);
                }
              if (BYTES_BIG_ENDIAN)
                value
                  = fold (build (LSHIFT_EXPR, type, value,
                                 build_int_2 (BITS_PER_WORD - bitsize, 0)));
              bitsize = BITS_PER_WORD;
              mode = word_mode;
            }
#endif
          store_constructor_field (to_rtx, bitsize, bitpos,
                                   mode, value, type, cleared);
        }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      int need_to_clear;
      tree domain = TYPE_DOMAIN (type);
      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (type);

      /* If the constructor has fewer elements than the array,
         clear the whole array first.  Similarly if this is a
         static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
        need_to_clear = 1;
      else
        {
          HOST_WIDE_INT count = 0, zero_count = 0;
          need_to_clear = 0;
          /* This loop is a more accurate version of the loop in
             mostly_zeros_p (it handles RANGE_EXPR in an index).
             It is also needed to check for missing elements.  */
          for (elt = CONSTRUCTOR_ELTS (exp);
               elt != NULL_TREE;
               elt = TREE_CHAIN (elt))
            {
              tree index = TREE_PURPOSE (elt);
              HOST_WIDE_INT this_node_count;
              if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
                {
                  tree lo_index = TREE_OPERAND (index, 0);
                  tree hi_index = TREE_OPERAND (index, 1);
                  if (TREE_CODE (lo_index) != INTEGER_CST
                      || TREE_CODE (hi_index) != INTEGER_CST)
                    {
                      need_to_clear = 1;
                      break;
                    }
                  this_node_count = TREE_INT_CST_LOW (hi_index)
                    - TREE_INT_CST_LOW (lo_index) + 1;
                }
              else
                this_node_count = 1;
              count += this_node_count;
              if (mostly_zeros_p (TREE_VALUE (elt)))
                zero_count += this_node_count;
            }
          /* Clear the entire array first if there are any missing elements,
             or if the incidence of zero elements is >= 75%.  */
          if (count < maxelt - minelt + 1
              || 4 * zero_count >= 3 * count)
            need_to_clear = 1;
        }
      if (need_to_clear)
        {
          if (! cleared)
            clear_storage (target, expr_size (exp),
                           TYPE_ALIGN (type) / BITS_PER_UNIT);
          cleared = 1;
        }
      else
        /* Inform later passes that the old value is dead.  */
        emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
         the corresponding element of TARGET, determined
         by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
           elt;
           elt = TREE_CHAIN (elt), i++)
        {
          register enum machine_mode mode;
          int bitsize;
          int bitpos;
          int unsignedp;
          tree value = TREE_VALUE (elt);
          tree index = TREE_PURPOSE (elt);
          rtx xtarget = target;

          if (cleared && is_zeros_p (value))
            continue;

          mode = TYPE_MODE (elttype);
          bitsize = GET_MODE_BITSIZE (mode);
          unsignedp = TREE_UNSIGNED (elttype);

          if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
            {
              tree lo_index = TREE_OPERAND (index, 0);
              tree hi_index = TREE_OPERAND (index, 1);
              rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
              struct nesting *loop;
              HOST_WIDE_INT lo, hi, count;
              tree position;

              /* If the range is constant and "small", unroll the loop.  */
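              /* Illustrative example: a GNU C range initializer such as
                 `[0 ... 9] = v' with 4-byte elements fits the 40-byte
                 budget tested below and is emitted as individual stores;
                 a larger constant range falls through to the runtime
                 loop in the else arm.  */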
              if (TREE_CODE (lo_index) == INTEGER_CST
                  && TREE_CODE (hi_index) == INTEGER_CST
                  && (lo = TREE_INT_CST_LOW (lo_index),
                      hi = TREE_INT_CST_LOW (hi_index),
                      count = hi - lo + 1,
                      (GET_CODE (target) != MEM
                       || count <= 2
                       || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
                           && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
                           <= 40 * 8))))
                {
                  lo -= minelt;  hi -= minelt;
                  for (; lo <= hi; lo++)
                    {
                      bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
                      store_constructor_field (target, bitsize, bitpos,
                                               mode, value, type, cleared);
                    }
                }
              else
                {
                  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
                  loop_top = gen_label_rtx ();
                  loop_end = gen_label_rtx ();

                  unsignedp = TREE_UNSIGNED (domain);

                  index = build_decl (VAR_DECL, NULL_TREE, domain);

                  DECL_RTL (index) = index_r
                    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
                                                 &unsignedp, 0));

                  if (TREE_CODE (value) == SAVE_EXPR
                      && SAVE_EXPR_RTL (value) == 0)
                    {
                      /* Make sure value gets expanded once before the
                         loop.  */
                      expand_expr (value, const0_rtx, VOIDmode, 0);
                      emit_queue ();
                    }
                  store_expr (lo_index, index_r, 0);
                  loop = expand_start_loop (0);

                  /* Assign value to element index.  */
                  position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
                                         size_int (BITS_PER_UNIT));
                  position = size_binop (MULT_EXPR,
                                         size_binop (MINUS_EXPR, index,
                                                     TYPE_MIN_VALUE (domain)),
                                         position);
                  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
                  addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
                  xtarget = change_address (target, mode, addr);
                  if (TREE_CODE (value) == CONSTRUCTOR)
                    store_constructor (value, xtarget, cleared);
                  else
                    store_expr (value, xtarget, 0);

                  expand_exit_loop_if_false (loop,
                                             build (LT_EXPR, integer_type_node,
                                                    index, hi_index));

                  expand_increment (build (PREINCREMENT_EXPR,
                                           TREE_TYPE (index),
                                           index, integer_one_node), 0, 0);
                  expand_end_loop ();
                  emit_label (loop_end);

                  /* Needed by stupid register allocation, to extend the
                     lifetime of pseudo-regs used by target past the end
                     of the loop.  */
                  emit_insn (gen_rtx_USE (GET_MODE (target), target));
                }
            }
          else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
                   || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
            {
              rtx pos_rtx, addr;
              tree position;

              if (index == 0)
                index = size_int (i);

              if (minelt)
                index = size_binop (MINUS_EXPR, index,
                                    TYPE_MIN_VALUE (domain));
              position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
                                     size_int (BITS_PER_UNIT));
              position = size_binop (MULT_EXPR, index, position);
              pos_rtx = expand_expr (position, 0, VOIDmode, 0);
              addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
              xtarget = change_address (target, mode, addr);
              store_expr (value, xtarget, 0);
            }
          else
            {
              if (index != 0)
                bitpos = ((TREE_INT_CST_LOW (index) - minelt)
                          * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
              else
                bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
              store_constructor_field (target, bitsize, bitpos,
                                       mode, value, type, cleared);
            }
        }
    }
  /* Set constructor assignments.  */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      int nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
         parts of the constructor, use that to initialize the target,
         and then "or" in whatever non-constant ranges we need in addition.

         If a large set is all zero or all ones, it is
         probably better to set it using memset (if available) or bzero.
         Also, if a large set has just a single range, it may also be
         better to first clear the set (using bzero/memset), and then
         set the bits we want.  */

      /* Check for all zeros.  */
      if (elt == NULL_TREE)
        {
          if (!cleared)
            clear_storage (target, expr_size (exp),
                           TYPE_ALIGN (type) / BITS_PER_UNIT);
          return;
        }

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
                              size_binop (MINUS_EXPR, domain_max, domain_min),
                              size_one_node);

      if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
        abort ();
      nbits = TREE_INT_CST_LOW (bitlength);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
         are "complicated" (more than one range), initialize (the
         constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
          || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
        {
          int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
          enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
          char *bit_buffer = (char *) alloca (nbits);
          HOST_WIDE_INT word = 0;
          int bit_pos = 0;
          int ibit = 0;
          int offset = 0;  /* In bytes from beginning of set.  */
          elt = get_set_constructor_bits (exp, bit_buffer, nbits);
          for (;;)
            {
              if (bit_buffer[ibit])
                {
                  if (BYTES_BIG_ENDIAN)
                    word |= (1 << (set_word_size - 1 - bit_pos));
                  else
                    word |= 1 << bit_pos;
                }
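              /* Illustrative example: with set_word_size == 8, member
                 bits 0 and 3 pack to the word 0x09 on a little-endian
                 target, and to 0x90 when BYTES_BIG_ENDIAN numbers bits
                 from the most significant end.  */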
4402 bit_pos++; ibit++;
4403 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 4404 {
e1a43f73
PB
4405 if (word != 0 || ! cleared)
4406 {
4407 rtx datum = GEN_INT (word);
4408 rtx to_rtx;
0f41302f
MS
4409 /* The assumption here is that it is safe to use
4410 XEXP if the set is multi-word, but not if
4411 it's single-word. */
e1a43f73
PB
4412 if (GET_CODE (target) == MEM)
4413 {
4414 to_rtx = plus_constant (XEXP (target, 0), offset);
4415 to_rtx = change_address (target, mode, to_rtx);
4416 }
4417 else if (offset == 0)
4418 to_rtx = target;
4419 else
4420 abort ();
4421 emit_move_insn (to_rtx, datum);
4422 }
b4ee5a72
PB
4423 if (ibit == nbits)
4424 break;
4425 word = 0;
4426 bit_pos = 0;
4427 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
4428 }
4429 }
071a6595 4430 }
e1a43f73
PB
4431 else if (!cleared)
4432 {
0f41302f 4433 /* Don't bother clearing storage if the set is all ones. */
e1a43f73
PB
4434 if (TREE_CHAIN (elt) != NULL_TREE
4435 || (TREE_PURPOSE (elt) == NULL_TREE
4436 ? nbits != 1
4437 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4438 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4439 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4440 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4441 != nbits))))
4442 clear_storage (target, expr_size (exp),
4443 TYPE_ALIGN (type) / BITS_PER_UNIT);
4444 }
4445
4446 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595
PB
4447 {
4448 /* start of range of element or NULL */
4449 tree startbit = TREE_PURPOSE (elt);
4450 /* end of range of element, or element value */
4451 tree endbit = TREE_VALUE (elt);
381127e8 4452#ifdef TARGET_MEM_FUNCTIONS
071a6595 4453 HOST_WIDE_INT startb, endb;
381127e8 4454#endif
071a6595
PB
4455 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4456
4457 bitlength_rtx = expand_expr (bitlength,
4458 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4459
4460 /* handle non-range tuple element like [ expr ] */
4461 if (startbit == NULL_TREE)
4462 {
4463 startbit = save_expr (endbit);
4464 endbit = startbit;
4465 }
4466 startbit = convert (sizetype, startbit);
4467 endbit = convert (sizetype, endbit);
4468 if (! integer_zerop (domain_min))
4469 {
4470 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4471 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4472 }
4473 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4474 EXPAND_CONST_ADDRESS);
4475 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4476 EXPAND_CONST_ADDRESS);
4477
4478 if (REG_P (target))
4479 {
4480 targetx = assign_stack_temp (GET_MODE (target),
4481 GET_MODE_SIZE (GET_MODE (target)),
4482 0);
4483 emit_move_insn (targetx, target);
4484 }
4485 else if (GET_CODE (target) == MEM)
4486 targetx = target;
4487 else
4488 abort ();
4489
4490#ifdef TARGET_MEM_FUNCTIONS
4491 /* Optimization: If startbit and endbit are
9faa82d8 4492 constants divisible by BITS_PER_UNIT,
0f41302f 4493 call memset instead. */
071a6595
PB
4494 if (TREE_CODE (startbit) == INTEGER_CST
4495 && TREE_CODE (endbit) == INTEGER_CST
4496 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 4497 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 4498 {
071a6595
PB
4499 emit_library_call (memset_libfunc, 0,
4500 VOIDmode, 3,
e1a43f73
PB
4501 plus_constant (XEXP (targetx, 0),
4502 startb / BITS_PER_UNIT),
071a6595 4503 Pmode,
3b6f75e2 4504 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 4505 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 4506 TYPE_MODE (sizetype));
071a6595
PB
4507 }
4508 else
4509#endif
4510 {
38a448ca 4511 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
071a6595
PB
4512 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4513 bitlength_rtx, TYPE_MODE (sizetype),
4514 startbit_rtx, TYPE_MODE (sizetype),
4515 endbit_rtx, TYPE_MODE (sizetype));
4516 }
4517 if (REG_P (target))
4518 emit_move_insn (target, targetx);
4519 }
4520 }
bbf6f052
RK
4521
4522 else
4523 abort ();
4524}

/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */

static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
             unsignedp, align, total_size, alias_set)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
     int alias_set;
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  if (bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
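  /* E.g. a 5-bit field gives WIDTH_MASK == 0x1f; the mask is used below
     to strip the bits beyond the field when the caller wants the stored
     value back.  */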

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_stack_temp (GET_MODE (target),
                                      GET_MODE_SIZE (GET_MODE (target)), 0);
      rtx blk_object = copy_rtx (object);

      MEM_SET_IN_STRUCT_P (object, 1);
      MEM_SET_IN_STRUCT_P (blk_object, 1);
      PUT_MODE (blk_object, BLKmode);

      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
        emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
                   align, total_size, alias_set);

      /* Even though we aren't returning target, we need to
         give it the updated value.  */
      emit_move_insn (target, object);

      return blk_object;
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
         store it as a bit field.  */
      || (SLOW_UNALIGNED_ACCESS
          && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
      || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If BITSIZE is narrower than the size of the type of EXP
         we will be narrowing TEMP.  Normally, what's wanted are the
         low-order bits.  However, if EXP's type is a record and this is
         a big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
          && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
          && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
        temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
                             size_int (GET_MODE_BITSIZE (GET_MODE (temp))
                                       - bitsize),
                             temp, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
         MODE.  */
      if (mode != VOIDmode && mode != BLKmode
          && mode != TYPE_MODE (TREE_TYPE (exp)))
        temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
         must be in memory and BITPOS must be aligned on a byte
         boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
        {
          if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
              || bitpos % BITS_PER_UNIT != 0)
            abort ();

          target = change_address (target, VOIDmode,
                                   plus_constant (XEXP (target, 0),
                                                  bitpos / BITS_PER_UNIT));

          emit_block_move (target, temp,
                           GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                    / BITS_PER_UNIT),
                           1);

          return value_mode == VOIDmode ? const0_rtx : target;
        }

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
      if (value_mode != VOIDmode)
        {
          /* The caller wants an rtx for the value.  */
          /* If possible, avoid refetching from the bitfield itself.  */
          if (width_mask != 0
              && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
            {
              tree count;
              enum machine_mode tmode;

              if (unsignedp)
                return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
              tmode = GET_MODE (temp);
              if (tmode == VOIDmode)
                tmode = value_mode;
              count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
              temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
              return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
            }
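          /* The LSHIFT/RSHIFT pair above sign-extends the just-stored
             field when it is signed: e.g. an 8-bit field held in a
             32-bit mode is shifted left 24 bits and then arithmetically
             back right 24 bits.  */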
          return extract_bit_field (target, bitsize, bitpos, unsignedp,
                                    NULL_RTX, value_mode, 0, align,
                                    total_size);
        }
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx;

      /* If a value is wanted, it must be the lhs;
         so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
          && ! CONSTANT_ADDRESS_P (addr)
          /* A frame-pointer reference is already stable.  */
          && ! (GET_CODE (addr) == PLUS
                && GET_CODE (XEXP (addr, 1)) == CONST_INT
                && (XEXP (addr, 0) == virtual_incoming_args_rtx
                    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
        addr = copy_to_reg (addr);

      /* Now build a reference to just the desired component.  */

      to_rtx = copy_rtx (change_address (target, mode,
                                         plus_constant (addr,
                                                        (bitpos
                                                         / BITS_PER_UNIT))));
      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      MEM_ALIAS_SET (to_rtx) = alias_set;

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}

/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.
   We set *PALIGNMENT to the alignment in bytes of the address that will be
   computed.  This is the alignment of the thing we return if *POFFSET
   is zero, but can be less strictly aligned if *POFFSET is nonzero.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */
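
/* Illustrative example: for `a.b[i].c' this function returns the tree
   for `a'; *PBITPOS accumulates the constant bit offsets of B and C,
   while *POFFSET holds the variable `i * sizeof (element)' term,
   measured in units.  */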
bbf6f052
RK
4739
4740tree
4969d05d 4741get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
839c4796 4742 punsignedp, pvolatilep, palignment)
bbf6f052
RK
4743 tree exp;
4744 int *pbitsize;
4745 int *pbitpos;
7bb0943f 4746 tree *poffset;
bbf6f052
RK
4747 enum machine_mode *pmode;
4748 int *punsignedp;
4749 int *pvolatilep;
839c4796 4750 int *palignment;
bbf6f052 4751{
b50d17a1 4752 tree orig_exp = exp;
bbf6f052
RK
4753 tree size_tree = 0;
4754 enum machine_mode mode = VOIDmode;
742920c7 4755 tree offset = integer_zero_node;
c84e2712 4756 unsigned int alignment = BIGGEST_ALIGNMENT;
bbf6f052
RK
4757
4758 if (TREE_CODE (exp) == COMPONENT_REF)
4759 {
4760 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4761 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4762 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4763 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4764 }
4765 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4766 {
4767 size_tree = TREE_OPERAND (exp, 1);
4768 *punsignedp = TREE_UNSIGNED (exp);
4769 }
4770 else
4771 {
4772 mode = TYPE_MODE (TREE_TYPE (exp));
ab87f8c8
JL
4773 if (mode == BLKmode)
4774 size_tree = TYPE_SIZE (TREE_TYPE (exp));
4775
bbf6f052
RK
4776 *pbitsize = GET_MODE_BITSIZE (mode);
4777 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4778 }
4779
4780 if (size_tree)
4781 {
4782 if (TREE_CODE (size_tree) != INTEGER_CST)
e7c33f54
RK
4783 mode = BLKmode, *pbitsize = -1;
4784 else
4785 *pbitsize = TREE_INT_CST_LOW (size_tree);
bbf6f052
RK
4786 }
4787
4788 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4789 and find the ultimate containing object. */
4790
4791 *pbitpos = 0;
4792
4793 while (1)
4794 {
7bb0943f 4795 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 4796 {
7bb0943f
RS
4797 tree pos = (TREE_CODE (exp) == COMPONENT_REF
		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
		      : TREE_OPERAND (exp, 2));
	  tree constant = integer_zero_node, var = pos;

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (pos == 0)
	    break;

	  /* Assume here that the offset is a multiple of a unit.
	     If not, there should be an explicitly added constant.  */
	  if (TREE_CODE (pos) == PLUS_EXPR
	      && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
	    constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
	  else if (TREE_CODE (pos) == INTEGER_CST)
	    constant = pos, var = integer_zero_node;

	  *pbitpos += TREE_INT_CST_LOW (constant);
	  offset = size_binop (PLUS_EXPR, offset,
			       size_binop (EXACT_DIV_EXPR, var,
					   size_int (BITS_PER_UNIT)));
	}

      else if (TREE_CODE (exp) == ARRAY_REF)
	{
	  /* This code is based on the code in case ARRAY_REF in expand_expr
	     below.  We assume here that the size of an array element is
	     always an integral multiple of BITS_PER_UNIT.  */

	  tree index = TREE_OPERAND (exp, 1);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  tree low_bound
	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	  tree index_type = TREE_TYPE (index);
	  tree xindex;

	  if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
	    {
	      index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
			       index);
	      index_type = TREE_TYPE (index);
	    }

	  /* Optimize the special case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

	     But sizetype isn't quite right either (especially if
	     the low bound is negative).  FIXME */

	  if (! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, index_type, index,
				 convert (sizetype, low_bound)));

	  if (TREE_CODE (index) == INTEGER_CST)
	    {
	      index = convert (sbitsizetype, index);
	      index_type = TREE_TYPE (index);
	    }

	  xindex = fold (build (MULT_EXPR, sbitsizetype, index,
				convert (sbitsizetype,
					 TYPE_SIZE (TREE_TYPE (exp)))));

	  if (TREE_CODE (xindex) == INTEGER_CST
	      && TREE_INT_CST_HIGH (xindex) == 0)
	    *pbitpos += TREE_INT_CST_LOW (xindex);
	  else
	    {
	      /* Either the bit offset calculated above is not constant, or
		 it overflowed.  In either case, redo the multiplication
		 against the size in units.  This is especially important
		 in the non-constant case to avoid a division at runtime.  */
	      xindex = fold (build (MULT_EXPR, ssizetype, index,
				    convert (ssizetype,
					     TYPE_SIZE_UNIT (TREE_TYPE (exp)))));

	      if (contains_placeholder_p (xindex))
		xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);

	      offset = size_binop (PLUS_EXPR, offset, xindex);
	    }
	}
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
			   && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
			       != UNION_TYPE))
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      /* If the offset is non-constant already, then we can't assume any
	 alignment more than the alignment here.  */
      if (! integer_zerop (offset))
	alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

      exp = TREE_OPERAND (exp, 0);
    }

  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
    alignment = MIN (alignment, DECL_ALIGN (exp));
  else if (TREE_TYPE (exp) != 0)
    alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

  if (integer_zerop (offset))
    offset = 0;

  if (offset != 0 && contains_placeholder_p (offset))
    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);

  *pmode = mode;
  *poffset = offset;
  *palignment = alignment / BITS_PER_UNIT;
  return exp;
}
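
/* A worked illustration (hedged, not from the original sources): for a
   reference such as REC.FLD[I], the loop above folds the constant bit
   position of FLD into *PBITPOS, accumulates the variable part as an
   OFFSET tree of the form I * sizeof (element), and returns the
   innermost object REC.  */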

/* Subroutine of expand_expr: compute memory_usage from modifier.  */

static enum memory_use_mode
get_memory_usage_from_modifier (modifier)
     enum expand_modifier modifier;
{
  switch (modifier)
    {
    case EXPAND_NORMAL:
    case EXPAND_SUM:
      return MEMORY_USE_RO;

    case EXPAND_MEMORY_USE_WO:
      return MEMORY_USE_WO;

    case EXPAND_MEMORY_USE_RW:
      return MEMORY_USE_RW;

    case EXPAND_MEMORY_USE_DONT:
      /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
	 MEMORY_USE_DONT, because they are modifiers to a call of
	 expand_expr in the ADDR_EXPR case of expand_expr.  */
    case EXPAND_CONST_ADDRESS:
    case EXPAND_INITIALIZER:
      return MEMORY_USE_DONT;

    case EXPAND_MEMORY_USE_BAD:
    default:
      abort ();
    }
}
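
/* Illustrative mapping (a hedged summary, not additional logic): an
   ordinary read expanded with EXPAND_NORMAL or EXPAND_SUM is checked as
   MEMORY_USE_RO, a store target expanded with EXPAND_MEMORY_USE_WO as
   MEMORY_USE_WO, and address-only uses such as EXPAND_INITIALIZER are
   not checked at all.  */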
\f
/* Given an rtx VALUE that may contain additions and multiplications,
   return an equivalent value that just refers to a register or memory.
   This is done by generating instructions to perform the arithmetic
   and returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);

  /* Check for a PIC address load.  */
  if (flag_pic
      && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }
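
  /* Illustrative note (target-dependent, hedged): on a PIC target an
     address such as (plus (reg pic_offset_table) (symbol_ref "x")) is
     moved as a unit here rather than decomposed below, so the backend's
     PIC load patterns can match it in one insn.  */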

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
			  force_operand (op2, NULL_RTX),
			  target, 0);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
	{
	  binoptab = add_optab;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_binop (GET_MODE (value), binoptab,
				   XEXP (XEXP (value, 0), 0), op2,
				   subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_binop (GET_MODE (value), binoptab, temp,
			       force_operand (XEXP (XEXP (value, 0), 1), 0),
			       target, 0, OPTAB_LIB_WIDEN);
	}
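
      /* For instance (a hedged example): given
	 (plus (plus (reg virtual-stack-vars) (reg 70)) (const_int 8)),
	 the block above first forms virtual-stack-vars + 8, which
	 instantiation can rewrite as a single frame offset, and only
	 then adds (reg 70).  */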

      tmp = force_operand (XEXP (value, 0), subtarget);
      /* We give UNSIGNEDP = 0 to expand_binop
	 because the only operations we are expanding here are signed ones.  */
      return expand_binop (GET_MODE (value), binoptab, tmp,
			   force_operand (op2, NULL_RTX),
			   target, 0, OPTAB_LIB_WIDEN);
    }
  return value;
}
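
/* Usage sketch (hedged, not from the original sources): handed
   (plus:SI (mult:SI (reg 60) (const_int 4)) (reg 61)), force_operand
   emits a multiply and an add and returns a pseudo register holding the
   sum, suitable for use as an address or insn operand.  */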
\f
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
	rtx target = assign_temp (part_type, 0, 1, 1);
	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
	parts = tree_cons (to_be_saved,
			   build (RTL_EXPR, part_type, NULL_TREE,
				  (tree) target),
			   parts);
	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}

/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */

static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else if (TREE_PURPOSE (tail))
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}

/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

static int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static int save_expr_count;
  static int save_expr_size = 0;
  static tree *save_expr_rewritten;
  static tree save_expr_trees[256];

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode))
    return 1;

  if (top_p && save_expr_size == 0)
    {
      int rtn;

      save_expr_count = 0;
      save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
      save_expr_rewritten = &save_expr_trees[0];

      rtn = safe_from_p (x, exp, 1);

      for (i = 0; i < save_expr_count; ++i)
	{
	  if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
	    abort ();
	  TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
	}

      save_expr_size = 0;

      return rtn;
    }

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
	  || (GET_CODE (XEXP (x, 0)) == PLUS
	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	return ((TREE_VALUE (exp) == 0
		 || safe_from_p (x, TREE_VALUE (exp), 0))
		&& (TREE_CHAIN (exp) == 0
		    || safe_from_p (x, TREE_CHAIN (exp), 0)));
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  return (staticp (TREE_OPERAND (exp, 0))
		  || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
		  || TREE_STATIC (exp));

	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM)
	    return 0;
	  break;

	case CALL_EXPR:
	  exp_rtl = CALL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    {
	      /* Assume that the call will clobber all hard registers and
		 all of memory.  */
	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
		  || GET_CODE (x) == MEM)
		return 0;
	    }

	  break;

	case RTL_EXPR:
	  /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;

	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  if (exp_rtl)
	    break;

	  /* This SAVE_EXPR might appear many times in the top-level
	     safe_from_p() expression, and if it has a complex
	     subexpression, examining it multiple times could result
	     in a combinatorial explosion.  E.g. on an Alpha
	     running at least 200MHz, a Fortran test case compiled with
	     optimization took about 28 minutes to compile -- even though
	     it was only a few lines long, and the complicated line causing
	     so much time to be spent in the earlier version of safe_from_p()
	     had only 293 or so unique nodes.

	     So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
	     where it is so we can turn it back in the top-level safe_from_p()
	     when we're done.  */

	  /* For now, don't bother re-sizing the array.  */
	  if (save_expr_count >= save_expr_size)
	    return 0;
	  save_expr_rewritten[save_expr_count++] = exp;

	  nops = tree_code_length[(int) SAVE_EXPR];
	  for (i = 0; i < nops; i++)
	    {
	      tree operand = TREE_OPERAND (exp, i);
	      if (operand == NULL_TREE)
		continue;
	      TREE_SET_CODE (exp, ERROR_MARK);
	      if (!safe_from_p (x, operand, 0))
		return 0;
	      TREE_SET_CODE (exp, SAVE_EXPR);
	    }
	  TREE_SET_CODE (exp, ERROR_MARK);
	  return 1;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

	case METHOD_CALL_EXPR:
	  /* This takes an rtx argument, but shouldn't appear here.  */
	  abort ();

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
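
/* Typical use (a hedged illustration): a caller deciding whether a
   subexpression may be computed directly into TARGET writes

     if (target != 0 && safe_from_p (target, exp, 1))
       op0 = expand_expr (exp, target, mode, EXPAND_NORMAL);

   and falls back to a fresh temporary when safe_from_p returns zero.  */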

/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}

/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}

#ifdef MAX_INTEGER_COMPUTATION_MODE
void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code;
  enum machine_mode mode;

  /* Strip any NOPs that don't change the mode.  */
  STRIP_NOPS (exp);
  code = TREE_CODE (exp);

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

  /* First check the type of the overall operation.  We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }
}
#endif
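
/* Hedged example: if a port defines MAX_INTEGER_COMPUTATION_MODE as
   SImode, expanding a DImode addition makes one of the checks above see
   an integer mode wider than SImode and issue the "unsupported wide
   integer operation" fatal error.  */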

\f
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */

rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
     This is static so it will be accessible to our recursive callees.  */
  static tree placeholder_list = 0;
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode;
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;
  /* Used by check-memory-usage to make modifier read only.  */
  enum expand_modifier ro_modifier;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;
    }

  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
		 || code == COND_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* Make a read-only version of the modifier.  */
  if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
      || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
    ro_modifier = modifier;
  else
    ro_modifier = EXPAND_NORMAL;

  /* Don't use hard regs as subtargets, because the combiner
     can only handle pseudo regs.  */
  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
    subtarget = 0;
  /* Avoid subtargets inside loops,
     since they hide some invariant expressions.  */
  if (preserve_subexpressions_p ())
    subtarget = 0;

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1')
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, ro_modifier);
      else if (TREE_CODE_CLASS (code) == '2'
	       || TREE_CODE_CLASS (code) == '<')
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, ro_modifier);

      target = 0;
    }
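
  /* Illustrative example: for a statement such as (void) (a + b) with no
     side effects we returned const0_rtx above without emitting insns;
     had A been volatile, only the operand reads would have been
     expanded, each with const0_rtx as its target.  */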

#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

  if (target
      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    fatal ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif

  /* If we will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Handle using a label in a containing function.  */
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  {
	    struct function *p = find_function_data (function);
	    /* Allocate in the memory associated with the function
	       that the label is in.  */
	    push_obstacks (p->function_obstack,
			   p->function_maybepermanent_obstack);

	    p->expr->x_forced_labels
	      = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
				   p->expr->x_forced_labels);
	    pop_obstacks ();
	  }
	else
	  {
	    if (modifier == EXPAND_INITIALIZER)
	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
						 label_rtx (exp),
						 forced_labels);
	  }
	temp = gen_rtx_MEM (FUNCTION_MODE,
			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}

      /* ... fall through ...  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	{
	  push_obstacks_nochange ();
	  end_temporary_allocation ();
	  layout_decl (exp, 0);
	  PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
	  pop_obstacks ();
	}

      /* Although static-storage variables start off initialized, according to
	 ANSI C, a memcpy could overwrite them with uninitialized values.  So
	 we check them too.  This also lets us check for read-only variables
	 accessed via a non-const declaration, in case it won't be detected
	 any other way (e.g., in an embedded system or OS kernel without
	 memory protection).

	 Aggregates are not checked here; they're handled elsewhere.  */
      if (current_function && current_function_check_memory_usage
	  && code == VAR_DECL
	  && GET_CODE (DECL_RTL (exp)) == MEM
	  && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	{
	  enum memory_use_mode memory_usage;
	  memory_usage = get_memory_usage_from_modifier (modifier);

	  if (memory_usage != MEMORY_USE_DONT)
	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			       XEXP (DECL_RTL (exp), 0), Pmode,
			       GEN_INT (int_size_in_bytes (type)),
			       TYPE_MODE (sizetype),
			       GEN_INT (memory_usage),
			       TYPE_MODE (integer_type_node));
	}

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
	  && context != inline_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  mark_addressable (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr = gen_rtx_MEM (Pmode,
				fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);
	  temp = change_address (DECL_RTL (exp), mode, addr);
	}

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
	temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
			       XEXP (DECL_RTL (exp), 0));

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && modifier != EXPAND_CONST_ADDRESS
	       && modifier != EXPAND_SUM
	       && modifier != EXPAND_INITIALIZER
	       && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
	temp = change_address (DECL_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0),
			      DECL_ALIGN (exp) / BITS_PER_UNIT);

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
	  && GET_MODE (DECL_RTL (exp)) != mode)
	{
	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
	    abort ();

	  temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}
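
      /* Hedged example: on a port whose PROMOTE_MODE widens QImode
	 variables to SImode, DECL_RTL is an SImode pseudo, and a QImode
	 read returns (subreg:QI (reg:SI n) 0) with SUBREG_PROMOTED_VAR_P
	 set, so later code knows the upper bits already hold a valid
	 extension.  */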

      return DECL_RTL (exp);

    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp),
				 mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
			  EXPAND_MEMORY_USE_BAD);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
	output_constant_def (exp);

      /* TREE_CST_RTL probably contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
	      || (flag_force_addr
		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
	return change_address (TREE_CST_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);

    case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
	char *saved_input_filename = input_filename;
	int saved_lineno = lineno;
	input_filename = EXPR_WFL_FILENAME (exp);
	lineno = EXPR_WFL_LINENO (exp);
	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
	  emit_line_note (input_filename, lineno);
	/* Possibly avoid switching back and forth here.  */
	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
	input_filename = saved_input_filename;
	lineno = saved_lineno;
	return to_return;
      }

    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
	 initialization function and move it into our context.  */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  /* The following call just exists to abort if the context is
	     not of a containing function.  */
	  find_function_data (context);

	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return change_address (temp, mode,
				 fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (type, 3, 0, 0);

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp, 0);

	  TREE_USED (exp) = 1;
	}

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);

    case UNSAVE_EXPR:
      {
	rtx temp;
	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
	return temp;
      }

    case PLACEHOLDER_EXPR:
      {
	tree placeholder_expr;

	/* If there is an object on the head of the placeholder list,
	   see if some object in it is of type TYPE or a pointer to it.  For
	   further information, see tree.def.  */
	for (placeholder_expr = placeholder_list;
	     placeholder_expr != 0;
	     placeholder_expr = TREE_CHAIN (placeholder_expr))
	  {
	    tree need_type = TYPE_MAIN_VARIANT (type);
	    tree object = 0;
	    tree old_list = placeholder_list;
	    tree elt;

	    /* Find the outermost reference that is of the type we want.
	       If none, see if any object has a type that is a pointer to
	       the type we want.  */
	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
		object = elt;

	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (POINTER_TYPE_P (TREE_TYPE (elt))
		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		      == need_type))
		object = build1 (INDIRECT_REF, need_type, elt);

	    if (object != 0)
	      {
		/* Expand this object skipping the list entries before
		   it was found in case it is also a PLACEHOLDER_EXPR.
		   In that case, we want to translate it using subsequent
		   entries.  */
		placeholder_list = TREE_CHAIN (placeholder_expr);
		temp = expand_expr (object, original_target, tmode,
				    ro_modifier);
		placeholder_list = old_list;
		return temp;
	      }
	  }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
	 and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
			    tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;

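      /* Illustrative note (a hedged summary of tree.def): this pairing is
	 what lets a self-referential size such as REC.LEN appear inside a
	 type -- WITH_RECORD_EXPR pushes REC on PLACEHOLDER_LIST, and the
	 PLACEHOLDER_EXPR case above finds it there when the size is
	 expanded.  */
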
    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL_PTR,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
	expand_expr_stmt (LABELED_BLOCK_BODY (exp));
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
	sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;

    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (0);

	/* Mark the corresponding BLOCK for output in its proper place.  */
	if (TREE_OPERAND (exp, 2) != 0
	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
	  insert_block (TREE_OPERAND (exp, 2));

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (DECL_RTL (vars) == 0)
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insns (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;
	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
			&& (!MOVE_BY_PIECES_P
			    (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
			     TYPE_ALIGN (type) / BITS_PER_UNIT))
			&& ! mostly_zeros_p (exp))))
	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM
	      && (! memory_address_p (GET_MODE (constructor),
				      XEXP (constructor, 0))
		  || (flag_force_addr
		      && GET_CODE (XEXP (constructor, 0)) != REG)))
	    constructor = change_address (constructor, VOIDmode,
					  XEXP (constructor, 0));
	  return constructor;
	}

      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL)
	    {
	      if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (TREE_READONLY (exp))
	    {
	      if (GET_CODE (target) == MEM)
		target = copy_rtx (target);

	      RTX_UNCHANGING_P (target) = 1;
	    }

	  store_constructor (exp, target, 0);
	  return target;
	}

    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree exp2;
	tree index;
	tree string = string_constant (exp1, &index);
	int i;

	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && !TREE_INT_CST_HIGH (index)
	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1
	    && modifier != EXPAND_MEMORY_USE_WO)
	  return GEN_INT (TREE_STRING_POINTER (string)[i]);

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);

	if (current_function && current_function_check_memory_usage
	    && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		in_check_memory_usage = 1;
		emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				   op0, Pmode,
				   GEN_INT (int_size_in_bytes (type)),
				   TYPE_MODE (sizetype),
				   GEN_INT (memory_usage),
				   TYPE_MODE (integer_type_node));
		in_check_memory_usage = 0;
	      }
	  }

	temp = gen_rtx_MEM (mode, op0);
	/* If address was computed by addition,
	   mark this as an element of an aggregate.  */
	if (TREE_CODE (exp1) == PLUS_EXPR
	    || (TREE_CODE (exp1) == SAVE_EXPR
		&& TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
	    || AGGREGATE_TYPE_P (TREE_TYPE (exp))
	    || (TREE_CODE (exp1) == ADDR_EXPR
		&& (exp2 = TREE_OPERAND (exp1, 0))
		&& AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
	  MEM_SET_IN_STRUCT_P (temp, 1);

	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
	MEM_ALIAS_SET (temp) = get_alias_set (exp);

	/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
	   here, because, in C and C++, the fact that a location is accessed
	   through a pointer to const does not mean that the value there can
	   never change.  Languages where it can never change should
	   also set TREE_STATIC.  */
	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
	return temp;
      }

    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();

      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = TREE_OPERAND (exp, 1);
	tree index_type = TREE_TYPE (index);
	HOST_WIDE_INT i;

	/* Optimize the special case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1,
	   and its mode is QI.  Without the conversion, (ARRAY
	   +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	   +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

	   But sizetype isn't quite right either (especially if
	   the low bound is negative).  FIXME */

	if (! integer_zerop (low_bound))
	  index = fold (build (MINUS_EXPR, index_type, index,
			       convert (sizetype, low_bound)));

	/* Fold an expression like: "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (TREE_CODE (array) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && !TREE_INT_CST_HIGH (index)
	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return GEN_INT (TREE_STRING_POINTER (array)[i]);

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
	  {
	    if (TREE_CODE (index) == INTEGER_CST
		&& TREE_INT_CST_HIGH (index) == 0)
	      {
		tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));

		i = TREE_INT_CST_LOW (index);
		while (elem && i--)
		  elem = TREE_CHAIN (elem);
		if (elem)
		  return expand_expr (fold (TREE_VALUE (elem)), target,
				      tmode, ro_modifier);
	      }
	  }

	else if (optimize >= 1
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		i = TREE_INT_CST_LOW (index);
		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem = CONSTRUCTOR_ELTS (init);

		    while (elem
			   && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
		      elem = TREE_CHAIN (elem);
		    if (elem)
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, ro_modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && TREE_INT_CST_HIGH (index) == 0
			 && (TREE_INT_CST_LOW (index)
			     < TREE_STRING_LENGTH (init)))
		  return (GEN_INT
			  (TREE_STRING_POINTER
			   (init)[TREE_INT_CST_LOW (index)]));
	      }
	  }
      }
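
      /* Hedged example: given "static const int a[3] = {1, 2, 3};", the
	 code above folds a[1] to the constant 2 when optimizing, with no
	 memory reference emitted.  */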

      /* ... fall through ...  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  Don't do this if we have
	 already written the data since we want to refer to that copy
	 and varasm.c assumes that's what we'll do.  */
      if (code != ARRAY_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));

		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (op0, op1, target);
		      }
		    else
		      {
			enum machine_mode imode
			  = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}

bbf6f052
RK
6392 {
6393 enum machine_mode mode1;
6394 int bitsize;
6395 int bitpos;
7bb0943f 6396 tree offset;
bbf6f052 6397 int volatilep = 0;
034f9101 6398 int alignment;
839c4796
RK
6399 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6400 &mode1, &unsignedp, &volatilep,
6401 &alignment);
bbf6f052 6402
e7f3c83f
RK
6403 /* If we got back the original object, something is wrong. Perhaps
6404 we are evaluating an expression too early. In any event, don't
6405 infinitely recurse. */
6406 if (tem == exp)
6407 abort ();
6408
3d27140a 6409 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
6410 computation, since it will need a temporary and TARGET is known
6411 to have to do. This occurs in unchecked conversion in Ada. */
6412
6413 op0 = expand_expr (tem,
6414 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6415 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6416 != INTEGER_CST)
6417 ? target : NULL_RTX),
4ed67205 6418 VOIDmode,
e5e809f4
JL
6419 modifier == EXPAND_INITIALIZER
6420 ? modifier : EXPAND_NORMAL);
bbf6f052 6421
8c8a8e34 6422 /* If this is a constant, put it into a register if it is a
8008b228 6423 legitimate constant and memory if it isn't. */
8c8a8e34
JW
6424 if (CONSTANT_P (op0))
6425 {
6426 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
f2878c6b 6427 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
8c8a8e34
JW
6428 op0 = force_reg (mode, op0);
6429 else
6430 op0 = validize_mem (force_const_mem (mode, op0));
6431 }
6432
7bb0943f
RS
6433 if (offset != 0)
6434 {
906c4e36 6435 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
6436
6437 if (GET_CODE (op0) != MEM)
6438 abort ();
2d48c13d
JL
6439
6440 if (GET_MODE (offset_rtx) != ptr_mode)
bd070e1a 6441 {
2d48c13d 6442#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 6443 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
2d48c13d 6444#else
bd070e1a 6445 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d 6446#endif
bd070e1a 6447 }
2d48c13d 6448
6449 /* A constant address in TO_RTX can have VOIDmode, we must not try
6450 to call force_reg for that case. Avoid that case. */
6451 if (GET_CODE (op0) == MEM
6452 && GET_MODE (op0) == BLKmode
efd07ca7 6453 && GET_MODE (XEXP (op0, 0)) != VOIDmode
6454 && bitsize
6455 && (bitpos % bitsize) == 0
6456 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6457 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6458 {
6459 rtx temp = change_address (op0, mode1,
6460 plus_constant (XEXP (op0, 0),
6461 (bitpos /
6462 BITS_PER_UNIT)));
6463 if (GET_CODE (XEXP (temp, 0)) == REG)
6464 op0 = temp;
6465 else
6466 op0 = change_address (op0, mode1,
6467 force_reg (GET_MODE (XEXP (temp, 0)),
6468 XEXP (temp, 0)));
6469 bitpos = 0;
6470 }
6471
6472
7bb0943f 6473 op0 = change_address (op0, VOIDmode,
6474 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6475 force_reg (ptr_mode, offset_rtx)));
6476 }
6477
6478 /* Don't forget about volatility even if this is a bitfield. */
6479 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6480 {
6481 op0 = copy_rtx (op0);
6482 MEM_VOLATILE_P (op0) = 1;
6483 }
6484
921b3427 6485 /* Check the access. */
6486 if (current_function && current_function_check_memory_usage
6487 && GET_CODE (op0) == MEM)
6488 {
6489 enum memory_use_mode memory_usage;
6490 memory_usage = get_memory_usage_from_modifier (modifier);
6491
6492 if (memory_usage != MEMORY_USE_DONT)
6493 {
6494 rtx to;
6495 int size;
6496
6497 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6498 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6499
6500 /* Check the access right of the pointer. */
6501 if (size > BITS_PER_UNIT)
6502 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 6503 to, Pmode,
6504 GEN_INT (size / BITS_PER_UNIT),
6505 TYPE_MODE (sizetype),
6506 GEN_INT (memory_usage),
6507 TYPE_MODE (integer_type_node));
6508 }
6509 }
6510
6511 /* In cases where an aligned union has an unaligned object
6512 as a field, we might be extracting a BLKmode value from
6513 an integer-mode (e.g., SImode) object. Handle this case
6514 by doing the extract into an object as wide as the field
6515 (which we know to be the width of a basic mode), then
6516 storing into memory, and changing the mode to BLKmode.
6517 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6518 EXPAND_INITIALIZER), then we must not copy to a temporary. */
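	 /* E.g., a BLKmode field carved out of an SImode register is
	    fetched in SImode, spilled to a stack temporary, and the
	    temporary is then given BLKmode (see the mode == BLKmode
	    case below).  */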
bbf6f052 6519 if (mode1 == VOIDmode
ccc98036 6520 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
f9409c3a 6521 || (modifier != EXPAND_CONST_ADDRESS
f9409c3a 6522 && modifier != EXPAND_INITIALIZER
6523 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6524 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6525 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6526 /* If the field isn't aligned enough to fetch as a memref,
6527 fetch it as a bit field. */
6528 || (SLOW_UNALIGNED_ACCESS
c84e2712 6529 && ((TYPE_ALIGN (TREE_TYPE (tem)) < (unsigned int) GET_MODE_ALIGNMENT (mode))
f9409c3a 6530 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
bbf6f052 6531 {
6532 enum machine_mode ext_mode = mode;
6533
6534 if (ext_mode == BLKmode)
6535 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6536
6537 if (ext_mode == BLKmode)
6538 {
6539 /* In this case, BITPOS must start at a byte boundary and
6540 TARGET, if specified, must be a MEM. */
6541 if (GET_CODE (op0) != MEM
6542 || (target != 0 && GET_CODE (target) != MEM)
6543 || bitpos % BITS_PER_UNIT != 0)
6544 abort ();
6545
6546 op0 = change_address (op0, VOIDmode,
6547 plus_constant (XEXP (op0, 0),
6548 bitpos / BITS_PER_UNIT));
6549 if (target == 0)
6550 target = assign_temp (type, 0, 1, 1);
6551
6552 emit_block_move (target, op0,
6553 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6554 / BITS_PER_UNIT),
6555 1);
6556
6557 return target;
6558 }
bbf6f052 6559
6560 op0 = validize_mem (op0);
6561
6562 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6563 mark_reg_pointer (XEXP (op0, 0), alignment);
6564
6565 op0 = extract_bit_field (op0, bitsize, bitpos,
bbf6f052 6566 unsignedp, target, ext_mode, ext_mode,
034f9101 6567 alignment,
bbf6f052 6568 int_size_in_bytes (TREE_TYPE (tem)));
6569
6570 /* If the result is a record type and BITSIZE is narrower than
6571 the mode of OP0, an integral mode, and this is a big endian
6572 machine, we must put the field into the high-order bits. */
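	 /* E.g., a 24-bit record fetched in SImode on a big-endian
	    machine is shifted left by 8 so that its bytes sit at the
	    low addresses, where a BLKmode copy expects them.  */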
6573 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6574 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6575 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6576 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6577 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6578 - bitsize),
6579 op0, 1);
6580
6581 if (mode == BLKmode)
6582 {
6583 rtx new = assign_stack_temp (ext_mode,
6584 bitsize / BITS_PER_UNIT, 0);
6585
6586 emit_move_insn (new, op0);
6587 op0 = copy_rtx (new);
6588 PUT_MODE (op0, BLKmode);
c6df88cb 6589 MEM_SET_IN_STRUCT_P (op0, 1);
6590 }
6591
6592 return op0;
6593 }
6594
6595 /* If the result is BLKmode, use that to access the object
6596 now as well. */
6597 if (mode == BLKmode)
6598 mode1 = BLKmode;
6599
6600 /* Get a reference to just this component. */
6601 if (modifier == EXPAND_CONST_ADDRESS
6602 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6603 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6604 (bitpos / BITS_PER_UNIT)));
6605 else
6606 op0 = change_address (op0, mode1,
6607 plus_constant (XEXP (op0, 0),
6608 (bitpos / BITS_PER_UNIT)));
6609
6610 if (GET_CODE (op0) == MEM)
6611 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6612
6613 if (GET_CODE (XEXP (op0, 0)) == REG)
6614 mark_reg_pointer (XEXP (op0, 0), alignment);
6615
c6df88cb 6616 MEM_SET_IN_STRUCT_P (op0, 1);
bbf6f052 6617 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 6618 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 6619 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 6620 || modifier == EXPAND_INITIALIZER)
bbf6f052 6621 return op0;
0d15e60c 6622 else if (target == 0)
bbf6f052 6623 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 6624
6625 convert_move (target, op0, unsignedp);
6626 return target;
6627 }
6628
6629 /* Intended for a reference to a buffer of a file-object in Pascal.
6630 But it's not certain that a special tree code will really be
6631 necessary for these. INDIRECT_REF might work for them. */
6632 case BUFFER_REF:
6633 abort ();
6634
7308a047 6635 case IN_EXPR:
7308a047 6636 {
6637 /* Pascal set IN expression.
6638
6639 Algorithm:
6640 rlo = set_low - (set_low%bits_per_word);
6641 the_word = set [ (index - rlo)/bits_per_word ];
6642 bit_index = index % bits_per_word;
6643 bitmask = 1 << bit_index;
6644 return !!(the_word & bitmask); */
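	  /* Worked example: with set_low == 40 and bits_per_word == 8,
	     index 53 gives rlo == 40, fetches set[(53 - 40) / 8], i.e.
	     set[1], and tests bit 53 % 8 == 5 within that byte.  */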
6645
6646 tree set = TREE_OPERAND (exp, 0);
6647 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 6648 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 6649 tree set_type = TREE_TYPE (set);
6650 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6651 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6652 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6653 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6654 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6655 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6656 rtx setaddr = XEXP (setval, 0);
6657 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6658 rtx rlow;
6659 rtx diff, quo, rem, addr, bit, result;
7308a047 6660
6661 preexpand_calls (exp);
6662
6663 /* If domain is empty, answer is no. Likewise if index is constant
6664 and out of bounds. */
51723711 6665 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 6666 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 6667 && tree_int_cst_lt (set_high_bound, set_low_bound))
6668 || (TREE_CODE (index) == INTEGER_CST
6669 && TREE_CODE (set_low_bound) == INTEGER_CST
6670 && tree_int_cst_lt (index, set_low_bound))
6671 || (TREE_CODE (set_high_bound) == INTEGER_CST
6672 && TREE_CODE (index) == INTEGER_CST
6673 && tree_int_cst_lt (set_high_bound, index))))
6674 return const0_rtx;
6675
6676 if (target == 0)
6677 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6678
6679 /* If we get here, we have to generate the code for both cases
6680 (in range and out of range). */
6681
6682 op0 = gen_label_rtx ();
6683 op1 = gen_label_rtx ();
6684
6685 if (! (GET_CODE (index_val) == CONST_INT
6686 && GET_CODE (lo_r) == CONST_INT))
6687 {
6688 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6689 GET_MODE (index_val), iunsignedp, 0, op1);
6690 }
6691
6692 if (! (GET_CODE (index_val) == CONST_INT
6693 && GET_CODE (hi_r) == CONST_INT))
6694 {
6695 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6696 GET_MODE (index_val), iunsignedp, 0, op1);
6697 }
6698
6699 /* Calculate the element number of bit zero in the first word
6700 of the set. */
6701 if (GET_CODE (lo_r) == CONST_INT)
6702 rlow = GEN_INT (INTVAL (lo_r)
6703 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 6704 else
6705 rlow = expand_binop (index_mode, and_optab, lo_r,
6706 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 6707 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 6708
6709 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6710 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6711
6712 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 6713 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 6714 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6715 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6716
7308a047 6717 addr = memory_address (byte_mode,
6718 expand_binop (index_mode, add_optab, diff,
6719 setaddr, NULL_RTX, iunsignedp,
17938e57 6720 OPTAB_LIB_WIDEN));
d6a5ac33 6721
 6722 /* Extract the bit we want to examine. */
6723 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 6724 gen_rtx_MEM (byte_mode, addr),
6725 make_tree (TREE_TYPE (index), rem),
6726 NULL_RTX, 1);
6727 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6728 GET_MODE (target) == byte_mode ? target : 0,
7308a047 6729 1, OPTAB_LIB_WIDEN);
6730
6731 if (result != target)
6732 convert_move (target, result, 1);
6733
6734 /* Output the code to handle the out-of-range case. */
6735 emit_jump (op0);
6736 emit_label (op1);
6737 emit_move_insn (target, const0_rtx);
6738 emit_label (op0);
6739 return target;
6740 }
6741
6742 case WITH_CLEANUP_EXPR:
6743 if (RTL_EXPR_RTL (exp) == 0)
6744 {
6745 RTL_EXPR_RTL (exp)
921b3427 6746 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6747 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6748
6749 /* That's it for this cleanup. */
6750 TREE_OPERAND (exp, 2) = 0;
6751 }
6752 return RTL_EXPR_RTL (exp);
6753
6754 case CLEANUP_POINT_EXPR:
6755 {
6756 /* Start a new binding layer that will keep track of all cleanup
6757 actions to be performed. */
6758 expand_start_bindings (0);
6759
d93d4205 6760 target_temp_slot_level = temp_slot_level;
e976b8b2 6761
921b3427 6762 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6763 /* If we're going to use this value, load it up now. */
6764 if (! ignore)
6765 op0 = force_not_mem (op0);
d93d4205 6766 preserve_temp_slots (op0);
e976b8b2 6767 expand_end_bindings (NULL_TREE, 0, 0);
6768 }
6769 return op0;
6770
6771 case CALL_EXPR:
6772 /* Check for a built-in function. */
6773 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6774 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6775 == FUNCTION_DECL)
6776 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6777 return expand_builtin (exp, target, subtarget, tmode, ignore);
d6a5ac33 6778
6779 /* If this call was expanded already by preexpand_calls,
6780 just return the result we got. */
6781 if (CALL_EXPR_RTL (exp) != 0)
6782 return CALL_EXPR_RTL (exp);
d6a5ac33 6783
8129842c 6784 return expand_call (exp, target, ignore);
6785
6786 case NON_LVALUE_EXPR:
6787 case NOP_EXPR:
6788 case CONVERT_EXPR:
6789 case REFERENCE_EXPR:
6790 if (TREE_CODE (type) == UNION_TYPE)
6791 {
6792 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6793 if (target == 0)
6794 {
6795 if (mode != BLKmode)
6796 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6797 else
6798 target = assign_temp (type, 0, 1, 1);
6799 }
d6a5ac33 6800
6801 if (GET_CODE (target) == MEM)
6802 /* Store data into beginning of memory target. */
6803 store_expr (TREE_OPERAND (exp, 0),
6804 change_address (target, TYPE_MODE (valtype), 0), 0);
6805
6806 else if (GET_CODE (target) == REG)
6807 /* Store this field into a union of the proper type. */
6808 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6809 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6810 VOIDmode, 0, 1,
6811 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6812 0);
6813 else
6814 abort ();
6815
6816 /* Return the entire union. */
6817 return target;
6818 }
d6a5ac33 6819
6820 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6821 {
6822 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
921b3427 6823 ro_modifier);
6824
6825 /* If the signedness of the conversion differs and OP0 is
6826 a promoted SUBREG, clear that indication since we now
6827 have to do the proper extension. */
6828 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6829 && GET_CODE (op0) == SUBREG)
6830 SUBREG_PROMOTED_VAR_P (op0) = 0;
6831
6832 return op0;
6833 }
6834
1499e0a8 6835 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6836 if (GET_MODE (op0) == mode)
6837 return op0;
12342f90 6838
6839 /* If OP0 is a constant, just convert it into the proper mode. */
6840 if (CONSTANT_P (op0))
6841 return
6842 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6843 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
12342f90 6844
26fcb35a 6845 if (modifier == EXPAND_INITIALIZER)
38a448ca 6846 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 6847
bbf6f052 6848 if (target == 0)
6849 return
6850 convert_to_mode (mode, op0,
6851 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 6852 else
6853 convert_move (target, op0,
6854 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6855 return target;
6856
6857 case PLUS_EXPR:
6858 /* We come here from MINUS_EXPR when the second operand is a
6859 constant. */
6860 plus_expr:
6861 this_optab = add_optab;
6862
6863 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6864 something else, make sure we add the register to the constant and
6865 then to the other thing. This case can occur during strength
6866 reduction and doing it this way will produce better code if the
6867 frame pointer or argument pointer is eliminated.
6868
6869 fold-const.c will ensure that the constant is always in the inner
6870 PLUS_EXPR, so the only case we need to do anything about is if
6871 sp, ap, or fp is our second argument, in which case we must swap
6872 the innermost first argument and our second argument. */
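      /* I.e., rewrite (X + C) + SP as (SP + C) + X, so that the
	 register and the constant can be folded into a single
	 address below.  */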
6873
6874 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6875 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6876 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6877 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6878 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6879 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6880 {
6881 tree t = TREE_OPERAND (exp, 1);
6882
6883 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6884 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6885 }
6886
88f63c77 6887 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
6888 something, we might be forming a constant. So try to use
6889 plus_constant. If it produces a sum and we can't accept it,
6890 use force_operand. This allows P = &ARR[const] to generate
6891 efficient code on machines where a SYMBOL_REF is not a valid
6892 address.
6893
6894 If this is an EXPAND_SUM call, always return the sum. */
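      /* For instance, with int ARR[] and P = &ARR[3], this can yield
	 (plus (symbol_ref ARR) (const_int 12)) directly, rather than
	 first loading the symbol into a register (4-byte elements
	 assumed for illustration).  */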
c980ac49 6895 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
88f63c77 6896 || mode == ptr_mode)
bbf6f052 6897 {
6898 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6899 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6900 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6901 {
6902 rtx constant_part;
6903
6904 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6905 EXPAND_SUM);
6906 /* Use immed_double_const to ensure that the constant is
6907 truncated according to the mode of OP1, then sign extended
6908 to a HOST_WIDE_INT. Using the constant directly can result
6909 in non-canonical RTL in a 64x32 cross compile. */
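	      /* E.g., an SImode constant with bit 31 set must appear as
		 a sign-extended, negative CONST_INT on a 64-bit host.  */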
6910 constant_part
6911 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
6912 (HOST_WIDE_INT) 0,
a5efcd63 6913 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 6914 op1 = plus_constant (op1, INTVAL (constant_part));
6915 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6916 op1 = force_operand (op1, target);
6917 return op1;
6918 }
bbf6f052 6919
6920 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6921 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6922 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6923 {
6924 rtx constant_part;
6925
6926 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6927 EXPAND_SUM);
6928 if (! CONSTANT_P (op0))
6929 {
6930 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6931 VOIDmode, modifier);
6932 /* Don't go to both_summands if modifier
6933 says it's not right to return a PLUS. */
6934 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6935 goto binop2;
6936 goto both_summands;
6937 }
6938 /* Use immed_double_const to ensure that the constant is
6939 truncated according to the mode of OP1, then sign extended
6940 to a HOST_WIDE_INT. Using the constant directly can result
6941 in non-canonical RTL in a 64x32 cross compile. */
6942 constant_part
6943 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
6944 (HOST_WIDE_INT) 0,
2a94e396 6945 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 6946 op0 = plus_constant (op0, INTVAL (constant_part));
6947 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6948 op0 = force_operand (op0, target);
6949 return op0;
6950 }
6951 }
6952
6953 /* No sense saving up arithmetic to be done
6954 if it's all in the wrong mode to form part of an address.
6955 And force_operand won't know whether to sign-extend or
6956 zero-extend. */
6957 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 6958 || mode != ptr_mode)
c980ac49 6959 goto binop;
6960
6961 preexpand_calls (exp);
e5e809f4 6962 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6963 subtarget = 0;
6964
6965 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6966 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
bbf6f052 6967
c980ac49 6968 both_summands:
6969 /* Make sure any term that's a sum with a constant comes last. */
6970 if (GET_CODE (op0) == PLUS
6971 && CONSTANT_P (XEXP (op0, 1)))
6972 {
6973 temp = op0;
6974 op0 = op1;
6975 op1 = temp;
6976 }
6977 /* If adding to a sum including a constant,
6978 associate it to put the constant outside. */
6979 if (GET_CODE (op1) == PLUS
6980 && CONSTANT_P (XEXP (op1, 1)))
6981 {
6982 rtx constant_term = const0_rtx;
6983
6984 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6985 if (temp != 0)
6986 op0 = temp;
6987 /* Ensure that MULT comes first if there is one. */
6988 else if (GET_CODE (op0) == MULT)
38a448ca 6989 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
bbf6f052 6990 else
38a448ca 6991 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6992
6993 /* Let's also eliminate constants from op0 if possible. */
6994 op0 = eliminate_constant_term (op0, &constant_term);
6995
6996 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6997 their sum should be a constant. Form it into OP1, since the
6998 result we want will then be OP0 + OP1. */
6999
7000 temp = simplify_binary_operation (PLUS, mode, constant_term,
7001 XEXP (op1, 1));
7002 if (temp != 0)
7003 op1 = temp;
7004 else
38a448ca 7005 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
7006 }
7007
7008 /* Put a constant term last and put a multiplication first. */
7009 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7010 temp = op1, op1 = op0, op0 = temp;
7011
7012 temp = simplify_binary_operation (PLUS, mode, op0, op1);
38a448ca 7013 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
7014
7015 case MINUS_EXPR:
7016 /* For initializers, we are allowed to return a MINUS of two
7017 symbolic constants. Here we handle all cases when both operands
7018 are constant. */
7019 /* Handle difference of two symbolic constants,
7020 for the sake of an initializer. */
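      /* E.g., a static initializer computed as the difference of the
	 addresses of two static objects.  */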
7021 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7022 && really_constant_p (TREE_OPERAND (exp, 0))
7023 && really_constant_p (TREE_OPERAND (exp, 1)))
7024 {
906c4e36 7025 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
921b3427 7026 VOIDmode, ro_modifier);
906c4e36 7027 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
921b3427 7028 VOIDmode, ro_modifier);
ea87523e 7029
7030 /* If the last operand is a CONST_INT, use plus_constant of
7031 the negated constant. Else make the MINUS. */
7032 if (GET_CODE (op1) == CONST_INT)
7033 return plus_constant (op0, - INTVAL (op1));
7034 else
38a448ca 7035 return gen_rtx_MINUS (mode, op0, op1);
7036 }
7037 /* Convert A - const to A + (-const). */
7038 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7039 {
7040 tree negated = fold (build1 (NEGATE_EXPR, type,
7041 TREE_OPERAND (exp, 1)));
7042
7043 /* Deal with the case where we can't negate the constant
7044 in TYPE. */
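	  /* E.g., for unsigned A - 3 we negate 3 in the corresponding
	     signed type, compute A + (-3) there, and convert the sum
	     back to the unsigned type.  */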
7045 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
7046 {
7047 tree newtype = signed_type (type);
7048 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
7049 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
7050 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
7051
7052 if (! TREE_OVERFLOW (newneg))
7053 return expand_expr (convert (type,
7054 build (PLUS_EXPR, newtype,
7055 newop0, newneg)),
921b3427 7056 target, tmode, ro_modifier);
7057 }
7058 else
7059 {
7060 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7061 goto plus_expr;
7062 }
7063 }
7064 this_optab = sub_optab;
7065 goto binop;
7066
7067 case MULT_EXPR:
7068 preexpand_calls (exp);
7069 /* If first operand is constant, swap them.
7070 Thus the following special case checks need only
7071 check the second operand. */
7072 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7073 {
7074 register tree t1 = TREE_OPERAND (exp, 0);
7075 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7076 TREE_OPERAND (exp, 1) = t1;
7077 }
7078
7079 /* Attempt to return something suitable for generating an
7080 indexed address, for machines that support that. */
7081
88f63c77 7082 if (modifier == EXPAND_SUM && mode == ptr_mode
bbf6f052 7083 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 7084 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 7085 {
7086 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7087 EXPAND_SUM);
7088
7089 /* Apply distributive law if OP0 is x+c. */
7090 if (GET_CODE (op0) == PLUS
7091 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
7092 return gen_rtx_PLUS (mode,
7093 gen_rtx_MULT (mode, XEXP (op0, 0),
7094 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7095 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7096 * INTVAL (XEXP (op0, 1))));
7097
7098 if (GET_CODE (op0) != REG)
906c4e36 7099 op0 = force_operand (op0, NULL_RTX);
7100 if (GET_CODE (op0) != REG)
7101 op0 = copy_to_mode_reg (mode, op0);
7102
7103 return gen_rtx_MULT (mode, op0,
7104 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
7105 }
7106
e5e809f4 7107 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7108 subtarget = 0;
7109
7110 /* Check for multiplying things that have been extended
7111 from a narrower type. If this machine supports multiplying
7112 in that narrower type with a result in the desired type,
7113 do it that way, and avoid the explicit type-conversion. */
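      /* E.g., (long long) i * (long long) j with 32-bit ints can use a
	 32x32->64 widening multiply instead of extending both operands
	 to 64 bits first.  */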
7114 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7115 && TREE_CODE (type) == INTEGER_TYPE
7116 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7117 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7118 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7119 && int_fits_type_p (TREE_OPERAND (exp, 1),
7120 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7121 /* Don't use a widening multiply if a shift will do. */
7122 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7123 > HOST_BITS_PER_WIDE_INT)
7124 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7125 ||
7126 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7127 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7128 ==
7129 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7130 /* If both operands are extended, they must either both
7131 be zero-extended or both be sign-extended. */
7132 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7133 ==
7134 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7135 {
7136 enum machine_mode innermode
7137 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7138 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7139 ? smul_widen_optab : umul_widen_optab);
7140 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7141 ? umul_widen_optab : smul_widen_optab);
b10af0c8 7142 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 7143 {
7144 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7145 {
7146 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7147 NULL_RTX, VOIDmode, 0);
7148 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7149 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7150 VOIDmode, 0);
7151 else
7152 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7153 NULL_RTX, VOIDmode, 0);
7154 goto binop2;
7155 }
7156 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7157 && innermode == word_mode)
7158 {
7159 rtx htem;
7160 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7161 NULL_RTX, VOIDmode, 0);
7162 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7163 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7164 VOIDmode, 0);
7165 else
7166 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7167 NULL_RTX, VOIDmode, 0);
7168 temp = expand_binop (mode, other_optab, op0, op1, target,
7169 unsignedp, OPTAB_LIB_WIDEN);
7170 htem = expand_mult_highpart_adjust (innermode,
7171 gen_highpart (innermode, temp),
7172 op0, op1,
7173 gen_highpart (innermode, temp),
7174 unsignedp);
7175 emit_move_insn (gen_highpart (innermode, temp), htem);
7176 return temp;
7177 }
7178 }
7179 }
7180 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7181 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7182 return expand_mult (mode, op0, op1, target, unsignedp);
7183
7184 case TRUNC_DIV_EXPR:
7185 case FLOOR_DIV_EXPR:
7186 case CEIL_DIV_EXPR:
7187 case ROUND_DIV_EXPR:
7188 case EXACT_DIV_EXPR:
7189 preexpand_calls (exp);
e5e809f4 7190 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7191 subtarget = 0;
7192 /* Possible optimization: compute the dividend with EXPAND_SUM
7193 then if the divisor is constant can optimize the case
7194 where some terms of the dividend have coeffs divisible by it. */
7195 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7196 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7197 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7198
7199 case RDIV_EXPR:
7200 this_optab = flodiv_optab;
7201 goto binop;
7202
7203 case TRUNC_MOD_EXPR:
7204 case FLOOR_MOD_EXPR:
7205 case CEIL_MOD_EXPR:
7206 case ROUND_MOD_EXPR:
7207 preexpand_calls (exp);
e5e809f4 7208 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7209 subtarget = 0;
7210 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7211 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7212 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7213
7214 case FIX_ROUND_EXPR:
7215 case FIX_FLOOR_EXPR:
7216 case FIX_CEIL_EXPR:
7217 abort (); /* Not used for C. */
7218
7219 case FIX_TRUNC_EXPR:
906c4e36 7220 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7221 if (target == 0)
7222 target = gen_reg_rtx (mode);
7223 expand_fix (target, op0, unsignedp);
7224 return target;
7225
7226 case FLOAT_EXPR:
906c4e36 7227 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7228 if (target == 0)
7229 target = gen_reg_rtx (mode);
7230 /* expand_float can't figure out what to do if FROM has VOIDmode.
7231 So give it the correct mode. With -O, cse will optimize this. */
7232 if (GET_MODE (op0) == VOIDmode)
7233 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7234 op0);
7235 expand_float (target, op0,
7236 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7237 return target;
7238
7239 case NEGATE_EXPR:
5b22bee8 7240 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7241 temp = expand_unop (mode, neg_optab, op0, target, 0);
7242 if (temp == 0)
7243 abort ();
7244 return temp;
7245
7246 case ABS_EXPR:
7247 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7248
2d7050fd 7249 /* Handle complex values specially. */
7250 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7251 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7252 return expand_complex_abs (mode, op0, target, unsignedp);
2d7050fd 7253
7254 /* Unsigned abs is simply the operand. Testing here means we don't
7255 risk generating incorrect code below. */
7256 if (TREE_UNSIGNED (type))
7257 return op0;
7258
91813b28 7259 return expand_abs (mode, op0, target,
e5e809f4 7260 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7261
7262 case MAX_EXPR:
7263 case MIN_EXPR:
7264 target = original_target;
e5e809f4 7265 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
fc155707 7266 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 7267 || GET_MODE (target) != mode
7268 || (GET_CODE (target) == REG
7269 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7270 target = gen_reg_rtx (mode);
906c4e36 7271 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7272 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7273
7274 /* First try to do it with a special MIN or MAX instruction.
7275 If that does not win, use a conditional jump to select the proper
7276 value. */
7277 this_optab = (TREE_UNSIGNED (type)
7278 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7279 : (code == MIN_EXPR ? smin_optab : smax_optab));
7280
7281 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7282 OPTAB_WIDEN);
7283 if (temp != 0)
7284 return temp;
7285
7286 /* At this point, a MEM target is no longer useful; we will get better
7287 code without it. */
7288
7289 if (GET_CODE (target) == MEM)
7290 target = gen_reg_rtx (mode);
7291
7292 if (target != op0)
7293 emit_move_insn (target, op0);
d6a5ac33 7294
bbf6f052 7295 op0 = gen_label_rtx ();
d6a5ac33 7296
7297 /* If this mode is an integer too wide to compare properly,
7298 compare word by word. Rely on cse to optimize constant cases. */
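      /* E.g., a DImode MIN or MAX on a 32-bit target that has no
	 double-word compare instruction.  */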
b30f05db 7299 if (GET_MODE_CLASS (mode) == MODE_INT && ! can_compare_p (mode))
bbf6f052 7300 {
f81497d9 7301 if (code == MAX_EXPR)
7302 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7303 target, op1, NULL_RTX, op0);
bbf6f052 7304 else
7305 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7306 op1, target, NULL_RTX, op0);
bbf6f052 7307 }
7308 else
7309 {
7310 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7311 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7312 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7313 op0);
f81497d9 7314 }
b30f05db 7315 emit_move_insn (target, op1);
7316 emit_label (op0);
7317 return target;
7318
7319 case BIT_NOT_EXPR:
7320 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7321 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7322 if (temp == 0)
7323 abort ();
7324 return temp;
7325
7326 case FFS_EXPR:
7327 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7328 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7329 if (temp == 0)
7330 abort ();
7331 return temp;
7332
7333 /* ??? Can optimize bitwise operations with one arg constant.
7334 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7335 and (a bitwise1 b) bitwise2 b (etc)
7336 but that is probably not worth while. */
7337
7338 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7339 boolean values when we want in all cases to compute both of them. In
7340 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7341 as actual zero-or-1 values and then bitwise anding. In cases where
7342 there cannot be any side effects, better code would be made by
7343 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7344 how to recognize those cases. */
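      /* E.g., 'a && b' (TRUTH_ANDIF_EXPR) must not evaluate B when A is
	 false, whereas 'a & b' on 0/1 values may compute both operands
	 and simply AND them.  */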
7345
7346 case TRUTH_AND_EXPR:
7347 case BIT_AND_EXPR:
7348 this_optab = and_optab;
7349 goto binop;
7350
7351 case TRUTH_OR_EXPR:
7352 case BIT_IOR_EXPR:
7353 this_optab = ior_optab;
7354 goto binop;
7355
874726a8 7356 case TRUTH_XOR_EXPR:
7357 case BIT_XOR_EXPR:
7358 this_optab = xor_optab;
7359 goto binop;
7360
7361 case LSHIFT_EXPR:
7362 case RSHIFT_EXPR:
7363 case LROTATE_EXPR:
7364 case RROTATE_EXPR:
7365 preexpand_calls (exp);
e5e809f4 7366 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7367 subtarget = 0;
7368 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7369 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7370 unsignedp);
7371
7372 /* Could determine the answer when only additive constants differ. Also,
7373 the addition of one can be handled by changing the condition. */
7374 case LT_EXPR:
7375 case LE_EXPR:
7376 case GT_EXPR:
7377 case GE_EXPR:
7378 case EQ_EXPR:
7379 case NE_EXPR:
7380 preexpand_calls (exp);
7381 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7382 if (temp != 0)
7383 return temp;
d6a5ac33 7384
0f41302f 7385 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7386 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7387 && original_target
7388 && GET_CODE (original_target) == REG
7389 && (GET_MODE (original_target)
7390 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7391 {
7392 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7393 VOIDmode, 0);
7394
7395 if (temp != original_target)
7396 temp = copy_to_reg (temp);
d6a5ac33 7397
bbf6f052 7398 op1 = gen_label_rtx ();
7399 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7400 GET_MODE (temp), unsignedp, 0, op1);
7401 emit_move_insn (temp, const1_rtx);
7402 emit_label (op1);
7403 return temp;
7404 }
d6a5ac33 7405
7406 /* If no set-flag instruction, must generate a conditional
7407 store into a temporary variable. Drop through
7408 and handle this like && and ||. */
7409
7410 case TRUTH_ANDIF_EXPR:
7411 case TRUTH_ORIF_EXPR:
e44842fe 7412 if (! ignore
e5e809f4 7413 && (target == 0 || ! safe_from_p (target, exp, 1)
7414 /* Make sure we don't have a hard reg (such as function's return
7415 value) live across basic blocks, if not optimizing. */
7416 || (!optimize && GET_CODE (target) == REG
7417 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 7418 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7419
7420 if (target)
7421 emit_clr_insn (target);
7422
7423 op1 = gen_label_rtx ();
7424 jumpifnot (exp, op1);
7425
7426 if (target)
7427 emit_0_to_1_insn (target);
7428
bbf6f052 7429 emit_label (op1);
e44842fe 7430 return ignore ? const0_rtx : target;
7431
7432 case TRUTH_NOT_EXPR:
7433 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7434 /* The parser is careful to generate TRUTH_NOT_EXPR
7435 only with operands that are always zero or one. */
906c4e36 7436 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7437 target, 1, OPTAB_LIB_WIDEN);
7438 if (temp == 0)
7439 abort ();
7440 return temp;
7441
7442 case COMPOUND_EXPR:
7443 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7444 emit_queue ();
7445 return expand_expr (TREE_OPERAND (exp, 1),
7446 (ignore ? const0_rtx : target),
7447 VOIDmode, 0);
7448
7449 case COND_EXPR:
7450 /* If we would have a "singleton" (see below) were it not for a
7451 conversion in each arm, bring that conversion back out. */
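      /* I.e., turn X ? (int) (A + B) : (int) A into
	 (int) (X ? A + B : A), so the singleton cases below can see
	 through the conversions.  */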
7452 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7453 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7454 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7455 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7456 {
7457 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7458 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7459
7460 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7461 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7462 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7463 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7464 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7465 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7466 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7467 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7468 return expand_expr (build1 (NOP_EXPR, type,
7469 build (COND_EXPR, TREE_TYPE (true),
7470 TREE_OPERAND (exp, 0),
7471 true, false)),
7472 target, tmode, modifier);
7473 }
7474
7475 {
7476 /* Note that COND_EXPRs whose type is a structure or union
7477 are required to be constructed to contain assignments of
7478 a temporary variable, so that we can evaluate them here
7479 for side effect only. If type is void, we must do likewise. */
7480
7481 /* If an arm of the branch requires a cleanup,
7482 only that cleanup is performed. */
7483
7484 tree singleton = 0;
7485 tree binary_op = 0, unary_op = 0;
7486
7487 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7488 convert it to our mode, if necessary. */
7489 if (integer_onep (TREE_OPERAND (exp, 1))
7490 && integer_zerop (TREE_OPERAND (exp, 2))
7491 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7492 {
7493 if (ignore)
7494 {
7495 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
921b3427 7496 ro_modifier);
7497 return const0_rtx;
7498 }
7499
921b3427 7500 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7501 if (GET_MODE (op0) == mode)
7502 return op0;
d6a5ac33 7503
7504 if (target == 0)
7505 target = gen_reg_rtx (mode);
7506 convert_move (target, op0, unsignedp);
7507 return target;
7508 }
7509
7510 /* Check for X ? A + B : A. If we have this, we can copy A to the
7511 output and conditionally add B. Similarly for unary operations.
7512 Don't do this if X has side-effects because those side effects
7513 might affect A or B and the "?" operation is a sequence point in
7514 ANSI. (operand_equal_p tests for side effects.) */
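      /* E.g., X ? A + 1 : A copies A into the output once and then
	 adds 1 only when X is true, avoiding a second evaluation
	 of A.  */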
7515
7516 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7517 && operand_equal_p (TREE_OPERAND (exp, 2),
7518 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7519 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7520 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7521 && operand_equal_p (TREE_OPERAND (exp, 1),
7522 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7523 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7524 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7525 && operand_equal_p (TREE_OPERAND (exp, 2),
7526 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7527 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7528 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7529 && operand_equal_p (TREE_OPERAND (exp, 1),
7530 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7531 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7532
7533 /* If we are not to produce a result, we have no target. Otherwise,
7534 if a target was specified use it; it will not be used as an
7535 intermediate target unless it is safe. If no target, use a
7536 temporary. */
7537
7538 if (ignore)
7539 temp = 0;
7540 else if (original_target
e5e809f4 7541 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7542 || (singleton && GET_CODE (original_target) == REG
7543 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7544 && original_target == var_rtx (singleton)))
7545 && GET_MODE (original_target) == mode
7546#ifdef HAVE_conditional_move
7547 && (! can_conditionally_move_p (mode)
7548 || GET_CODE (original_target) == REG
7549 || TREE_ADDRESSABLE (type))
7550#endif
7551 && ! (GET_CODE (original_target) == MEM
7552 && MEM_VOLATILE_P (original_target)))
7553 temp = original_target;
7554 else if (TREE_ADDRESSABLE (type))
7555 abort ();
7556 else
7557 temp = assign_temp (type, 0, 0, 1);
7558
7559 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7560 do the test of X as a store-flag operation, do this as
7561 A + ((X != 0) << log C). Similarly for other simple binary
7562 operators. Only do for C == 1 if BRANCH_COST is low. */
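	 /* E.g., X ? A + 4 : A becomes A + ((X != 0) << 2), trading the
	    branch for a store-flag and a shift.  */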
dd27116b 7563 if (temp && singleton && binary_op
7564 && (TREE_CODE (binary_op) == PLUS_EXPR
7565 || TREE_CODE (binary_op) == MINUS_EXPR
7566 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 7567 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7568 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7569 : integer_onep (TREE_OPERAND (binary_op, 1)))
7570 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7571 {
7572 rtx result;
7573 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7574 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7575 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
2d444001 7576 : xor_optab);
7577
7578 /* If we had X ? A : A + 1, do this as A + (X == 0).
7579
7580 We have to invert the truth value here and then put it
7581 back later if do_store_flag fails. We cannot simply copy
7582 TREE_OPERAND (exp, 0) to another variable and modify that
7583 because invert_truthvalue can modify the tree pointed to
7584 by its argument. */
7585 if (singleton == TREE_OPERAND (exp, 1))
7586 TREE_OPERAND (exp, 0)
7587 = invert_truthvalue (TREE_OPERAND (exp, 0));
7588
7589 result = do_store_flag (TREE_OPERAND (exp, 0),
e5e809f4 7590 (safe_from_p (temp, singleton, 1)
906c4e36 7591 ? temp : NULL_RTX),
7592 mode, BRANCH_COST <= 1);
7593
7594 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7595 result = expand_shift (LSHIFT_EXPR, mode, result,
7596 build_int_2 (tree_log2
7597 (TREE_OPERAND
7598 (binary_op, 1)),
7599 0),
e5e809f4 7600 (safe_from_p (temp, singleton, 1)
ac01eace
RK
7601 ? temp : NULL_RTX), 0);
7602
7603 if (result)
7604 {
906c4e36 7605 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7606 return expand_binop (mode, boptab, op1, result, temp,
7607 unsignedp, OPTAB_LIB_WIDEN);
7608 }
7609 else if (singleton == TREE_OPERAND (exp, 1))
7610 TREE_OPERAND (exp, 0)
7611 = invert_truthvalue (TREE_OPERAND (exp, 0));
7612 }
7613
dabf8373 7614 do_pending_stack_adjust ();
7615 NO_DEFER_POP;
7616 op0 = gen_label_rtx ();
7617
7618 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7619 {
7620 if (temp != 0)
7621 {
7622 /* If the target conflicts with the other operand of the
7623 binary op, we can't use it. Also, we can't use the target
7624 if it is a hard register, because evaluating the condition
7625 might clobber it. */
7626 if ((binary_op
e5e809f4 7627 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7628 || (GET_CODE (temp) == REG
7629 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7630 temp = gen_reg_rtx (mode);
7631 store_expr (singleton, temp, 0);
7632 }
7633 else
906c4e36 7634 expand_expr (singleton,
2937cf87 7635 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7636 if (singleton == TREE_OPERAND (exp, 1))
7637 jumpif (TREE_OPERAND (exp, 0), op0);
7638 else
7639 jumpifnot (TREE_OPERAND (exp, 0), op0);
7640
956d6950 7641 start_cleanup_deferral ();
7642 if (binary_op && temp == 0)
7643 /* Just touch the other operand. */
7644 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 7645 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7646 else if (binary_op)
7647 store_expr (build (TREE_CODE (binary_op), type,
7648 make_tree (type, temp),
7649 TREE_OPERAND (binary_op, 1)),
7650 temp, 0);
7651 else
7652 store_expr (build1 (TREE_CODE (unary_op), type,
7653 make_tree (type, temp)),
7654 temp, 0);
7655 op1 = op0;
bbf6f052 7656 }
7657 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7658 comparison operator. If we have one of these cases, set the
7659 output to A, branch on A (cse will merge these two references),
7660 then set the output to FOO. */
7661 else if (temp
7662 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7663 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7664 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7665 TREE_OPERAND (exp, 1), 0)
7666 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7667 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 7668 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7669 {
7670 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7671 temp = gen_reg_rtx (mode);
7672 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7673 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 7674
956d6950 7675 start_cleanup_deferral ();
7676 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7677 op1 = op0;
7678 }
7679 else if (temp
7680 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7681 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7682 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7683 TREE_OPERAND (exp, 2), 0)
7684 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7685 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 7686 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7687 {
7688 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7689 temp = gen_reg_rtx (mode);
7690 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7691 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 7692
956d6950 7693 start_cleanup_deferral ();
bbf6f052
RK
7694 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7695 op1 = op0;
7696 }
7697 else
7698 {
7699 op1 = gen_label_rtx ();
7700 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 7701
956d6950 7702 start_cleanup_deferral ();
7703
7704 /* One branch of the cond can be void, if it never returns. For
 7705 example, A ? throw : E. */
7706 if (temp != 0
7707 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7708 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7709 else
7710 expand_expr (TREE_OPERAND (exp, 1),
7711 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 7712 end_cleanup_deferral ();
7713 emit_queue ();
7714 emit_jump_insn (gen_jump (op1));
7715 emit_barrier ();
7716 emit_label (op0);
956d6950 7717 start_cleanup_deferral ();
7718 if (temp != 0
7719 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7720 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7721 else
7722 expand_expr (TREE_OPERAND (exp, 2),
7723 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7724 }
7725
956d6950 7726 end_cleanup_deferral ();
7727
7728 emit_queue ();
7729 emit_label (op1);
7730 OK_DEFER_POP;
5dab5552 7731
7732 return temp;
7733 }
7734
7735 case TARGET_EXPR:
7736 {
7737 /* Something needs to be initialized, but we didn't know
7738 where that thing was when building the tree. For example,
7739 it could be the return value of a function, or a parameter
 7740 to a function which is laid out on the stack, or a temporary
7741 variable which must be passed by reference.
7742
7743 We guarantee that the expression will either be constructed
7744 or copied into our original target. */
7745
7746 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 7747 tree cleanups = NULL_TREE;
5c062816 7748 tree exp1;
7749
7750 if (TREE_CODE (slot) != VAR_DECL)
7751 abort ();
7752
7753 if (! ignore)
7754 target = original_target;
7755
7756 if (target == 0)
7757 {
7758 if (DECL_RTL (slot) != 0)
7759 {
7760 target = DECL_RTL (slot);
5c062816 7761 /* If we have already expanded the slot, don't do
ac993f4f 7762 it again. (mrs) */
7763 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7764 return target;
ac993f4f 7765 }
7766 else
7767 {
e9a25f70 7768 target = assign_temp (type, 2, 0, 1);
7769 /* All temp slots at this level must not conflict. */
7770 preserve_temp_slots (target);
7771 DECL_RTL (slot) = target;
7772 if (TREE_ADDRESSABLE (slot))
7773 {
7774 TREE_ADDRESSABLE (slot) = 0;
7775 mark_addressable (slot);
7776 }
bbf6f052 7777
7778 /* Since SLOT is not known to the called function
7779 to belong to its stack frame, we must build an explicit
7780 cleanup. This case occurs when we must build up a reference
7781 to pass the reference as an argument. In this case,
7782 it is very likely that such a reference need not be
7783 built here. */
7784
7785 if (TREE_OPERAND (exp, 2) == 0)
7786 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
2a888d4c 7787 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 7788 }
7789 }
7790 else
7791 {
 7792 /* This case does occur when expanding a parameter which
7793 needs to be constructed on the stack. The target
7794 is the actual stack address that we want to initialize.
7795 The function we call will perform the cleanup in this case. */
7796
7797 /* If we have already assigned it space, use that space,
 7798 not the target that we were passed in, since our target
7799 parameter is only a hint. */
7800 if (DECL_RTL (slot) != 0)
7801 {
7802 target = DECL_RTL (slot);
 7803 /* If we have already expanded the slot, don't do
7804 it again. (mrs) */
7805 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7806 return target;
7807 }
7808 else
7809 {
7810 DECL_RTL (slot) = target;
7811 /* If we must have an addressable slot, then make sure that
7812 the RTL that we just stored in slot is OK. */
7813 if (TREE_ADDRESSABLE (slot))
7814 {
7815 TREE_ADDRESSABLE (slot) = 0;
7816 mark_addressable (slot);
7817 }
7818 }
7819 }
7820
4847c938 7821 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7822 /* Mark it as expanded. */
7823 TREE_OPERAND (exp, 1) = NULL_TREE;
7824
e5e809f4 7825 TREE_USED (slot) = 1;
41531e5b 7826 store_expr (exp1, target, 0);
61d6b1cc 7827
e976b8b2 7828 expand_decl_cleanup (NULL_TREE, cleanups);
61d6b1cc 7829
41531e5b 7830 return target;
7831 }
7832
7833 case INIT_EXPR:
7834 {
7835 tree lhs = TREE_OPERAND (exp, 0);
7836 tree rhs = TREE_OPERAND (exp, 1);
7837 tree noncopied_parts = 0;
7838 tree lhs_type = TREE_TYPE (lhs);
7839
7840 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7841 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7842 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7843 TYPE_NONCOPIED_PARTS (lhs_type));
7844 while (noncopied_parts != 0)
7845 {
7846 expand_assignment (TREE_VALUE (noncopied_parts),
7847 TREE_PURPOSE (noncopied_parts), 0, 0);
7848 noncopied_parts = TREE_CHAIN (noncopied_parts);
7849 }
7850 return temp;
7851 }
7852
7853 case MODIFY_EXPR:
7854 {
7855 /* If lhs is complex, expand calls in rhs before computing it.
7856 That's so we don't compute a pointer and save it over a call.
7857 If lhs is simple, compute it first so we can give it as a
7858 target if the rhs is just a call. This avoids an extra temp and copy
7859 and that prevents a partial-subsumption which makes bad code.
7860 Actually we could treat component_ref's of vars like vars. */
7861
7862 tree lhs = TREE_OPERAND (exp, 0);
7863 tree rhs = TREE_OPERAND (exp, 1);
7864 tree noncopied_parts = 0;
7865 tree lhs_type = TREE_TYPE (lhs);
7866
7867 temp = 0;
7868
7869 if (TREE_CODE (lhs) != VAR_DECL
7870 && TREE_CODE (lhs) != RESULT_DECL
7871 && TREE_CODE (lhs) != PARM_DECL
7872 && ! (TREE_CODE (lhs) == INDIRECT_REF
7873 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7874 preexpand_calls (exp);
7875
7876 /* Check for |= or &= of a bitfield of size one into another bitfield
7877 of size 1. In this case, (unless we need the result of the
7878 assignment) we can do this more efficiently with a
7879 test followed by an assignment, if necessary.
7880
7881 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7882 things change so we do, this code should be enhanced to
7883 support it. */
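	 /* I.e., expand S.a |= T.b as 'if (T.b) S.a = 1;' and
	    S.a &= T.b as 'if (! T.b) S.a = 0;'.  */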
7884 if (ignore
7885 && TREE_CODE (lhs) == COMPONENT_REF
7886 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7887 || TREE_CODE (rhs) == BIT_AND_EXPR)
7888 && TREE_OPERAND (rhs, 0) == lhs
7889 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7890 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7891 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7892 {
7893 rtx label = gen_label_rtx ();
7894
7895 do_jump (TREE_OPERAND (rhs, 1),
7896 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7897 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7898 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7899 (TREE_CODE (rhs) == BIT_IOR_EXPR
7900 ? integer_one_node
7901 : integer_zero_node)),
7902 0, 0);
e7c33f54 7903 do_pending_stack_adjust ();
7904 emit_label (label);
7905 return const0_rtx;
7906 }
7907
7908 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7909 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7910 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7911 TYPE_NONCOPIED_PARTS (lhs_type));
7912
7913 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7914 while (noncopied_parts != 0)
7915 {
7916 expand_assignment (TREE_PURPOSE (noncopied_parts),
7917 TREE_VALUE (noncopied_parts), 0, 0);
7918 noncopied_parts = TREE_CHAIN (noncopied_parts);
7919 }
7920 return temp;
7921 }
7922
7923 case RETURN_EXPR:
7924 if (!TREE_OPERAND (exp, 0))
7925 expand_null_return ();
7926 else
7927 expand_return (TREE_OPERAND (exp, 0));
7928 return const0_rtx;
7929
bbf6f052
RK
7930 case PREINCREMENT_EXPR:
7931 case PREDECREMENT_EXPR:
7b8b9722 7932 return expand_increment (exp, 0, ignore);
bbf6f052
RK
7933
7934 case POSTINCREMENT_EXPR:
7935 case POSTDECREMENT_EXPR:
7936 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 7937 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
7938
7939 case ADDR_EXPR:
987c71d9 7940 /* If nonzero, TEMP will be set to the address of something that might
0f41302f 7941 be a MEM corresponding to a stack slot. */
987c71d9
RK
7942 temp = 0;
7943
bbf6f052
RK
7944 /* Are we taking the address of a nested function? */
7945 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 7946 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
7947 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7948 && ! TREE_STATIC (exp))
bbf6f052
RK
7949 {
7950 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7951 op0 = force_operand (op0, target);
7952 }
682ba3a6
RK
7953 /* If we are taking the address of something erroneous, just
7954 return a zero. */
7955 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7956 return const0_rtx;
bbf6f052
RK
7957 else
7958 {
e287fd6e
RK
7959 /* We make sure to pass const0_rtx down if we came in with
7960 ignore set, to avoid doing the cleanups twice for something. */
7961 op0 = expand_expr (TREE_OPERAND (exp, 0),
7962 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
7963 (modifier == EXPAND_INITIALIZER
7964 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 7965
119af78a
RK
7966 /* If we are going to ignore the result, OP0 will have been set
7967 to const0_rtx, so just return it. Don't get confused and
7968 think we are taking the address of the constant. */
7969 if (ignore)
7970 return op0;
7971
3539e816
MS
7972 op0 = protect_from_queue (op0, 0);
7973
896102d0
RK
7974 /* We would like the object in memory. If it is a constant,
7975 we can have it be statically allocated into memory. For
682ba3a6 7976 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
896102d0
RK
7977 memory and store the value into it. */
7978
7979 if (CONSTANT_P (op0))
7980 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7981 op0);
987c71d9 7982 else if (GET_CODE (op0) == MEM)
af5b53ed
RK
7983 {
7984 mark_temp_addr_taken (op0);
7985 temp = XEXP (op0, 0);
7986 }
896102d0 7987
682ba3a6 7988 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6c8538cc 7989 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
896102d0
RK
7990 {
 7991		/* If this object is in a register, it must not
0f41302f 7992		   be BLKmode. */
896102d0 7993 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
06089a8b 7994 rtx memloc = assign_temp (inner_type, 1, 1, 1);
896102d0 7995
7a0b7b9a 7996 mark_temp_addr_taken (memloc);
896102d0
RK
7997 emit_move_insn (memloc, op0);
7998 op0 = memloc;
7999 }
8000
bbf6f052
RK
8001 if (GET_CODE (op0) != MEM)
8002 abort ();
8003
8004 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77
RK
8005 {
8006 temp = XEXP (op0, 0);
8007#ifdef POINTERS_EXTEND_UNSIGNED
8008 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8009 && mode == ptr_mode)
9fcfcce7 8010 temp = convert_memory_address (ptr_mode, temp);
88f63c77
RK
8011#endif
8012 return temp;
8013 }
987c71d9 8014
bbf6f052
RK
8015 op0 = force_operand (XEXP (op0, 0), target);
8016 }
987c71d9 8017
bbf6f052 8018 if (flag_force_addr && GET_CODE (op0) != REG)
987c71d9
RK
8019 op0 = force_reg (Pmode, op0);
8020
dc6d66b3
RK
8021 if (GET_CODE (op0) == REG
8022 && ! REG_USERVAR_P (op0))
8023 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
987c71d9
RK
8024
8025 /* If we might have had a temp slot, add an equivalent address
8026 for it. */
8027 if (temp != 0)
8028 update_temp_slot_address (temp, op0);
8029
88f63c77
RK
8030#ifdef POINTERS_EXTEND_UNSIGNED
8031 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8032 && mode == ptr_mode)
9fcfcce7 8033 op0 = convert_memory_address (ptr_mode, op0);
88f63c77
RK
8034#endif
8035
bbf6f052
RK
8036 return op0;
8037
8038 case ENTRY_VALUE_EXPR:
8039 abort ();
8040
7308a047
RS
8041 /* COMPLEX type for Extended Pascal & Fortran */
8042 case COMPLEX_EXPR:
8043 {
8044 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 8045 rtx insns;
7308a047
RS
8046
8047 /* Get the rtx code of the operands. */
8048 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8049 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8050
8051 if (! target)
8052 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8053
6551fa4d 8054 start_sequence ();
7308a047
RS
8055
 8056	/* Move the real (op0) and imaginary (op1) parts to their locations. */
2d7050fd
RS
8057 emit_move_insn (gen_realpart (mode, target), op0);
8058 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 8059
6551fa4d
JW
8060 insns = get_insns ();
8061 end_sequence ();
8062
7308a047 8063 /* Complex construction should appear as a single unit. */
6551fa4d
JW
8064 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8065 each with a separate pseudo as destination.
8066 It's not correct for flow to treat them as a unit. */
6d6e61ce 8067 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8068 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8069 else
8070 emit_insns (insns);
7308a047
RS
8071
8072 return target;
8073 }
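	  /* In GNU C source terms the two moves above amount to this
	     sketch (hypothetical, using the __real__ and __imag__
	     extensions):

		__complex__ double z;
		__real__ z = re;	(gen_realpart of TARGET gets op0)
		__imag__ z = im;	(gen_imagpart of TARGET gets op1)  */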
8074
8075 case REALPART_EXPR:
2d7050fd
RS
8076 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8077 return gen_realpart (mode, op0);
7308a047
RS
8078
8079 case IMAGPART_EXPR:
2d7050fd
RS
8080 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8081 return gen_imagpart (mode, op0);
7308a047
RS
8082
8083 case CONJ_EXPR:
8084 {
62acb978 8085 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 8086 rtx imag_t;
6551fa4d 8087 rtx insns;
7308a047
RS
8088
8089 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8090
8091 if (! target)
d6a5ac33 8092 target = gen_reg_rtx (mode);
7308a047 8093
6551fa4d 8094 start_sequence ();
7308a047
RS
8095
8096 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
8097 emit_move_insn (gen_realpart (partmode, target),
8098 gen_realpart (partmode, op0));
7308a047 8099
62acb978
RK
8100 imag_t = gen_imagpart (partmode, target);
8101 temp = expand_unop (partmode, neg_optab,
8102 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
8103 if (temp != imag_t)
8104 emit_move_insn (imag_t, temp);
8105
6551fa4d
JW
8106 insns = get_insns ();
8107 end_sequence ();
8108
d6a5ac33
RK
 8109	  /* Conjugate should appear as a single unit.
8110 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
8111 each with a separate pseudo as destination.
8112 It's not correct for flow to treat them as a unit. */
6d6e61ce 8113 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8114 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8115 else
8116 emit_insns (insns);
7308a047
RS
8117
8118 return target;
8119 }
8120
e976b8b2
MS
8121 case TRY_CATCH_EXPR:
8122 {
8123 tree handler = TREE_OPERAND (exp, 1);
8124
8125 expand_eh_region_start ();
8126
8127 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8128
8129 expand_eh_region_end (handler);
8130
8131 return op0;
8132 }
8133
b335b813
PB
8134 case TRY_FINALLY_EXPR:
8135 {
8136 tree try_block = TREE_OPERAND (exp, 0);
8137 tree finally_block = TREE_OPERAND (exp, 1);
8138 rtx finally_label = gen_label_rtx ();
8139 rtx done_label = gen_label_rtx ();
8140 rtx return_link = gen_reg_rtx (Pmode);
8141 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8142 (tree) finally_label, (tree) return_link);
8143 TREE_SIDE_EFFECTS (cleanup) = 1;
8144
8145 /* Start a new binding layer that will keep track of all cleanup
8146 actions to be performed. */
8147 expand_start_bindings (0);
8148
8149 target_temp_slot_level = temp_slot_level;
8150
8151 expand_decl_cleanup (NULL_TREE, cleanup);
8152 op0 = expand_expr (try_block, target, tmode, modifier);
8153
8154 preserve_temp_slots (op0);
8155 expand_end_bindings (NULL_TREE, 0, 0);
8156 emit_jump (done_label);
8157 emit_label (finally_label);
8158 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8159 emit_indirect_jump (return_link);
8160 emit_label (done_label);
8161 return op0;
8162 }
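	/* The emitted control flow, written as a GNU C sketch with
	   computed gotos (label names are hypothetical):

		<try-block>
		return_link = &&resume; goto finally;	(the cleanup)
	     resume:
		goto done;
	     finally:
		<finally-block>
		goto *return_link;			(emit_indirect_jump)
	     done: ;  */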
8163
8164 case GOTO_SUBROUTINE_EXPR:
8165 {
8166 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8167 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8168 rtx return_address = gen_label_rtx ();
8169 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8170 emit_jump (subr);
8171 emit_label (return_address);
8172 return const0_rtx;
8173 }
8174
e976b8b2
MS
8175 case POPDCC_EXPR:
8176 {
8177 rtx dcc = get_dynamic_cleanup_chain ();
38a448ca 8178 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
e976b8b2
MS
8179 return const0_rtx;
8180 }
8181
8182 case POPDHC_EXPR:
8183 {
8184 rtx dhc = get_dynamic_handler_chain ();
38a448ca 8185 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
e976b8b2
MS
8186 return const0_rtx;
8187 }
8188
d3707adb
RH
8189 case VA_ARG_EXPR:
8190 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8191
bbf6f052 8192 default:
90764a87 8193 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
8194 }
8195
8196 /* Here to do an ordinary binary operator, generating an instruction
8197 from the optab already placed in `this_optab'. */
8198 binop:
8199 preexpand_calls (exp);
e5e809f4 8200 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
8201 subtarget = 0;
8202 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 8203 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8204 binop2:
8205 temp = expand_binop (mode, this_optab, op0, op1, target,
8206 unsignedp, OPTAB_LIB_WIDEN);
8207 if (temp == 0)
8208 abort ();
8209 return temp;
8210}
b93a436e
JL
8211\f
8212/* Return the tree node and offset if a given argument corresponds to
8213 a string constant. */
8214
28f4ec01 8215tree
b93a436e
JL
8216string_constant (arg, ptr_offset)
8217 tree arg;
8218 tree *ptr_offset;
8219{
8220 STRIP_NOPS (arg);
8221
8222 if (TREE_CODE (arg) == ADDR_EXPR
8223 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8224 {
8225 *ptr_offset = integer_zero_node;
8226 return TREE_OPERAND (arg, 0);
8227 }
8228 else if (TREE_CODE (arg) == PLUS_EXPR)
8229 {
8230 tree arg0 = TREE_OPERAND (arg, 0);
8231 tree arg1 = TREE_OPERAND (arg, 1);
8232
8233 STRIP_NOPS (arg0);
8234 STRIP_NOPS (arg1);
8235
8236 if (TREE_CODE (arg0) == ADDR_EXPR
8237 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 8238 {
b93a436e
JL
8239 *ptr_offset = arg1;
8240 return TREE_OPERAND (arg0, 0);
bbf6f052 8241 }
b93a436e
JL
8242 else if (TREE_CODE (arg1) == ADDR_EXPR
8243 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 8244 {
b93a436e
JL
8245 *ptr_offset = arg0;
8246 return TREE_OPERAND (arg1, 0);
bbf6f052 8247 }
b93a436e 8248 }
ca695ac9 8249
b93a436e
JL
8250 return 0;
8251}
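/* Examples of trees the matcher above accepts (a sketch in C terms):

     "hello"        the decayed &"hello"          *PTR_OFFSET = 0
     "hello" + 3    PLUS_EXPR of that address     *PTR_OFFSET = 3
     3 + "hello"    the commuted form             *PTR_OFFSET = 3  */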
ca695ac9 8252\f
b93a436e
JL
8253/* Expand code for a post- or pre- increment or decrement
8254 and return the RTX for the result.
8255 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
1499e0a8 8256
b93a436e
JL
8257static rtx
8258expand_increment (exp, post, ignore)
8259 register tree exp;
8260 int post, ignore;
ca695ac9 8261{
b93a436e
JL
8262 register rtx op0, op1;
8263 register rtx temp, value;
8264 register tree incremented = TREE_OPERAND (exp, 0);
8265 optab this_optab = add_optab;
8266 int icode;
8267 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8268 int op0_is_copy = 0;
8269 int single_insn = 0;
8270 /* 1 means we can't store into OP0 directly,
8271 because it is a subreg narrower than a word,
8272 and we don't dare clobber the rest of the word. */
8273 int bad_subreg = 0;
1499e0a8 8274
b93a436e
JL
8275 /* Stabilize any component ref that might need to be
8276 evaluated more than once below. */
8277 if (!post
8278 || TREE_CODE (incremented) == BIT_FIELD_REF
8279 || (TREE_CODE (incremented) == COMPONENT_REF
8280 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8281 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8282 incremented = stabilize_reference (incremented);
8283 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8284 ones into save exprs so that they don't accidentally get evaluated
8285 more than once by the code below. */
8286 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8287 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8288 incremented = save_expr (incremented);
e9a25f70 8289
b93a436e
JL
8290 /* Compute the operands as RTX.
8291 Note whether OP0 is the actual lvalue or a copy of it:
8292 I believe it is a copy iff it is a register or subreg
8293 and insns were generated in computing it. */
e9a25f70 8294
b93a436e
JL
8295 temp = get_last_insn ();
8296 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
e9a25f70 8297
b93a436e
JL
8298 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8299 in place but instead must do sign- or zero-extension during assignment,
8300 so we copy it into a new register and let the code below use it as
8301 a copy.
e9a25f70 8302
b93a436e
JL
 8303     Note that we can safely modify this SUBREG since it is known not to be
8304 shared (it was made by the expand_expr call above). */
8305
8306 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8307 {
8308 if (post)
8309 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8310 else
8311 bad_subreg = 1;
8312 }
8313 else if (GET_CODE (op0) == SUBREG
8314 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8315 {
8316 /* We cannot increment this SUBREG in place. If we are
8317 post-incrementing, get a copy of the old value. Otherwise,
8318 just mark that we cannot increment in place. */
8319 if (post)
8320 op0 = copy_to_reg (op0);
8321 else
8322 bad_subreg = 1;
e9a25f70
JL
8323 }
8324
b93a436e
JL
8325 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8326 && temp != get_last_insn ());
8327 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8328 EXPAND_MEMORY_USE_BAD);
1499e0a8 8329
b93a436e
JL
8330 /* Decide whether incrementing or decrementing. */
8331 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8332 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8333 this_optab = sub_optab;
8334
8335 /* Convert decrement by a constant into a negative increment. */
8336 if (this_optab == sub_optab
8337 && GET_CODE (op1) == CONST_INT)
ca695ac9 8338 {
b93a436e
JL
8339 op1 = GEN_INT (- INTVAL (op1));
8340 this_optab = add_optab;
ca695ac9 8341 }
1499e0a8 8342
b93a436e
JL
8343 /* For a preincrement, see if we can do this with a single instruction. */
8344 if (!post)
8345 {
8346 icode = (int) this_optab->handlers[(int) mode].insn_code;
8347 if (icode != (int) CODE_FOR_nothing
8348 /* Make sure that OP0 is valid for operands 0 and 1
8349 of the insn we want to queue. */
8350 && (*insn_operand_predicate[icode][0]) (op0, mode)
8351 && (*insn_operand_predicate[icode][1]) (op0, mode)
8352 && (*insn_operand_predicate[icode][2]) (op1, mode))
8353 single_insn = 1;
8354 }
bbf6f052 8355
b93a436e
JL
8356 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8357 then we cannot just increment OP0. We must therefore contrive to
8358 increment the original value. Then, for postincrement, we can return
8359 OP0 since it is a copy of the old value. For preincrement, expand here
8360 unless we can do it with a single insn.
bbf6f052 8361
b93a436e
JL
8362 Likewise if storing directly into OP0 would clobber high bits
8363 we need to preserve (bad_subreg). */
8364 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 8365 {
b93a436e
JL
8366 /* This is the easiest way to increment the value wherever it is.
8367 Problems with multiple evaluation of INCREMENTED are prevented
8368 because either (1) it is a component_ref or preincrement,
8369 in which case it was stabilized above, or (2) it is an array_ref
8370 with constant index in an array in a register, which is
8371 safe to reevaluate. */
8372 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8373 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8374 ? MINUS_EXPR : PLUS_EXPR),
8375 TREE_TYPE (exp),
8376 incremented,
8377 TREE_OPERAND (exp, 1));
a358cee0 8378
b93a436e
JL
8379 while (TREE_CODE (incremented) == NOP_EXPR
8380 || TREE_CODE (incremented) == CONVERT_EXPR)
8381 {
8382 newexp = convert (TREE_TYPE (incremented), newexp);
8383 incremented = TREE_OPERAND (incremented, 0);
8384 }
bbf6f052 8385
b93a436e
JL
 8386	      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8387 return post ? op0 : temp;
8388 }
bbf6f052 8389
b93a436e
JL
8390 if (post)
8391 {
8392 /* We have a true reference to the value in OP0.
8393 If there is an insn to add or subtract in this mode, queue it.
8394 Queueing the increment insn avoids the register shuffling
8395 that often results if we must increment now and first save
8396 the old value for subsequent use. */
bbf6f052 8397
b93a436e
JL
8398#if 0 /* Turned off to avoid making extra insn for indexed memref. */
8399 op0 = stabilize (op0);
8400#endif
41dfd40c 8401
b93a436e
JL
8402 icode = (int) this_optab->handlers[(int) mode].insn_code;
8403 if (icode != (int) CODE_FOR_nothing
8404 /* Make sure that OP0 is valid for operands 0 and 1
8405 of the insn we want to queue. */
8406 && (*insn_operand_predicate[icode][0]) (op0, mode)
8407 && (*insn_operand_predicate[icode][1]) (op0, mode))
8408 {
8409 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8410 op1 = force_reg (mode, op1);
bbf6f052 8411
b93a436e
JL
8412 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8413 }
8414 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8415 {
8416 rtx addr = (general_operand (XEXP (op0, 0), mode)
8417 ? force_reg (Pmode, XEXP (op0, 0))
8418 : copy_to_reg (XEXP (op0, 0)));
8419 rtx temp, result;
ca695ac9 8420
b93a436e
JL
8421 op0 = change_address (op0, VOIDmode, addr);
8422 temp = force_reg (GET_MODE (op0), op0);
8423 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
8424 op1 = force_reg (mode, op1);
ca695ac9 8425
b93a436e
JL
8426 /* The increment queue is LIFO, thus we have to `queue'
8427 the instructions in reverse order. */
8428 enqueue_insn (op0, gen_move_insn (op0, temp));
8429 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8430 return result;
bbf6f052
RK
8431 }
8432 }
ca695ac9 8433
b93a436e
JL
8434 /* Preincrement, or we can't increment with one simple insn. */
8435 if (post)
8436 /* Save a copy of the value before inc or dec, to return it later. */
8437 temp = value = copy_to_reg (op0);
8438 else
8439 /* Arrange to return the incremented value. */
8440 /* Copy the rtx because expand_binop will protect from the queue,
8441 and the results of that would be invalid for us to return
8442 if our caller does emit_queue before using our result. */
8443 temp = copy_rtx (value = op0);
bbf6f052 8444
b93a436e
JL
8445 /* Increment however we can. */
8446 op1 = expand_binop (mode, this_optab, value, op1,
7d384cc0 8447 current_function_check_memory_usage ? NULL_RTX : op0,
b93a436e
JL
8448 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
8449 /* Make sure the value is stored into OP0. */
8450 if (op1 != op0)
8451 emit_move_insn (op0, op1);
5718612f 8452
b93a436e
JL
8453 return temp;
8454}
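/* A plain C sketch of the post/pre schema implemented above
   (a hypothetical helper, not compiler code):  */

static int
increment_schema (op0, delta, post)
     int *op0;
     int delta, post;
{
  int old = *op0;		/* copy_to_reg: needed only for post.  */
  *op0 += delta;		/* expand_binop, stored back into OP0.  */
  return post ? old : *op0;	/* Postincrement yields the saved copy.  */
}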
8455\f
8456/* Expand all function calls contained within EXP, innermost ones first.
8457 But don't look within expressions that have sequence points.
8458 For each CALL_EXPR, record the rtx for its value
8459 in the CALL_EXPR_RTL field. */
5718612f 8460
b93a436e
JL
8461static void
8462preexpand_calls (exp)
8463 tree exp;
8464{
8465 register int nops, i;
8466 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5718612f 8467
b93a436e
JL
8468 if (! do_preexpand_calls)
8469 return;
5718612f 8470
b93a436e 8471 /* Only expressions and references can contain calls. */
bbf6f052 8472
b93a436e
JL
8473 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
8474 return;
bbf6f052 8475
b93a436e
JL
8476 switch (TREE_CODE (exp))
8477 {
8478 case CALL_EXPR:
8479 /* Do nothing if already expanded. */
8480 if (CALL_EXPR_RTL (exp) != 0
8481 /* Do nothing if the call returns a variable-sized object. */
 8482	  || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8483 /* Do nothing to built-in functions. */
8484 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
8485 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
8486 == FUNCTION_DECL)
8487 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
8488 return;
bbf6f052 8489
b93a436e
JL
8490 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
8491 return;
bbf6f052 8492
b93a436e
JL
8493 case COMPOUND_EXPR:
8494 case COND_EXPR:
8495 case TRUTH_ANDIF_EXPR:
8496 case TRUTH_ORIF_EXPR:
8497 /* If we find one of these, then we can be sure
8498 the adjust will be done for it (since it makes jumps).
8499 Do it now, so that if this is inside an argument
8500 of a function, we don't get the stack adjustment
8501 after some other args have already been pushed. */
8502 do_pending_stack_adjust ();
8503 return;
bbf6f052 8504
b93a436e
JL
8505 case BLOCK:
8506 case RTL_EXPR:
8507 case WITH_CLEANUP_EXPR:
8508 case CLEANUP_POINT_EXPR:
8509 case TRY_CATCH_EXPR:
8510 return;
bbf6f052 8511
b93a436e
JL
8512 case SAVE_EXPR:
8513 if (SAVE_EXPR_RTL (exp) != 0)
8514 return;
8515
8516 default:
8517 break;
ca695ac9 8518 }
bbf6f052 8519
b93a436e
JL
8520 nops = tree_code_length[(int) TREE_CODE (exp)];
8521 for (i = 0; i < nops; i++)
8522 if (TREE_OPERAND (exp, i) != 0)
8523 {
8524 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
8525 if (type == 'e' || type == '<' || type == '1' || type == '2'
8526 || type == 'r')
8527 preexpand_calls (TREE_OPERAND (exp, i));
8528 }
8529}
8530\f
8531/* At the start of a function, record that we have no previously-pushed
8532 arguments waiting to be popped. */
bbf6f052 8533
b93a436e
JL
8534void
8535init_pending_stack_adjust ()
8536{
8537 pending_stack_adjust = 0;
8538}
bbf6f052 8539
b93a436e 8540/* When exiting from function, if safe, clear out any pending stack adjust
060fbabf
JL
8541 so the adjustment won't get done.
8542
8543 Note, if the current function calls alloca, then it must have a
8544 frame pointer regardless of the value of flag_omit_frame_pointer. */
bbf6f052 8545
b93a436e
JL
8546void
8547clear_pending_stack_adjust ()
8548{
8549#ifdef EXIT_IGNORE_STACK
8550 if (optimize > 0
060fbabf
JL
8551 && (! flag_omit_frame_pointer || current_function_calls_alloca)
8552 && EXIT_IGNORE_STACK
b93a436e
JL
8553 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
8554 && ! flag_inline_functions)
8555 pending_stack_adjust = 0;
8556#endif
8557}
bbf6f052 8558
b93a436e
JL
8559/* Pop any previously-pushed arguments that have not been popped yet. */
8560
8561void
8562do_pending_stack_adjust ()
8563{
8564 if (inhibit_defer_pop == 0)
ca695ac9 8565 {
b93a436e
JL
8566 if (pending_stack_adjust != 0)
8567 adjust_stack (GEN_INT (pending_stack_adjust));
8568 pending_stack_adjust = 0;
bbf6f052 8569 }
bbf6f052
RK
8570}
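/* For example (a sketch; the real insns are target-specific): with
   deferred popping, two calls that each pushed 4 bytes of arguments
   are followed by one combined adjustment,

	call _foo ; call _bar ; addl $8,%esp

   rather than an addl after each call.  */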
8571\f
b93a436e 8572/* Expand conditional expressions. */
bbf6f052 8573
b93a436e
JL
8574/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
8575 LABEL is an rtx of code CODE_LABEL, in this function and all the
8576 functions here. */
bbf6f052 8577
b93a436e
JL
8578void
8579jumpifnot (exp, label)
ca695ac9 8580 tree exp;
b93a436e 8581 rtx label;
bbf6f052 8582{
b93a436e
JL
8583 do_jump (exp, label, NULL_RTX);
8584}
bbf6f052 8585
b93a436e 8586/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
ca695ac9 8587
b93a436e
JL
8588void
8589jumpif (exp, label)
8590 tree exp;
8591 rtx label;
8592{
8593 do_jump (exp, NULL_RTX, label);
8594}
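/* For example (a hypothetical helper, mirroring how the statement
   expanders use these): compiling `if (cond) body;' needs only a
   label for the false arm.  */

static void
expand_simple_if (cond, body)
     tree cond, body;
{
  rtx after_label = gen_label_rtx ();

  jumpifnot (cond, after_label);	/* Fall through when COND is true.  */
  expand_expr (body, const0_rtx, VOIDmode, 0);
  emit_label (after_label);
}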
ca695ac9 8595
b93a436e
JL
8596/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
8597 the result is zero, or IF_TRUE_LABEL if the result is one.
8598 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
8599 meaning fall through in that case.
ca695ac9 8600
b93a436e
JL
8601 do_jump always does any pending stack adjust except when it does not
8602 actually perform a jump. An example where there is no jump
8603 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
ca695ac9 8604
b93a436e
JL
8605 This function is responsible for optimizing cases such as
8606 &&, || and comparison operators in EXP. */
5718612f 8607
b93a436e
JL
8608void
8609do_jump (exp, if_false_label, if_true_label)
8610 tree exp;
8611 rtx if_false_label, if_true_label;
8612{
8613 register enum tree_code code = TREE_CODE (exp);
8614 /* Some cases need to create a label to jump to
8615 in order to properly fall through.
8616 These cases set DROP_THROUGH_LABEL nonzero. */
8617 rtx drop_through_label = 0;
8618 rtx temp;
b93a436e
JL
8619 int i;
8620 tree type;
8621 enum machine_mode mode;
ca695ac9 8622
dbecbbe4
JL
8623#ifdef MAX_INTEGER_COMPUTATION_MODE
8624 check_max_integer_computation_mode (exp);
8625#endif
8626
b93a436e 8627 emit_queue ();
ca695ac9 8628
b93a436e 8629 switch (code)
ca695ac9 8630 {
b93a436e 8631 case ERROR_MARK:
ca695ac9 8632 break;
bbf6f052 8633
b93a436e
JL
8634 case INTEGER_CST:
8635 temp = integer_zerop (exp) ? if_false_label : if_true_label;
8636 if (temp)
8637 emit_jump (temp);
8638 break;
bbf6f052 8639
b93a436e
JL
8640#if 0
8641 /* This is not true with #pragma weak */
8642 case ADDR_EXPR:
8643 /* The address of something can never be zero. */
8644 if (if_true_label)
8645 emit_jump (if_true_label);
8646 break;
8647#endif
bbf6f052 8648
b93a436e
JL
8649 case NOP_EXPR:
8650 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
8651 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
8652 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
8653 goto normal;
8654 case CONVERT_EXPR:
8655 /* If we are narrowing the operand, we have to do the compare in the
8656 narrower mode. */
8657 if ((TYPE_PRECISION (TREE_TYPE (exp))
8658 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8659 goto normal;
8660 case NON_LVALUE_EXPR:
8661 case REFERENCE_EXPR:
8662 case ABS_EXPR:
8663 case NEGATE_EXPR:
8664 case LROTATE_EXPR:
8665 case RROTATE_EXPR:
8666 /* These cannot change zero->non-zero or vice versa. */
8667 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8668 break;
bbf6f052 8669
b93a436e
JL
8670#if 0
8671 /* This is never less insns than evaluating the PLUS_EXPR followed by
8672 a test and can be longer if the test is eliminated. */
8673 case PLUS_EXPR:
8674 /* Reduce to minus. */
8675 exp = build (MINUS_EXPR, TREE_TYPE (exp),
8676 TREE_OPERAND (exp, 0),
8677 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
8678 TREE_OPERAND (exp, 1))));
8679 /* Process as MINUS. */
ca695ac9 8680#endif
bbf6f052 8681
b93a436e
JL
8682 case MINUS_EXPR:
8683 /* Non-zero iff operands of minus differ. */
b30f05db
BS
8684 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
8685 TREE_OPERAND (exp, 0),
8686 TREE_OPERAND (exp, 1)),
8687 NE, NE, if_false_label, if_true_label);
b93a436e 8688 break;
bbf6f052 8689
b93a436e
JL
8690 case BIT_AND_EXPR:
8691 /* If we are AND'ing with a small constant, do this comparison in the
8692 smallest type that fits. If the machine doesn't have comparisons
8693 that small, it will be converted back to the wider comparison.
8694 This helps if we are testing the sign bit of a narrower object.
8695 combine can't do this for us because it can't know whether a
8696 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
bbf6f052 8697
b93a436e
JL
8698 if (! SLOW_BYTE_ACCESS
8699 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
8700 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
8701 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
8702 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
8703 && (type = type_for_mode (mode, 1)) != 0
8704 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8705 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8706 != CODE_FOR_nothing))
8707 {
8708 do_jump (convert (type, exp), if_false_label, if_true_label);
8709 break;
8710 }
8711 goto normal;
bbf6f052 8712
b93a436e
JL
8713 case TRUTH_NOT_EXPR:
8714 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8715 break;
bbf6f052 8716
b93a436e
JL
8717 case TRUTH_ANDIF_EXPR:
8718 if (if_false_label == 0)
8719 if_false_label = drop_through_label = gen_label_rtx ();
8720 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
8721 start_cleanup_deferral ();
8722 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8723 end_cleanup_deferral ();
8724 break;
bbf6f052 8725
b93a436e
JL
8726 case TRUTH_ORIF_EXPR:
8727 if (if_true_label == 0)
8728 if_true_label = drop_through_label = gen_label_rtx ();
8729 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
8730 start_cleanup_deferral ();
8731 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8732 end_cleanup_deferral ();
8733 break;
bbf6f052 8734
b93a436e
JL
8735 case COMPOUND_EXPR:
8736 push_temp_slots ();
8737 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
8738 preserve_temp_slots (NULL_RTX);
8739 free_temp_slots ();
8740 pop_temp_slots ();
8741 emit_queue ();
8742 do_pending_stack_adjust ();
8743 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
8744 break;
bbf6f052 8745
b93a436e
JL
8746 case COMPONENT_REF:
8747 case BIT_FIELD_REF:
8748 case ARRAY_REF:
8749 {
8750 int bitsize, bitpos, unsignedp;
8751 enum machine_mode mode;
8752 tree type;
8753 tree offset;
8754 int volatilep = 0;
8755 int alignment;
bbf6f052 8756
b93a436e
JL
8757 /* Get description of this reference. We don't actually care
8758 about the underlying object here. */
8759 get_inner_reference (exp, &bitsize, &bitpos, &offset,
8760 &mode, &unsignedp, &volatilep,
8761 &alignment);
bbf6f052 8762
b93a436e
JL
8763 type = type_for_size (bitsize, unsignedp);
8764 if (! SLOW_BYTE_ACCESS
8765 && type != 0 && bitsize >= 0
8766 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
8767 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
8768 != CODE_FOR_nothing))
8769 {
8770 do_jump (convert (type, exp), if_false_label, if_true_label);
8771 break;
8772 }
8773 goto normal;
8774 }
bbf6f052 8775
b93a436e
JL
8776 case COND_EXPR:
8777 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
8778 if (integer_onep (TREE_OPERAND (exp, 1))
8779 && integer_zerop (TREE_OPERAND (exp, 2)))
8780 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
bbf6f052 8781
b93a436e
JL
8782 else if (integer_zerop (TREE_OPERAND (exp, 1))
8783 && integer_onep (TREE_OPERAND (exp, 2)))
8784 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
bbf6f052 8785
b93a436e
JL
8786 else
8787 {
8788 register rtx label1 = gen_label_rtx ();
8789 drop_through_label = gen_label_rtx ();
bbf6f052 8790
b93a436e 8791 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
bbf6f052 8792
b93a436e
JL
8793 start_cleanup_deferral ();
8794 /* Now the THEN-expression. */
8795 do_jump (TREE_OPERAND (exp, 1),
8796 if_false_label ? if_false_label : drop_through_label,
8797 if_true_label ? if_true_label : drop_through_label);
8798 /* In case the do_jump just above never jumps. */
8799 do_pending_stack_adjust ();
8800 emit_label (label1);
bbf6f052 8801
b93a436e
JL
8802 /* Now the ELSE-expression. */
8803 do_jump (TREE_OPERAND (exp, 2),
8804 if_false_label ? if_false_label : drop_through_label,
8805 if_true_label ? if_true_label : drop_through_label);
8806 end_cleanup_deferral ();
8807 }
8808 break;
bbf6f052 8809
b93a436e
JL
8810 case EQ_EXPR:
8811 {
8812 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 8813
9ec36da5
JL
8814 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
8815 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
8816 {
8817 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
8818 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
8819 do_jump
8820 (fold
8821 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
8822 fold (build (EQ_EXPR, TREE_TYPE (exp),
8823 fold (build1 (REALPART_EXPR,
8824 TREE_TYPE (inner_type),
8825 exp0)),
8826 fold (build1 (REALPART_EXPR,
8827 TREE_TYPE (inner_type),
8828 exp1)))),
8829 fold (build (EQ_EXPR, TREE_TYPE (exp),
8830 fold (build1 (IMAGPART_EXPR,
8831 TREE_TYPE (inner_type),
8832 exp0)),
8833 fold (build1 (IMAGPART_EXPR,
8834 TREE_TYPE (inner_type),
8835 exp1)))))),
8836 if_false_label, if_true_label);
8837 }
9ec36da5
JL
8838
8839 else if (integer_zerop (TREE_OPERAND (exp, 1)))
8840 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
8841
b93a436e
JL
8842 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
8843 && !can_compare_p (TYPE_MODE (inner_type)))
8844 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
8845 else
b30f05db 8846 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
b93a436e
JL
8847 break;
8848 }
bbf6f052 8849
b93a436e
JL
8850 case NE_EXPR:
8851 {
8852 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 8853
9ec36da5
JL
8854 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
8855 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
8856 {
8857 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
8858 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
8859 do_jump
8860 (fold
8861 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
8862 fold (build (NE_EXPR, TREE_TYPE (exp),
8863 fold (build1 (REALPART_EXPR,
8864 TREE_TYPE (inner_type),
8865 exp0)),
8866 fold (build1 (REALPART_EXPR,
8867 TREE_TYPE (inner_type),
8868 exp1)))),
8869 fold (build (NE_EXPR, TREE_TYPE (exp),
8870 fold (build1 (IMAGPART_EXPR,
8871 TREE_TYPE (inner_type),
8872 exp0)),
8873 fold (build1 (IMAGPART_EXPR,
8874 TREE_TYPE (inner_type),
8875 exp1)))))),
8876 if_false_label, if_true_label);
8877 }
9ec36da5
JL
8878
8879 else if (integer_zerop (TREE_OPERAND (exp, 1)))
8880 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
8881
b93a436e
JL
8882 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
8883 && !can_compare_p (TYPE_MODE (inner_type)))
8884 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
8885 else
b30f05db 8886 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
b93a436e
JL
8887 break;
8888 }
bbf6f052 8889
b93a436e
JL
8890 case LT_EXPR:
8891 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8892 == MODE_INT)
8893 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8894 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
8895 else
b30f05db 8896 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
b93a436e 8897 break;
bbf6f052 8898
b93a436e
JL
8899 case LE_EXPR:
8900 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8901 == MODE_INT)
8902 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8903 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
8904 else
b30f05db 8905 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
b93a436e 8906 break;
bbf6f052 8907
b93a436e
JL
8908 case GT_EXPR:
8909 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8910 == MODE_INT)
8911 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8912 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
8913 else
b30f05db 8914 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
b93a436e 8915 break;
bbf6f052 8916
b93a436e
JL
8917 case GE_EXPR:
8918 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
8919 == MODE_INT)
8920 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
8921 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
8922 else
b30f05db 8923 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
b93a436e 8924 break;
bbf6f052 8925
b93a436e
JL
8926 default:
8927 normal:
8928 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
8929#if 0
8930 /* This is not needed any more and causes poor code since it causes
8931 comparisons and tests from non-SI objects to have different code
8932 sequences. */
8933 /* Copy to register to avoid generating bad insns by cse
8934 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
8935 if (!cse_not_expected && GET_CODE (temp) == MEM)
8936 temp = copy_to_reg (temp);
ca695ac9 8937#endif
b93a436e 8938 do_pending_stack_adjust ();
b30f05db
BS
8939 /* Do any postincrements in the expression that was tested. */
8940 emit_queue ();
8941
8942 if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
8943 {
8944 rtx target = temp == const0_rtx ? if_false_label : if_true_label;
8945 if (target)
8946 emit_jump (target);
8947 }
b93a436e 8948 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
b30f05db 8949 && ! can_compare_p (GET_MODE (temp)))
b93a436e
JL
8950 /* Note swapping the labels gives us not-equal. */
8951 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
8952 else if (GET_MODE (temp) != VOIDmode)
b30f05db
BS
8953 do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
8954 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
8955 GET_MODE (temp), NULL_RTX, 0,
8956 if_false_label, if_true_label);
b93a436e
JL
8957 else
8958 abort ();
8959 }
bbf6f052 8960
b93a436e
JL
8961 if (drop_through_label)
8962 {
8963 /* If do_jump produces code that might be jumped around,
8964 do any stack adjusts from that code, before the place
8965 where control merges in. */
8966 do_pending_stack_adjust ();
8967 emit_label (drop_through_label);
8968 }
bbf6f052 8969}
b93a436e
JL
8970\f
8971/* Given a comparison expression EXP for values too wide to be compared
8972 with one insn, test the comparison and jump to the appropriate label.
8973 The code of EXP is ignored; we always test GT if SWAP is 0,
8974 and LT if SWAP is 1. */
bbf6f052 8975
b93a436e
JL
8976static void
8977do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
8978 tree exp;
8979 int swap;
8980 rtx if_false_label, if_true_label;
8981{
8982 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
8983 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
8984 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
b93a436e 8985 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
bbf6f052 8986
b30f05db 8987  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1,
					     if_false_label, if_true_label);
f81497d9
RS
8988}
8989
b93a436e
JL
8990/* Compare OP0 with OP1, word at a time, in mode MODE.
8991 UNSIGNEDP says to do unsigned comparison.
8992 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
f81497d9 8993
b93a436e
JL
8994void
8995do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
8996 enum machine_mode mode;
8997 int unsignedp;
8998 rtx op0, op1;
8999 rtx if_false_label, if_true_label;
f81497d9 9000{
b93a436e
JL
9001 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9002 rtx drop_through_label = 0;
9003 int i;
f81497d9 9004
b93a436e
JL
9005 if (! if_true_label || ! if_false_label)
9006 drop_through_label = gen_label_rtx ();
9007 if (! if_true_label)
9008 if_true_label = drop_through_label;
9009 if (! if_false_label)
9010 if_false_label = drop_through_label;
f81497d9 9011
b93a436e
JL
9012 /* Compare a word at a time, high order first. */
9013 for (i = 0; i < nwords; i++)
9014 {
9015 rtx comp;
9016 rtx op0_word, op1_word;
bbf6f052 9017
b93a436e
JL
9018 if (WORDS_BIG_ENDIAN)
9019 {
9020 op0_word = operand_subword_force (op0, i, mode);
9021 op1_word = operand_subword_force (op1, i, mode);
9022 }
9023 else
9024 {
9025 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
9026 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
9027 }
bbf6f052 9028
b93a436e 9029 /* All but high-order word must be compared as unsigned. */
b30f05db
BS
9030 do_compare_rtx_and_jump (op0_word, op1_word, GT,
9031 (unsignedp || i > 0), word_mode, NULL_RTX, 0,
9032 NULL_RTX, if_true_label);
bbf6f052 9033
b93a436e 9034 /* Consider lower words only if these are equal. */
b30f05db
BS
9035 do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
9036 NULL_RTX, 0, NULL_RTX, if_false_label);
b93a436e 9037 }
bbf6f052 9038
b93a436e
JL
9039 if (if_false_label)
9040 emit_jump (if_false_label);
9041 if (drop_through_label)
9042 emit_label (drop_through_label);
bbf6f052
RK
9043}
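/* A plain C sketch of the schema above for two-word operands
   (a hypothetical helper; note that only the high-order word is
   compared signed):  */

static int
wide_gt_sketch (hi0, lo0, hi1, lo1)
     long hi0, hi1;
     unsigned long lo0, lo1;
{
  if (hi0 > hi1)		/* High-order word: signed compare.  */
    return 1;
  if (hi0 != hi1)		/* High words differ, so OP0 < OP1.  */
    return 0;
  return lo0 > lo1;		/* Low-order words: unsigned compare.  */
}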
9044
b93a436e
JL
9045/* Given an EQ_EXPR expression EXP for values too wide to be compared
9046 with one insn, test the comparison and jump to the appropriate label. */
bbf6f052 9047
b93a436e
JL
9048static void
9049do_jump_by_parts_equality (exp, if_false_label, if_true_label)
9050 tree exp;
9051 rtx if_false_label, if_true_label;
bbf6f052 9052{
b93a436e
JL
9053 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9054 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9055 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9056 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
9057 int i;
9058 rtx drop_through_label = 0;
bbf6f052 9059
b93a436e
JL
9060 if (! if_false_label)
9061 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 9062
b93a436e 9063 for (i = 0; i < nwords; i++)
b30f05db
BS
9064 do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
9065 operand_subword_force (op1, i, mode),
9066 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
9067 word_mode, NULL_RTX, 0, if_false_label,
9068 NULL_RTX);
bbf6f052 9069
b93a436e
JL
9070 if (if_true_label)
9071 emit_jump (if_true_label);
9072 if (drop_through_label)
9073 emit_label (drop_through_label);
bbf6f052 9074}
b93a436e
JL
9075\f
9076/* Jump according to whether OP0 is 0.
9077 We assume that OP0 has an integer mode that is too wide
9078 for the available compare insns. */
bbf6f052 9079
f5963e61 9080void
b93a436e
JL
9081do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9082 rtx op0;
9083 rtx if_false_label, if_true_label;
ca695ac9 9084{
b93a436e
JL
9085 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9086 rtx part;
9087 int i;
9088 rtx drop_through_label = 0;
bbf6f052 9089
b93a436e
JL
9090 /* The fastest way of doing this comparison on almost any machine is to
9091 "or" all the words and compare the result. If all have to be loaded
9092 from memory and this is a very wide item, it's possible this may
9093 be slower, but that's highly unlikely. */
bbf6f052 9094
b93a436e
JL
9095 part = gen_reg_rtx (word_mode);
9096 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
9097 for (i = 1; i < nwords && part != 0; i++)
9098 part = expand_binop (word_mode, ior_optab, part,
9099 operand_subword_force (op0, i, GET_MODE (op0)),
9100 part, 1, OPTAB_WIDEN);
bbf6f052 9101
b93a436e
JL
9102 if (part != 0)
9103 {
b30f05db
BS
9104 do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
9105 NULL_RTX, 0, if_false_label, if_true_label);
bbf6f052 9106
b93a436e
JL
9107 return;
9108 }
bbf6f052 9109
b93a436e
JL
9110 /* If we couldn't do the "or" simply, do this with a series of compares. */
9111 if (! if_false_label)
9112 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 9113
b93a436e 9114 for (i = 0; i < nwords; i++)
b30f05db
BS
9115 do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
9116 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
9117 if_false_label, NULL_RTX);
bbf6f052 9118
b93a436e
JL
9119 if (if_true_label)
9120 emit_jump (if_true_label);
0f41302f 9121
b93a436e
JL
9122 if (drop_through_label)
9123 emit_label (drop_through_label);
bbf6f052 9124}
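/* The "or all the words" test above, as a plain C sketch
   (a hypothetical helper):  */

static int
wide_is_zero_sketch (word, nwords)
     unsigned long *word;
     int nwords;
{
  unsigned long acc = word[0];
  int i;

  for (i = 1; i < nwords; i++)
    acc |= word[i];		/* IOR in every word.  */
  return acc == 0;		/* Zero iff every word was zero.  */
}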
b93a436e 9125\f
b30f05db 9126/* Generate code for a comparison of OP0 and OP1 with rtx code CODE.
b93a436e
JL
 9127   (OP0 and OP1 are passed in already computed as rtx)
 9128   and set (CC0) according to the result.
b30f05db 9129 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 9130
b93a436e 9131 We force a stack adjustment unless there are currently
b30f05db 9132 things pushed on the stack that aren't yet used.
ca695ac9 9133
b30f05db
BS
9134 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9135 compared.
9136
9137 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9138 size of MODE should be used. */
9139
9140rtx
9141compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
9142 register rtx op0, op1;
9143 enum rtx_code code;
9144 int unsignedp;
9145 enum machine_mode mode;
9146 rtx size;
9147 int align;
b93a436e 9148{
b30f05db 9149 rtx tem;
76bbe028 9150
b30f05db
BS
9151 /* If one operand is constant, make it the second one. Only do this
9152 if the other operand is not constant as well. */
ca695ac9 9153
b30f05db
BS
9154 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9155 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
bbf6f052 9156 {
b30f05db
BS
9157 tem = op0;
9158 op0 = op1;
9159 op1 = tem;
9160 code = swap_condition (code);
ca695ac9 9161 }
bbf6f052 9162
b30f05db 9163 if (flag_force_mem)
b93a436e 9164 {
b30f05db
BS
9165 op0 = force_not_mem (op0);
9166 op1 = force_not_mem (op1);
9167 }
bbf6f052 9168
b30f05db
BS
9169 do_pending_stack_adjust ();
9170
9171 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9172 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
9173 return tem;
9174
9175#if 0
9176 /* There's no need to do this now that combine.c can eliminate lots of
9177 sign extensions. This can be less efficient in certain cases on other
9178 machines. */
9179
9180 /* If this is a signed equality comparison, we can do it as an
9181 unsigned comparison since zero-extension is cheaper than sign
9182 extension and comparisons with zero are done as unsigned. This is
9183 the case even on machines that can do fast sign extension, since
9184 zero-extension is easier to combine with other operations than
9185 sign-extension is. If we are comparing against a constant, we must
9186 convert it to what it would look like unsigned. */
9187 if ((code == EQ || code == NE) && ! unsignedp
9188 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9189 {
9190 if (GET_CODE (op1) == CONST_INT
9191 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9192 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9193 unsignedp = 1;
b93a436e
JL
9194 }
9195#endif
b30f05db
BS
9196
9197 emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);
0f41302f 9198
b30f05db 9199 return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
ca695ac9 9200}
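/* Illustration of the canonicalization above: once the constant is
   moved to the second position the condition must be swapped, e.g.
   `5 < x' becomes `x > 5'.  A sketch of the mapping performed by
   swap_condition:  */

static enum rtx_code
swapped_code_sketch (code)
     enum rtx_code code;
{
  switch (code)
    {
    case LT:  return GT;
    case GT:  return LT;
    case LE:  return GE;
    case GE:  return LE;
    case LTU: return GTU;
    case GTU: return LTU;
    case LEU: return GEU;
    case GEU: return LEU;
    default:  return code;	/* EQ and NE are symmetric.  */
    }
}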
bbf6f052 9201
b30f05db 9202/* Like do_compare_and_jump but expects the values to compare as two rtx's.
b93a436e 9203 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 9204
b93a436e
JL
9205 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
9206 compared.
bbf6f052 9207
b93a436e
JL
9208 If ALIGN is non-zero, it is the alignment of this type; if zero, the
9209 size of MODE should be used. */
ca695ac9 9210
b30f05db
BS
9211void
9212do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
9213 if_false_label, if_true_label)
b93a436e
JL
9214 register rtx op0, op1;
9215 enum rtx_code code;
9216 int unsignedp;
9217 enum machine_mode mode;
9218 rtx size;
9219 int align;
b30f05db 9220 rtx if_false_label, if_true_label;
bbf6f052 9221{
b93a436e 9222 rtx tem;
b30f05db
BS
9223 int dummy_true_label = 0;
9224
9225 /* Reverse the comparison if that is safe and we want to jump if it is
9226 false. */
9227 if (! if_true_label && ! FLOAT_MODE_P (mode))
9228 {
9229 if_true_label = if_false_label;
9230 if_false_label = 0;
9231 code = reverse_condition (code);
9232 }
bbf6f052 9233
b93a436e
JL
9234 /* If one operand is constant, make it the second one. Only do this
9235 if the other operand is not constant as well. */
e7c33f54 9236
b93a436e
JL
9237 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
9238 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
ca695ac9 9239 {
b93a436e
JL
9240 tem = op0;
9241 op0 = op1;
9242 op1 = tem;
9243 code = swap_condition (code);
9244 }
bbf6f052 9245
b93a436e
JL
9246 if (flag_force_mem)
9247 {
9248 op0 = force_not_mem (op0);
9249 op1 = force_not_mem (op1);
9250 }
bbf6f052 9251
b93a436e 9252 do_pending_stack_adjust ();
ca695ac9 9253
b93a436e
JL
9254 if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
9255 && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
b30f05db
BS
9256 {
9257 if (tem == const_true_rtx)
9258 {
9259 if (if_true_label)
9260 emit_jump (if_true_label);
9261 }
9262 else
9263 {
9264 if (if_false_label)
9265 emit_jump (if_false_label);
9266 }
9267 return;
9268 }
ca695ac9 9269
b93a436e
JL
9270#if 0
9271 /* There's no need to do this now that combine.c can eliminate lots of
9272 sign extensions. This can be less efficient in certain cases on other
9273 machines. */
ca695ac9 9274
b93a436e
JL
9275 /* If this is a signed equality comparison, we can do it as an
9276 unsigned comparison since zero-extension is cheaper than sign
9277 extension and comparisons with zero are done as unsigned. This is
9278 the case even on machines that can do fast sign extension, since
9279 zero-extension is easier to combine with other operations than
9280 sign-extension is. If we are comparing against a constant, we must
9281 convert it to what it would look like unsigned. */
9282 if ((code == EQ || code == NE) && ! unsignedp
9283 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
9284 {
9285 if (GET_CODE (op1) == CONST_INT
9286 && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
9287 op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
9288 unsignedp = 1;
9289 }
9290#endif
ca695ac9 9291
b30f05db
BS
9292 if (! if_true_label)
9293 {
9294 dummy_true_label = 1;
9295 if_true_label = gen_label_rtx ();
9296 }
9297
9298 emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
9299 if_true_label);
9300
9301 if (if_false_label)
9302 emit_jump (if_false_label);
9303 if (dummy_true_label)
9304 emit_label (if_true_label);
9305}
9306
9307/* Generate code for a comparison expression EXP (including code to compute
9308 the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
9309 IF_TRUE_LABEL. One of the labels can be NULL_RTX, in which case the
9310 generated code will drop through.
9311 SIGNED_CODE should be the rtx operation for this comparison for
9312 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
9313
9314 We force a stack adjustment unless there are currently
9315 things pushed on the stack that aren't yet used. */
9316
9317static void
9318do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
9319 if_true_label)
9320 register tree exp;
9321 enum rtx_code signed_code, unsigned_code;
9322 rtx if_false_label, if_true_label;
9323{
9324 register rtx op0, op1;
9325 register tree type;
9326 register enum machine_mode mode;
9327 int unsignedp;
9328 enum rtx_code code;
9329
9330 /* Don't crash if the comparison was erroneous. */
9331 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
9332 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
9333 return;
9334
9335 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
9336 type = TREE_TYPE (TREE_OPERAND (exp, 0));
9337 mode = TYPE_MODE (type);
9338 unsignedp = TREE_UNSIGNED (type);
9339 code = unsignedp ? unsigned_code : signed_code;
9340
9341#ifdef HAVE_canonicalize_funcptr_for_compare
9342 /* If function pointers need to be "canonicalized" before they can
9343 be reliably compared, then canonicalize them. */
9344 if (HAVE_canonicalize_funcptr_for_compare
9345 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9346 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9347 == FUNCTION_TYPE))
9348 {
9349 rtx new_op0 = gen_reg_rtx (mode);
9350
9351 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
9352 op0 = new_op0;
9353 }
9354
9355 if (HAVE_canonicalize_funcptr_for_compare
9356 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9357 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9358 == FUNCTION_TYPE))
9359 {
9360 rtx new_op1 = gen_reg_rtx (mode);
9361
9362 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
9363 op1 = new_op1;
9364 }
9365#endif
9366
9367 /* Do any postincrements in the expression that was tested. */
9368 emit_queue ();
9369
9370 do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
9371 ((mode == BLKmode)
9372 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
9373 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT,
9374 if_false_label, if_true_label);
b93a436e
JL
9375}
9376\f
9377/* Generate code to calculate EXP using a store-flag instruction
9378 and return an rtx for the result. EXP is either a comparison
9379 or a TRUTH_NOT_EXPR whose operand is a comparison.
ca695ac9 9380
b93a436e 9381 If TARGET is nonzero, store the result there if convenient.
ca695ac9 9382
b93a436e
JL
9383 If ONLY_CHEAP is non-zero, only do this if it is likely to be very
9384 cheap.
ca695ac9 9385
b93a436e
JL
9386 Return zero if there is no suitable set-flag instruction
9387 available on this machine.
ca695ac9 9388
b93a436e
JL
9389 Once expand_expr has been called on the arguments of the comparison,
9390 we are committed to doing the store flag, since it is not safe to
9391 re-evaluate the expression. We emit the store-flag insn by calling
9392 emit_store_flag, but only expand the arguments if we have a reason
9393 to believe that emit_store_flag will be successful. If we think that
9394 it will, but it isn't, we have to simulate the store-flag with a
9395 set/jump/set sequence. */
ca695ac9 9396
b93a436e
JL
9397static rtx
9398do_store_flag (exp, target, mode, only_cheap)
9399 tree exp;
9400 rtx target;
9401 enum machine_mode mode;
9402 int only_cheap;
9403{
9404 enum rtx_code code;
9405 tree arg0, arg1, type;
9406 tree tem;
9407 enum machine_mode operand_mode;
9408 int invert = 0;
9409 int unsignedp;
9410 rtx op0, op1;
9411 enum insn_code icode;
9412 rtx subtarget = target;
381127e8 9413 rtx result, label;
ca695ac9 9414
b93a436e
JL
9415 /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
9416 result at the end. We can't simply invert the test since it would
9417 have already been inverted if it were valid. This case occurs for
9418 some floating-point comparisons. */
ca695ac9 9419
b93a436e
JL
9420 if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
9421 invert = 1, exp = TREE_OPERAND (exp, 0);
ca695ac9 9422
b93a436e
JL
9423 arg0 = TREE_OPERAND (exp, 0);
9424 arg1 = TREE_OPERAND (exp, 1);
9425 type = TREE_TYPE (arg0);
9426 operand_mode = TYPE_MODE (type);
9427 unsignedp = TREE_UNSIGNED (type);
ca695ac9 9428
b93a436e
JL
9429 /* We won't bother with BLKmode store-flag operations because it would mean
9430 passing a lot of information to emit_store_flag. */
9431 if (operand_mode == BLKmode)
9432 return 0;
ca695ac9 9433
b93a436e
JL
9434 /* We won't bother with store-flag operations involving function pointers
9435 when function pointers must be canonicalized before comparisons. */
9436#ifdef HAVE_canonicalize_funcptr_for_compare
9437 if (HAVE_canonicalize_funcptr_for_compare
9438 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
9439 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
9440 == FUNCTION_TYPE))
9441 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
9442 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
9443 == FUNCTION_TYPE))))
9444 return 0;
ca695ac9
JB
9445#endif
9446
b93a436e
JL
9447 STRIP_NOPS (arg0);
9448 STRIP_NOPS (arg1);
ca695ac9 9449
b93a436e
JL
9450 /* Get the rtx comparison code to use. We know that EXP is a comparison
9451 operation of some type. Some comparisons against 1 and -1 can be
9452 converted to comparisons with zero. Do so here so that the tests
9453 below will be aware that we have a comparison with zero. These
9454 tests will not catch constants in the first operand, but constants
9455 are rarely passed as the first operand. */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
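
  /* For example, "(x & 8) != 0" becomes "(x >> 3) & 1", and
     "(x & 8) == 0" becomes "((x >> 3) & 1) ^ 1".  */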

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */
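
      /* For example, testing bit 1 of "x >> 3" is the same as testing
         bit 4 of "x", provided bit 4 exists in the operand's type.  */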

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
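
      /* For example, "x < 0" needs only the sign bit, and "x == 0" or
         "x != 0" can be synthesized from an abs or ffs instruction when
         one is available; those are the cases tested for below.  */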
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is
     safe because, if emit_store_flag does anything, it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}

/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the highest valid value of INDEX, i.e. the length of
   the jump table minus one.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
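
/* For illustration (assuming the caller follows the conventions above):
   for "switch (i)" with case labels 3 through 7, the caller passes
   INDEX = i - 3 and RANGE = 4, so the in-range values select table
   entries 0 through 4.  */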

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
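
  /* For example, with a lower bound of 3, an original value of 2 yields
     (unsigned) (2 - 3), which wraps around to a very large number and
     therefore compares greater than RANGE.  */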

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
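
  /* The address computed above is TABLE_LABEL plus INDEX times the size
     of one table entry, i.e. the address of entry number INDEX of the
     dispatch table.  */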
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */