/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

22#include "config.h"
670ee920 23#include "system.h"
ca695ac9 24#include "machmode.h"
bbf6f052
RK
25#include "rtl.h"
26#include "tree.h"
ca695ac9 27#include "obstack.h"
bbf6f052 28#include "flags.h"
bf76bb5a 29#include "regs.h"
4ed67205 30#include "hard-reg-set.h"
3d195391 31#include "except.h"
bbf6f052
RK
32#include "function.h"
33#include "insn-flags.h"
34#include "insn-codes.h"
bbf6f052 35#include "insn-config.h"
d6f4ec51
KG
36/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37#include "expr.h"
bbf6f052
RK
38#include "recog.h"
39#include "output.h"
bbf6f052 40#include "typeclass.h"
ca55abae 41#include "defaults.h"
10f0ad3d 42#include "toplev.h"
d7db6646 43#include "ggc.h"
b1474bb7 44#include "tm_p.h"
bbf6f052
RK
45
46#define CEIL(x,y) (((x) + (y) - 1) / (y))
47
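/* Illustrative note, not part of the original source: CEIL rounds a
   division up, so CEIL (7, 4) is 2 -- seven bytes occupy two
   four-byte units.  */
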
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Don't check memory usage, since code is being emitted to check a memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */
struct move_by_pieces
{
  rtx to;			/* Destination of the move (a MEM).  */
  rtx to_addr;			/* Address of the destination.  */
  int autinc_to;		/* Nonzero if TO_ADDR is an autoincrement.  */
  int explicit_inc_to;		/* +1/-1 if explicit add insns are needed.  */
  int to_struct;		/* Value for MEM_IN_STRUCT_P of TO.  */
  int to_readonly;		/* Value for RTX_UNCHANGING_P of TO.  */
  rtx from;			/* Source of the move (a MEM).  */
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int from_readonly;
  int len;			/* Bytes remaining to be moved.  */
  int offset;			/* Current offset into the blocks.  */
  int reverse;			/* Nonzero to move from high addresses down.  */
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;

static rtx get_push_address	PROTO ((int));

static rtx enqueue_insn	PROTO((rtx, rtx));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1	PROTO((rtx (*) (rtx, ...), enum machine_mode,
				       struct move_by_pieces *));
static void clear_by_pieces	PROTO((rtx, int, int));
static void clear_by_pieces_1	PROTO((rtx (*) (rtx, ...),
				       enum machine_mode,
				       struct clear_by_pieces *));
static int is_zeros_p		PROTO((tree));
static int mostly_zeros_p	PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					    tree, tree, int, int));
static void store_constructor	PROTO((tree, rtx, int, int));
static rtx store_field		PROTO((rtx, int, int, enum machine_mode, tree,
				       enum machine_mode, int, int,
				       int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p		PROTO((rtx, tree, int));
static int fixed_type_p		PROTO((tree));
static rtx var_rtx		PROTO((tree));
static int readonly_fields_p	PROTO((tree));
static rtx expand_expr_unaligned PROTO((tree, int *));
static rtx expand_increment	PROTO((tree, int, int));
static void preexpand_calls	PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_compare_and_jump	PROTO((tree, enum rtx_code, enum rtx_code, rtx, rtx));
static rtx do_store_flag	PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
                                       (SIZE, ALIGN) < MOVE_RATIO)
#endif
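
/* Illustrative arithmetic, not part of the original source: on a
   32-bit target with MOVE_MAX == 4 and the default MOVE_RATIO of 15,
   a word-aligned 16-byte copy costs move_by_pieces_ninsns (16, 4) == 4
   SImode moves, so MOVE_BY_PIECES_P holds and emit_block_move expands
   the copy inline instead of using a movstr pattern or a libcall.  */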

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx_REG (mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  end_sequence ();
  obfree (free_point);
}
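
/* Illustrative note, an addition to the original source: after this
   probe, direct_load[(int) mode] is 1 only if some hard register can
   be loaded from memory in MODE by a single recognized move insn.
   convert_move, for instance, tests direct_load before referring to
   memory directly in a narrower mode.  */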

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  current_function->expr
    = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

void
free_expr_status (f)
     struct function *f;
{
  free (f->expr);
  f->expr = NULL;
}

/* Small sanity check that the queue is empty at the end of a function.  */
void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

          RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
          MEM_COPY_ATTRIBUTES (new, x);
          MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (new));
              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }
          return new;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
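
/* Sketch of the queue protocol (an illustrative addition, not part of
   the original source; the real callers are expand_increment and
   expand_expr).  Expanding VAR++ where the old value is still needed
   goes roughly like this, with hypothetical locals Q and OLD:

       q = enqueue_insn (var, gen_add2_insn (var, const1_rtx));
       ...
       old = protect_from_queue (q, 0);
       ...
       emit_queue ();

   Until the queued insn has been emitted, protect_from_queue returns
   VAR itself; afterwards it returns a temporary loaded with the
   pre-increment value just before QUEUED_INSN.  */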

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
        {
          QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
          emit_insn (QUEUED_BODY (p));
        }
      else
        QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
        {
          /* Try converting directly if the insn is supported.  */
          if ((code = can_extend_p (to_mode, from_mode, 0))
              != CODE_FOR_nothing)
            {
              emit_unop_insn (code, to, from, UNKNOWN);
              return;
            }
        }

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        default:
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
                                       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */			/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
        {
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
        {
          from = convert_to_mode (QImode, from, unsignedp);
          from_mode = QImode;
        }
      else
        {
#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
            {
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpqiqi2 */
          abort ();
        }
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
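
/* Usage sketch (an illustrative addition, not part of the original
   source), assuming SRC is a QImode rtx:

       rtx dst = gen_reg_rtx (SImode);
       convert_move (dst, src, 1);

   This zero-extends SRC into DST; passing UNSIGNEDP == 0 would
   sign-extend instead.  */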

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return GEN_INT (val);
        }

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
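
/* Worked example (an illustrative addition): on a host with 32-bit
   HOST_WIDE_INT, convert_modes (SImode, QImode, GEN_INT (-1), 1)
   zero-extends through the QImode width (val &= 0xff) and returns
   (const_int 255); with UNSIGNEDP == 0 it would sign-extend and
   return the original -1.  */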
\f

/* This macro determines the largest unit size that move_by_pieces
   can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);
  data.to_readonly = RTX_UNCHANGING_P (to);
  data.from_readonly = RTX_UNCHANGING_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
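
/* Worked example (an illustrative addition): with MOVE_MAX == 4 and
   word-aligned operands, move_by_pieces_ninsns (7, 4) counts one
   SImode move (7 / 4, leaving 3 bytes), one HImode move (3 / 2,
   leaving 1) and one QImode move, and so returns 3.  */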

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx_MEM (mode, data->to_addr)
             : copy_rtx (change_address (data->to, mode,
                                         plus_constant (data->to_addr,
                                                        data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;
      RTX_UNCHANGING_P (to1) = data->to_readonly;

      from1
        = (data->autinc_from
           ? gen_rtx_MEM (mode, data->from_addr)
           : copy_rtx (change_address (data->from, mode,
                                       plus_constant (data->from_addr,
                                                      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;
      RTX_UNCHANGING_P (from1) = data->from_readonly;

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, from1));
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];
          insn_operand_predicate_fn pred;

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && ((GET_CODE (size) == CONST_INT
                   && ((unsigned HOST_WIDE_INT) INTVAL (size)
                       <= (GET_MODE_MASK (mode) >> 1)))
                  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
              && ((pred = insn_data[(int) code].operand[0].predicate) == 0
                  || (*pred) (x, BLKmode))
              && ((pred = insn_data[(int) code].operand[1].predicate) == 0
                  || (*pred) (y, BLKmode))
              && ((pred = insn_data[(int) code].operand[3].predicate) == 0
                  || (*pred) (opalign, VOIDmode)))
            {
              rtx op2;
              rtx last = get_last_insn ();
              rtx pat;

              op2 = convert_to_mode (mode, size, 1);
              pred = insn_data[(int) code].operand[2].predicate;
              if (pred != 0 && ! (*pred) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  return 0;
                }
              else
                delete_insns_since (last);
            }
        }

      /* X, Y, or SIZE may have been passed through protect_from_queue.

         It is unsafe to save the value generated by protect_from_queue
         and reuse it later.  Consider what happens if emit_queue is
         called before the return value from protect_from_queue is used.

         Expansion of the CALL_EXPR below will call emit_queue before
         we are finished emitting RTL for argument setup.  So if we are
         not careful we could get the wrong value for an argument.

         To avoid this problem we go ahead and emit code to copy X, Y &
         SIZE into new pseudos.  We can then place those new pseudos
         into an RTL_EXPR and use them later, even after a call to
         emit_queue.

         Note this is not strictly needed for library calls since they
         do not call emit_queue before loading their arguments.  However,
         we may need to have library calls call emit_queue in the future
         since failing to do so could cause problems for targets which
         define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
                              TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
         memcpy in this context.

         This could be a user call to memcpy and the user may wish to
         examine the return value from memcpy.

         For targets where libcalls and normal calls have different conventions
         for returning pointers, we could end up generating incorrect code.

         So instead of using a libcall sequence we build up a suitable
         CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
        {
          tree fntype;

          /* This was copied from except.c, I don't know if all this is
             necessary in this context or not.  */
          fn = get_identifier ("memcpy");
          push_obstacks_nochange ();
          end_temporary_allocation ();
          fntype = build_pointer_type (void_type_node);
          fntype = build_function_type (fntype, NULL_TREE);
          fn = build_decl (FUNCTION_DECL, fn, fntype);
          ggc_add_tree_root (&fn, 1);
          DECL_EXTERNAL (fn) = 1;
          TREE_PUBLIC (fn) = 1;
          DECL_ARTIFICIAL (fn) = 1;
          make_decl_rtl (fn, NULL_PTR, 1);
          assemble_external (fn);
          pop_obstacks ();
        }

      /* We need to make an argument list for the function call.

         memcpy has three arguments, the first two are void * addresses and
         the last is a size_t byte count for the copy.  */
      arg_list
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
        = build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                         call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, y, Pmode, x, Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node), size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
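
/* Usage sketch (an illustrative addition, not part of the original
   source): copying a 32-byte BLKmode temporary with word alignment
   might be written as

       emit_block_move (dst_mem, src_mem, GEN_INT (32), 4);

   A constant size satisfying MOVE_BY_PIECES_P is expanded inline;
   otherwise a movstrM pattern or a memcpy/bcopy call is emitted, and
   only the memcpy path yields a nonzero return value.  */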
1754\f
1755/* Copy all or part of a value X into registers starting at REGNO.
1756 The number of registers to be filled is NREGS. */
1757
1758void
1759move_block_to_reg (regno, x, nregs, mode)
1760 int regno;
1761 rtx x;
1762 int nregs;
1763 enum machine_mode mode;
1764{
1765 int i;
381127e8
RL
1766#ifdef HAVE_load_multiple
1767 rtx pat;
1768 rtx last;
1769#endif
bbf6f052 1770
72bb9717
RK
1771 if (nregs == 0)
1772 return;
1773
bbf6f052
RK
1774 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1775 x = validize_mem (force_const_mem (mode, x));
1776
1777 /* See if the machine can do this with a load multiple insn. */
1778#ifdef HAVE_load_multiple
c3a02afe 1779 if (HAVE_load_multiple)
bbf6f052 1780 {
c3a02afe 1781 last = get_last_insn ();
38a448ca 1782 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1783 GEN_INT (nregs));
1784 if (pat)
1785 {
1786 emit_insn (pat);
1787 return;
1788 }
1789 else
1790 delete_insns_since (last);
bbf6f052 1791 }
1792#endif
1793
1794 for (i = 0; i < nregs; i++)
38a448ca 1795 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1796 operand_subword_force (x, i, mode));
1797}
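/* Illustrative sketch, not compiled: on a 32-bit target, copying a
   DImode value X into two consecutive hard registers starting at the
   hypothetical register number 3 fills regs 3 and 4, either via a
   load_multiple insn or via two word moves.  */
#if 0
  move_block_to_reg (3, x, 2, DImode);	/* reg numbers are hypothetical */
#endif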
1798
1799/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1800 The number of registers to be filled is NREGS. SIZE indicates the number
1801 of bytes in the object X. */
1802
1803
1804void
0040593d 1805move_block_from_reg (regno, x, nregs, size)
1806 int regno;
1807 rtx x;
1808 int nregs;
0040593d 1809 int size;
1810{
1811 int i;
1812#ifdef HAVE_store_multiple
1813 rtx pat;
1814 rtx last;
1815#endif
58a32c5c 1816 enum machine_mode mode;
bbf6f052 1817
1818 /* If SIZE is that of a mode no bigger than a word, just use that
1819 mode's store operation. */
1820 if (size <= UNITS_PER_WORD
1821 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1822 {
1823 emit_move_insn (change_address (x, mode, NULL),
38a448ca 1824 gen_rtx_REG (mode, regno));
1825 return;
1826 }
1827
0040593d 1828 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1829 to the left before storing to memory. Note that the previous test
1830 doesn't handle all cases (e.g. SIZE == 3). */
1831 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1832 {
1833 rtx tem = operand_subword (x, 0, 1, BLKmode);
1834 rtx shift;
1835
1836 if (tem == 0)
1837 abort ();
1838
1839 shift = expand_shift (LSHIFT_EXPR, word_mode,
38a448ca 1840 gen_rtx_REG (word_mode, regno),
1841 build_int_2 ((UNITS_PER_WORD - size)
1842 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1843 emit_move_insn (tem, shift);
1844 return;
1845 }
1846
1847 /* See if the machine can do this with a store multiple insn. */
1848#ifdef HAVE_store_multiple
c3a02afe 1849 if (HAVE_store_multiple)
bbf6f052 1850 {
c3a02afe 1851 last = get_last_insn ();
38a448ca 1852 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1853 GEN_INT (nregs));
1854 if (pat)
1855 {
1856 emit_insn (pat);
1857 return;
1858 }
1859 else
1860 delete_insns_since (last);
bbf6f052 1861 }
1862#endif
1863
1864 for (i = 0; i < nregs; i++)
1865 {
1866 rtx tem = operand_subword (x, i, 1, BLKmode);
1867
1868 if (tem == 0)
1869 abort ();
1870
38a448ca 1871 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1872 }
1873}
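/* For example, with UNITS_PER_WORD == 4 and SIZE == 3 on a
   BYTES_BIG_ENDIAN target, the shift above moves the register left by
   (4 - 3) * 8 = 8 bits, so the three significant bytes land in the
   low-order memory addresses of X.  */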
1874
1875/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1876 registers represented by a PARALLEL. SSIZE represents the total size of
1877 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
 1878 SRC in bytes. */
 1879/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1880 the balance will be in what would be the low-order memory addresses, i.e.
1881 left justified for big endian, right justified for little endian. This
1882 happens to be true for the targets currently using this support. If this
1883 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1884 would be needed. */
1885
1886void
1887emit_group_load (dst, orig_src, ssize, align)
1888 rtx dst, orig_src;
1889 int align, ssize;
fffa9c1d 1890{
1891 rtx *tmps, src;
1892 int start, i;
fffa9c1d 1893
aac5cc16 1894 if (GET_CODE (dst) != PARALLEL)
1895 abort ();
1896
1897 /* Check for a NULL entry, used to indicate that the parameter goes
1898 both on the stack and in registers. */
1899 if (XEXP (XVECEXP (dst, 0, 0), 0))
1900 start = 0;
fffa9c1d 1901 else
1902 start = 1;
1903
1904 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1905
1906 /* If we won't be loading directly from memory, protect the real source
1907 from strange tricks we might play. */
1908 src = orig_src;
1909 if (GET_CODE (src) != MEM)
1910 {
 1911 if (GET_MODE (src) == VOIDmode)
1912 src = gen_reg_rtx (GET_MODE (dst));
1913 else
1914 src = gen_reg_rtx (GET_MODE (orig_src));
1915 emit_move_insn (src, orig_src);
1916 }
1917
1918 /* Process the pieces. */
1919 for (i = start; i < XVECLEN (dst, 0); i++)
1920 {
1921 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1922 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1923 int bytelen = GET_MODE_SIZE (mode);
1924 int shift = 0;
1925
1926 /* Handle trailing fragments that run over the size of the struct. */
1927 if (ssize >= 0 && bytepos + bytelen > ssize)
1928 {
1929 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1930 bytelen = ssize - bytepos;
1931 if (bytelen <= 0)
1932 abort();
1933 }
1934
1935 /* Optimize the access just a bit. */
1936 if (GET_CODE (src) == MEM
1937 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1938 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1939 && bytelen == GET_MODE_SIZE (mode))
1940 {
1941 tmps[i] = gen_reg_rtx (mode);
1942 emit_move_insn (tmps[i],
1943 change_address (src, mode,
1944 plus_constant (XEXP (src, 0),
1945 bytepos)));
fffa9c1d 1946 }
1947 else if (GET_CODE (src) == CONCAT)
1948 {
1949 if (bytepos == 0
1950 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1951 tmps[i] = XEXP (src, 0);
1952 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1953 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1954 tmps[i] = XEXP (src, 1);
1955 else
1956 abort ();
1957 }
fffa9c1d 1958 else
1959 {
1960 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1961 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1962 mode, mode, align, ssize);
1963 }
fffa9c1d 1964
1965 if (BYTES_BIG_ENDIAN && shift)
1966 {
 1967 tmps[i] = expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
 1968 tmps[i], 0, OPTAB_WIDEN);
1969 }
fffa9c1d 1970 }
1971 emit_queue();
1972
1973 /* Copy the extracted pieces into the proper (probable) hard regs. */
1974 for (i = start; i < XVECLEN (dst, 0); i++)
1975 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1976}
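/* Illustrative sketch, not part of the compiler proper: a PARALLEL
   describing an 8-byte value split across the hypothetical hard
   registers 4 and 5 (SImode words at byte offsets 0 and 4), loaded
   from a hypothetical BLKmode MEM SRC with word (4-byte) alignment.  */
#if 0
  rtx dst
    = gen_rtx_PARALLEL
      (VOIDmode,
       gen_rtvec (2,
		  gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (SImode, 4),
				     GEN_INT (0)),
		  gen_rtx_EXPR_LIST (VOIDmode, gen_rtx_REG (SImode, 5),
				     GEN_INT (4))));
  emit_group_load (dst, src, 8, 4);	/* src is a hypothetical MEM */
#endif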
1977
1978/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1979 registers represented by a PARALLEL. SSIZE represents the total size of
1980 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1981
1982void
1983emit_group_store (orig_dst, src, ssize, align)
1984 rtx orig_dst, src;
1985 int ssize, align;
fffa9c1d 1986{
1987 rtx *tmps, dst;
1988 int start, i;
fffa9c1d 1989
aac5cc16 1990 if (GET_CODE (src) != PARALLEL)
1991 abort ();
1992
1993 /* Check for a NULL entry, used to indicate that the parameter goes
1994 both on the stack and in registers. */
1995 if (XEXP (XVECEXP (src, 0, 0), 0))
1996 start = 0;
fffa9c1d 1997 else
1998 start = 1;
1999
2000 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
fffa9c1d 2001
2002 /* Copy the (probable) hard regs into pseudos. */
2003 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 2004 {
2005 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2006 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2007 emit_move_insn (tmps[i], reg);
2008 }
2009 emit_queue();
fffa9c1d 2010
2011 /* If we won't be storing directly into memory, protect the real destination
2012 from strange tricks we might play. */
2013 dst = orig_dst;
2014 if (GET_CODE (dst) == PARALLEL)
2015 {
2016 rtx temp;
2017
2018 /* We can get a PARALLEL dst if there is a conditional expression in
2019 a return statement. In that case, the dst and src are the same,
2020 so no action is necessary. */
2021 if (rtx_equal_p (dst, src))
2022 return;
2023
2024 /* It is unclear if we can ever reach here, but we may as well handle
2025 it. Allocate a temporary, and split this into a store/load to/from
2026 the temporary. */
2027
2028 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2029 emit_group_store (temp, src, ssize, align);
2030 emit_group_load (dst, temp, ssize, align);
2031 return;
2032 }
2033 else if (GET_CODE (dst) != MEM)
2034 {
2035 dst = gen_reg_rtx (GET_MODE (orig_dst));
2036 /* Make life a bit easier for combine. */
2037 emit_move_insn (dst, const0_rtx);
2038 }
2039 else if (! MEM_IN_STRUCT_P (dst))
2040 {
2041 /* store_bit_field requires that memory operations have
2042 mem_in_struct_p set; we might not. */
fffa9c1d 2043
aac5cc16 2044 dst = copy_rtx (orig_dst);
c6df88cb 2045 MEM_SET_IN_STRUCT_P (dst, 1);
2046 }
2047
2048 /* Process the pieces. */
2049 for (i = start; i < XVECLEN (src, 0); i++)
2050 {
2051 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2052 enum machine_mode mode = GET_MODE (tmps[i]);
2053 int bytelen = GET_MODE_SIZE (mode);
2054
2055 /* Handle trailing fragments that run over the size of the struct. */
2056 if (ssize >= 0 && bytepos + bytelen > ssize)
71bc0330 2057 {
2058 if (BYTES_BIG_ENDIAN)
2059 {
2060 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
 2061 tmps[i] = expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
 2062 tmps[i], 0, OPTAB_WIDEN);
2063 }
2064 bytelen = ssize - bytepos;
71bc0330 2065 }
fffa9c1d 2066
2067 /* Optimize the access just a bit. */
2068 if (GET_CODE (dst) == MEM
2069 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2070 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2071 && bytelen == GET_MODE_SIZE (mode))
2072 {
2073 emit_move_insn (change_address (dst, mode,
2074 plus_constant (XEXP (dst, 0),
2075 bytepos)),
2076 tmps[i]);
2077 }
2078 else
2079 {
2080 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2081 mode, tmps[i], align, ssize);
2082 }
fffa9c1d 2083 }
2084 emit_queue();
2085
2086 /* Copy from the pseudo into the (probable) hard reg. */
2087 if (GET_CODE (dst) == REG)
2088 emit_move_insn (orig_dst, dst);
2089}
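/* The store direction mirrors the sketch after emit_group_load: under
   the same hypothetical assumptions, emit_group_store (mem, parallel,
   8, 4) scatters the two SImode pieces back to byte offsets 0 and 4
   of the destination.  */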
2090
2091/* Generate code to copy a BLKmode object of TYPE out of a
2092 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2093 is null, a stack temporary is created. TGTBLK is returned.
2094
2095 The primary purpose of this routine is to handle functions
2096 that return BLKmode structures in registers. Some machines
2097 (the PA for example) want to return all small structures
2098 in registers regardless of the structure's alignment.
2099 */
2100
2101rtx
 2102copy_blkmode_from_reg (tgtblk, srcreg, type)
2103 rtx tgtblk;
2104 rtx srcreg;
2105 tree type;
2106{
2107 int bytes = int_size_in_bytes (type);
2108 rtx src = NULL, dst = NULL;
c84e2712 2109 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2110 int bitpos, xbitpos, big_endian_correction = 0;
2111
2112 if (tgtblk == 0)
2113 {
2114 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
c6df88cb 2115 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2116 preserve_temp_slots (tgtblk);
2117 }
2118
2119 /* This code assumes srcreg is at least a full word. If it isn't,
2120 copy it into a new pseudo which is a full word. */
2121 if (GET_MODE (srcreg) != BLKmode
2122 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2123 srcreg = convert_to_mode (word_mode, srcreg,
2124 TREE_UNSIGNED (type));
2125
2126 /* Structures whose size is not a multiple of a word are aligned
2127 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2128 machine, this means we must skip the empty high order bytes when
2129 calculating the bit offset. */
2130 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2131 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2132 * BITS_PER_UNIT));
2133
 2134 /* Copy the structure BITSIZE bits at a time.
2135
2136 We could probably emit more efficient code for machines
2137 which do not use strict alignment, but it doesn't seem
2138 worth the effort at the current time. */
2139 for (bitpos = 0, xbitpos = big_endian_correction;
2140 bitpos < bytes * BITS_PER_UNIT;
2141 bitpos += bitsize, xbitpos += bitsize)
2142 {
2143
2144 /* We need a new source operand each time xbitpos is on a
2145 word boundary and when xbitpos == big_endian_correction
2146 (the first time through). */
2147 if (xbitpos % BITS_PER_WORD == 0
2148 || xbitpos == big_endian_correction)
2149 src = operand_subword_force (srcreg,
2150 xbitpos / BITS_PER_WORD,
2151 BLKmode);
2152
2153 /* We need a new destination operand each time bitpos is on
2154 a word boundary. */
2155 if (bitpos % BITS_PER_WORD == 0)
2156 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2157
2158 /* Use xbitpos for the source extraction (right justified) and
 2159 bitpos for the destination store (left justified). */
2160 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2161 extract_bit_field (src, bitsize,
2162 xbitpos % BITS_PER_WORD, 1,
2163 NULL_RTX, word_mode,
2164 word_mode,
2165 bitsize / BITS_PER_UNIT,
2166 BITS_PER_WORD),
2167 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2168 }
2169 return tgtblk;
2170}
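/* A worked example of the correction above, assuming BITS_PER_WORD
   == 32 and a 6-byte structure: 6 % 4 == 2, so big_endian_correction
   = 32 - 2 * 8 = 16, and the first extraction starts 16 bits into
   SRCREG, skipping the unused high-order bytes.  */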
2171
2172
2173/* Add a USE expression for REG to the (possibly empty) list pointed
2174 to by CALL_FUSAGE. REG must denote a hard register. */
2175
2176void
2177use_reg (call_fusage, reg)
2178 rtx *call_fusage, reg;
2179{
2180 if (GET_CODE (reg) != REG
2181 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2182 abort();
2183
2184 *call_fusage
2185 = gen_rtx_EXPR_LIST (VOIDmode,
2186 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2187}
2188
2189/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2190 starting at REGNO. All of these registers must be hard registers. */
2191
2192void
2193use_regs (call_fusage, regno, nregs)
2194 rtx *call_fusage;
2195 int regno;
2196 int nregs;
2197{
0304dfbb 2198 int i;
bbf6f052 2199
2200 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2201 abort ();
2202
2203 for (i = 0; i < nregs; i++)
38a448ca 2204 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
bbf6f052 2205}
2206
2207/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2208 PARALLEL REGS. This is for calls that pass values in multiple
2209 non-contiguous locations. The Irix 6 ABI has examples of this. */
2210
2211void
2212use_group_regs (call_fusage, regs)
2213 rtx *call_fusage;
2214 rtx regs;
2215{
2216 int i;
2217
2218 for (i = 0; i < XVECLEN (regs, 0); i++)
2219 {
2220 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2221
2222 /* A NULL entry means the parameter goes both on the stack and in
2223 registers. This can also be a MEM for targets that pass values
2224 partially on the stack and partially in registers. */
e9a25f70 2225 if (reg != 0 && GET_CODE (reg) == REG)
2226 use_reg (call_fusage, reg);
2227 }
fffa9c1d 2228}
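/* Illustrative sketch, not compiled: recording that a call uses two
   consecutive argument registers, assuming the hypothetical hard regs
   4 and 5; the resulting EXPR_LIST would be attached to the call's
   CALL_INSN_FUNCTION_USAGE.  */
#if 0
  rtx call_fusage = 0;
  use_regs (&call_fusage, 4, 2);	/* adds USEs of regs 4 and 5 */
#endif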
bbf6f052 2229\f
2230/* Generate several move instructions to clear LEN bytes of block TO.
2231 (A MEM rtx with BLKmode). The caller must pass TO through
 2232 protect_from_queue before calling. ALIGN (in bytes) is the maximum alignment
2233 we can assume. */
2234
2235static void
2236clear_by_pieces (to, len, align)
2237 rtx to;
2238 int len, align;
2239{
2240 struct clear_by_pieces data;
2241 rtx to_addr = XEXP (to, 0);
2242 int max_size = MOVE_MAX_PIECES + 1;
2243 enum machine_mode mode = VOIDmode, tmode;
2244 enum insn_code icode;
2245
2246 data.offset = 0;
2247 data.to_addr = to_addr;
2248 data.to = to;
2249 data.autinc_to
2250 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2251 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2252
2253 data.explicit_inc_to = 0;
2254 data.reverse
2255 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2256 if (data.reverse) data.offset = len;
2257 data.len = len;
2258
2259 data.to_struct = MEM_IN_STRUCT_P (to);
2260
 2261 /* If clearing requires more than two move insns,
2262 copy addresses to registers (to make displacements shorter)
2263 and use post-increment if available. */
2264 if (!data.autinc_to
2265 && move_by_pieces_ninsns (len, align) > 2)
2266 {
2267 /* Determine the main mode we'll be using */
2268 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2269 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2270 if (GET_MODE_SIZE (tmode) < max_size)
2271 mode = tmode;
2272
2273 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2274 {
2275 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2276 data.autinc_to = 1;
2277 data.explicit_inc_to = -1;
2278 }
fbe1758d 2279 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2280 {
2281 data.to_addr = copy_addr_to_reg (to_addr);
2282 data.autinc_to = 1;
2283 data.explicit_inc_to = 1;
2284 }
2285 if (!data.autinc_to && CONSTANT_P (to_addr))
2286 data.to_addr = copy_addr_to_reg (to_addr);
2287 }
2288
2289 if (! SLOW_UNALIGNED_ACCESS
2290 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2291 align = MOVE_MAX;
2292
2293 /* First move what we can in the largest integer mode, then go to
2294 successively smaller modes. */
2295
2296 while (max_size > 1)
2297 {
2298 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2299 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2300 if (GET_MODE_SIZE (tmode) < max_size)
2301 mode = tmode;
2302
2303 if (mode == VOIDmode)
2304 break;
2305
2306 icode = mov_optab->handlers[(int) mode].insn_code;
2307 if (icode != CODE_FOR_nothing
2308 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2309 GET_MODE_SIZE (mode)))
2310 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2311
2312 max_size = GET_MODE_SIZE (mode);
2313 }
2314
2315 /* The code above should have handled everything. */
2316 if (data.len != 0)
2317 abort ();
2318}
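/* For example, clearing LEN == 7 suitably aligned bytes on a 32-bit
   target emits one SImode store of zero, then one HImode store, then
   one QImode store, as the loop above steps down through the integer
   modes.  */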
2319
2320/* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2321 with move instructions for mode MODE. GENFUN is the gen_... function
2322 to make a move insn for that mode. DATA has all the other info. */
2323
2324static void
2325clear_by_pieces_1 (genfun, mode, data)
eae4b970 2326 rtx (*genfun) PROTO ((rtx, ...));
2327 enum machine_mode mode;
2328 struct clear_by_pieces *data;
2329{
2330 register int size = GET_MODE_SIZE (mode);
2331 register rtx to1;
2332
2333 while (data->len >= size)
2334 {
2335 if (data->reverse) data->offset -= size;
2336
2337 to1 = (data->autinc_to
38a448ca 2338 ? gen_rtx_MEM (mode, data->to_addr)
2339 : copy_rtx (change_address (data->to, mode,
2340 plus_constant (data->to_addr,
2341 data->offset))));
2342 MEM_IN_STRUCT_P (to1) = data->to_struct;
2343
940da324 2344 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
9de08200 2345 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
9de08200
RK
2346
2347 emit_insn ((*genfun) (to1, const0_rtx));
940da324 2348 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2349 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2350
2351 if (! data->reverse) data->offset += size;
2352
2353 data->len -= size;
2354 }
2355}
2356\f
bbf6f052 2357/* Write zeros through the storage of OBJECT.
9de08200 2358 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
e9a25f70 2359 the maximum alignment we can assume, measured in bytes.
bbf6f052 2360
e9a25f70
JL
2361 If we call a function that returns the length of the block, return it. */
2362
2363rtx
9de08200 2364clear_storage (object, size, align)
bbf6f052 2365 rtx object;
4c08eef0 2366 rtx size;
9de08200 2367 int align;
bbf6f052 2368{
2369#ifdef TARGET_MEM_FUNCTIONS
2370 static tree fn;
2371 tree call_expr, arg_list;
2372#endif
2373 rtx retval = 0;
2374
2375 if (GET_MODE (object) == BLKmode)
2376 {
2377 object = protect_from_queue (object, 1);
2378 size = protect_from_queue (size, 0);
2379
2380 if (GET_CODE (size) == CONST_INT
fbe1758d 2381 && MOVE_BY_PIECES_P (INTVAL (size), align))
2382 clear_by_pieces (object, INTVAL (size), align);
2383
2384 else
2385 {
2386 /* Try the most limited insn first, because there's no point
2387 including more than one in the machine description unless
2388 the more limited one has some advantage. */
2389
2390 rtx opalign = GEN_INT (align);
2391 enum machine_mode mode;
2392
2393 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2394 mode = GET_MODE_WIDER_MODE (mode))
2395 {
2396 enum insn_code code = clrstr_optab[(int) mode];
a995e389 2397 insn_operand_predicate_fn pred;
2398
2399 if (code != CODE_FOR_nothing
2400 /* We don't need MODE to be narrower than
2401 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2402 the mode mask, as it is returned by the macro, it will
2403 definitely be less than the actual mode mask. */
2404 && ((GET_CODE (size) == CONST_INT
2405 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 2406 <= (GET_MODE_MASK (mode) >> 1)))
9de08200 2407 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2408 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2409 || (*pred) (object, BLKmode))
2410 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2411 || (*pred) (opalign, VOIDmode)))
2412 {
2413 rtx op1;
2414 rtx last = get_last_insn ();
2415 rtx pat;
2416
2417 op1 = convert_to_mode (mode, size, 1);
2418 pred = insn_data[(int) code].operand[1].predicate;
2419 if (pred != 0 && ! (*pred) (op1, mode))
2420 op1 = copy_to_mode_reg (mode, op1);
2421
2422 pat = GEN_FCN ((int) code) (object, op1, opalign);
2423 if (pat)
2424 {
2425 emit_insn (pat);
e9a25f70 2426 return 0;
2427 }
2428 else
2429 delete_insns_since (last);
2430 }
2431 }
2432
4bc973ae 2433 /* OBJECT or SIZE may have been passed through protect_from_queue.
9de08200 2434
2435 It is unsafe to save the value generated by protect_from_queue
2436 and reuse it later. Consider what happens if emit_queue is
2437 called before the return value from protect_from_queue is used.
52cf7115 2438
2439 Expansion of the CALL_EXPR below will call emit_queue before
2440 we are finished emitting RTL for argument setup. So if we are
2441 not careful we could get the wrong value for an argument.
52cf7115 2442
2443 To avoid this problem we go ahead and emit code to copy OBJECT
2444 and SIZE into new pseudos. We can then place those new pseudos
2445 into an RTL_EXPR and use them later, even after a call to
2446 emit_queue.
52cf7115 2447
2448 Note this is not strictly needed for library calls since they
2449 do not call emit_queue before loading their arguments. However,
2450 we may need to have library calls call emit_queue in the future
2451 since failing to do so could cause problems for targets which
2452 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2453 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 2454
2455#ifdef TARGET_MEM_FUNCTIONS
2456 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2457#else
2458 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2459 TREE_UNSIGNED (integer_type_node));
f3dc586a 2460 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
4bc973ae 2461#endif
52cf7115 2462
52cf7115 2463
2464#ifdef TARGET_MEM_FUNCTIONS
2465 /* It is incorrect to use the libcall calling conventions to call
2466 memset in this context.
52cf7115 2467
2468 This could be a user call to memset and the user may wish to
2469 examine the return value from memset.
52cf7115 2470
2471 For targets where libcalls and normal calls have different
2472 conventions for returning pointers, we could end up generating
2473 incorrect code.
2474
2475 So instead of using a libcall sequence we build up a suitable
2476 CALL_EXPR and expand the call in the normal fashion. */
2477 if (fn == NULL_TREE)
2478 {
2479 tree fntype;
2480
 2481 /* This was copied from except.c; I don't know if all this is
2482 necessary in this context or not. */
2483 fn = get_identifier ("memset");
2484 push_obstacks_nochange ();
2485 end_temporary_allocation ();
2486 fntype = build_pointer_type (void_type_node);
2487 fntype = build_function_type (fntype, NULL_TREE);
2488 fn = build_decl (FUNCTION_DECL, fn, fntype);
d7db6646 2489 ggc_add_tree_root (&fn, 1);
2490 DECL_EXTERNAL (fn) = 1;
2491 TREE_PUBLIC (fn) = 1;
2492 DECL_ARTIFICIAL (fn) = 1;
2493 make_decl_rtl (fn, NULL_PTR, 1);
2494 assemble_external (fn);
2495 pop_obstacks ();
2496 }
2497
2498 /* We need to make an argument list for the function call.
2499
 2500 memset has three arguments: the first is a void * address, the
 2501 second an integer with the initialization value, and the last a
 2502 size_t byte count for the set. */
2503 arg_list
2504 = build_tree_list (NULL_TREE,
2505 make_tree (build_pointer_type (void_type_node),
2506 object));
2507 TREE_CHAIN (arg_list)
2508 = build_tree_list (NULL_TREE,
2509 make_tree (integer_type_node, const0_rtx));
2510 TREE_CHAIN (TREE_CHAIN (arg_list))
2511 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2512 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2513
2514 /* Now we have to build up the CALL_EXPR itself. */
2515 call_expr = build1 (ADDR_EXPR,
2516 build_pointer_type (TREE_TYPE (fn)), fn);
2517 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2518 call_expr, arg_list, NULL_TREE);
2519 TREE_SIDE_EFFECTS (call_expr) = 1;
2520
2521 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
bbf6f052 2522#else
9de08200 2523 emit_library_call (bzero_libfunc, 0,
fe7bbd2a 2524 VOIDmode, 2, object, Pmode, size,
9de08200 2525 TYPE_MODE (integer_type_node));
bbf6f052 2526#endif
9de08200 2527 }
2528 }
2529 else
66ed0683 2530 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2531
2532 return retval;
2533}
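/* Illustrative sketch, not compiled: zeroing a hypothetical 16-byte,
   word-aligned BLKmode MEM.  A constant size this small is normally
   handled inline by clear_by_pieces rather than by a library call.  */
#if 0
  clear_storage (object, GEN_INT (16), 4);	/* object is hypothetical */
#endif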
2534
2535/* Generate code to copy Y into X.
2536 Both Y and X must have the same mode, except that
2537 Y can be a constant with VOIDmode.
2538 This mode cannot be BLKmode; use emit_block_move for that.
2539
2540 Return the last instruction emitted. */
2541
2542rtx
2543emit_move_insn (x, y)
2544 rtx x, y;
2545{
2546 enum machine_mode mode = GET_MODE (x);
2547
2548 x = protect_from_queue (x, 1);
2549 y = protect_from_queue (y, 0);
2550
2551 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2552 abort ();
2553
2554 /* Never force constant_p_rtx to memory. */
2555 if (GET_CODE (y) == CONSTANT_P_RTX)
2556 ;
2557 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2558 y = force_const_mem (mode, y);
2559
2560 /* If X or Y are memory references, verify that their addresses are valid
2561 for the machine. */
2562 if (GET_CODE (x) == MEM
2563 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2564 && ! push_operand (x, GET_MODE (x)))
2565 || (flag_force_addr
2566 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2567 x = change_address (x, VOIDmode, XEXP (x, 0));
2568
2569 if (GET_CODE (y) == MEM
2570 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2571 || (flag_force_addr
2572 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2573 y = change_address (y, VOIDmode, XEXP (y, 0));
2574
2575 if (mode == BLKmode)
2576 abort ();
2577
2578 return emit_move_insn_1 (x, y);
2579}
2580
2581/* Low level part of emit_move_insn.
2582 Called just like emit_move_insn, but assumes X and Y
2583 are basically valid. */
2584
2585rtx
2586emit_move_insn_1 (x, y)
2587 rtx x, y;
2588{
2589 enum machine_mode mode = GET_MODE (x);
2590 enum machine_mode submode;
2591 enum mode_class class = GET_MODE_CLASS (mode);
2592 int i;
2593
2594 if (mode >= MAX_MACHINE_MODE)
2595 abort ();
2596
2597 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2598 return
2599 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2600
89742723 2601 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2602 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2603 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2604 * BITS_PER_UNIT),
2605 (class == MODE_COMPLEX_INT
2606 ? MODE_INT : MODE_FLOAT),
2607 0))
2608 && (mov_optab->handlers[(int) submode].insn_code
2609 != CODE_FOR_nothing))
2610 {
2611 /* Don't split destination if it is a stack push. */
2612 int stack = push_operand (x, GET_MODE (x));
7308a047 2613
2614 /* If this is a stack, push the highpart first, so it
2615 will be in the argument order.
2616
2617 In that case, change_address is used only to convert
2618 the mode, not to change the address. */
2619 if (stack)
2620 {
2621 /* Note that the real part always precedes the imag part in memory
2622 regardless of machine's endianness. */
2623#ifdef STACK_GROWS_DOWNWARD
2624 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2625 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2626 gen_imagpart (submode, y)));
c937357e 2627 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2628 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2629 gen_realpart (submode, y)));
2630#else
2631 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2632 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2633 gen_realpart (submode, y)));
c937357e 2634 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2635 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2636 gen_imagpart (submode, y)));
2637#endif
2638 }
2639 else
2640 {
2641 rtx realpart_x, realpart_y;
2642 rtx imagpart_x, imagpart_y;
2643
2644 /* If this is a complex value with each part being smaller than a
2645 word, the usual calling sequence will likely pack the pieces into
2646 a single register. Unfortunately, SUBREG of hard registers only
2647 deals in terms of words, so we have a problem converting input
2648 arguments to the CONCAT of two registers that is used elsewhere
2649 for complex values. If this is before reload, we can copy it into
2650 memory and reload. FIXME, we should see about using extract and
2651 insert on integer registers, but complex short and complex char
2652 variables should be rarely used. */
2653 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2654 && (reload_in_progress | reload_completed) == 0)
2655 {
2656 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2657 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2658
2659 if (packed_dest_p || packed_src_p)
2660 {
2661 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2662 ? MODE_FLOAT : MODE_INT);
2663
2664 enum machine_mode reg_mode =
2665 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2666
2667 if (reg_mode != BLKmode)
2668 {
2669 rtx mem = assign_stack_temp (reg_mode,
2670 GET_MODE_SIZE (mode), 0);
2671
2672 rtx cmem = change_address (mem, mode, NULL_RTX);
2673
2674 current_function->cannot_inline
2675 = "function uses short complex types";
2676
2677 if (packed_dest_p)
2678 {
2679 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2680 emit_move_insn_1 (cmem, y);
2681 return emit_move_insn_1 (sreg, mem);
2682 }
2683 else
2684 {
2685 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2686 emit_move_insn_1 (mem, sreg);
2687 return emit_move_insn_1 (x, cmem);
2688 }
2689 }
2690 }
2691 }
2692
2693 realpart_x = gen_realpart (submode, x);
2694 realpart_y = gen_realpart (submode, y);
2695 imagpart_x = gen_imagpart (submode, x);
2696 imagpart_y = gen_imagpart (submode, y);
2697
2698 /* Show the output dies here. This is necessary for SUBREGs
2699 of pseudos since we cannot track their lifetimes correctly;
2700 hard regs shouldn't appear here except as return values.
2701 We never want to emit such a clobber after reload. */
2702 if (x != y
2703 && ! (reload_in_progress || reload_completed)
2704 && (GET_CODE (realpart_x) == SUBREG
2705 || GET_CODE (imagpart_x) == SUBREG))
b2e7e6fb 2706 {
c14c6529 2707 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
b2e7e6fb 2708 }
2638126a 2709
c937357e 2710 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
235ae7be 2711 (realpart_x, realpart_y));
c937357e 2712 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
235ae7be 2713 (imagpart_x, imagpart_y));
c937357e 2714 }
7308a047 2715
7a1ab50a 2716 return get_last_insn ();
2717 }
2718
2719 /* This will handle any multi-word mode that lacks a move_insn pattern.
2720 However, you will get better code if you define such patterns,
2721 even if they must turn into multiple assembler instructions. */
a4320483 2722 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2723 {
2724 rtx last_insn = 0;
2725 rtx seq;
2726 int need_clobber;
6551fa4d 2727
2728#ifdef PUSH_ROUNDING
2729
2730 /* If X is a push on the stack, do the push now and replace
2731 X with a reference to the stack pointer. */
2732 if (push_operand (x, GET_MODE (x)))
2733 {
2734 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2735 x = change_address (x, VOIDmode, stack_pointer_rtx);
2736 }
2737#endif
2738
235ae7be 2739 start_sequence ();
15a7a8ec 2740
235ae7be 2741 need_clobber = 0;
2742 for (i = 0;
2743 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2744 i++)
2745 {
2746 rtx xpart = operand_subword (x, i, 1, mode);
2747 rtx ypart = operand_subword (y, i, 1, mode);
2748
2749 /* If we can't get a part of Y, put Y into memory if it is a
2750 constant. Otherwise, force it into a register. If we still
2751 can't get a part of Y, abort. */
2752 if (ypart == 0 && CONSTANT_P (y))
2753 {
2754 y = force_const_mem (mode, y);
2755 ypart = operand_subword (y, i, 1, mode);
2756 }
2757 else if (ypart == 0)
2758 ypart = operand_subword_force (y, i, mode);
2759
2760 if (xpart == 0 || ypart == 0)
2761 abort ();
2762
2763 need_clobber |= (GET_CODE (xpart) == SUBREG);
2764
2765 last_insn = emit_move_insn (xpart, ypart);
2766 }
6551fa4d 2767
2768 seq = gen_sequence ();
2769 end_sequence ();
2770
2771 /* Show the output dies here. This is necessary for SUBREGs
2772 of pseudos since we cannot track their lifetimes correctly;
2773 hard regs shouldn't appear here except as return values.
2774 We never want to emit such a clobber after reload. */
2775 if (x != y
2776 && ! (reload_in_progress || reload_completed)
2777 && need_clobber != 0)
2778 {
2779 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2780 }
2781
2782 emit_insn (seq);
2783
2784 return last_insn;
2785 }
2786 else
2787 abort ();
2788}
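/* In the multi-word fallback above, a DImode move on a 32-bit target
   that lacks a movdi pattern becomes two word-sized moves through
   operand_subword, wrapped in one SEQUENCE and preceded by a CLOBBER
   of the destination when a SUBREG part makes its lifetime hard to
   track.  */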
2789\f
2790/* Pushing data onto the stack. */
2791
2792/* Push a block of length SIZE (perhaps variable)
2793 and return an rtx to address the beginning of the block.
2794 Note that it is not possible for the value returned to be a QUEUED.
2795 The value may be virtual_outgoing_args_rtx.
2796
2797 EXTRA is the number of bytes of padding to push in addition to SIZE.
2798 BELOW nonzero means this padding comes at low addresses;
2799 otherwise, the padding comes at high addresses. */
2800
2801rtx
2802push_block (size, extra, below)
2803 rtx size;
2804 int extra, below;
2805{
2806 register rtx temp;
2807
2808 size = convert_modes (Pmode, ptr_mode, size, 1);
2809 if (CONSTANT_P (size))
2810 anti_adjust_stack (plus_constant (size, extra));
2811 else if (GET_CODE (size) == REG && extra == 0)
2812 anti_adjust_stack (size);
2813 else
2814 {
2815 rtx temp = copy_to_mode_reg (Pmode, size);
2816 if (extra != 0)
906c4e36 2817 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2818 temp, 0, OPTAB_LIB_WIDEN);
2819 anti_adjust_stack (temp);
2820 }
2821
2822#if defined (STACK_GROWS_DOWNWARD) \
2823 || (defined (ARGS_GROW_DOWNWARD) \
2824 && !defined (ACCUMULATE_OUTGOING_ARGS))
2825
2826 /* Return the lowest stack address when STACK or ARGS grow downward and
 2827 we are not accumulating outgoing arguments (the c4x port uses such
2828 conventions). */
2829 temp = virtual_outgoing_args_rtx;
2830 if (extra != 0 && below)
2831 temp = plus_constant (temp, extra);
2832#else
2833 if (GET_CODE (size) == CONST_INT)
2834 temp = plus_constant (virtual_outgoing_args_rtx,
2835 - INTVAL (size) - (below ? 0 : extra));
2836 else if (extra != 0 && !below)
38a448ca 2837 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2838 negate_rtx (Pmode, plus_constant (size, extra)));
2839 else
38a448ca 2840 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
c5c76735 2841 negate_rtx (Pmode, size));
2842#endif
2843
2844 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2845}
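/* For example, push_block (GEN_INT (16), 4, 0) reserves 20 bytes (16
   plus 4 of padding); since BELOW is zero the padding sits at the
   high-address end, so on a downward-growing stack the returned
   address points directly at the 16-byte block.  */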
2846
87e38d84 2847rtx
2848gen_push_operand ()
2849{
38a448ca 2850 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2851}
2852
 2853/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2854 block of SIZE bytes. */
2855
2856static rtx
2857get_push_address (size)
2858 int size;
2859{
2860 register rtx temp;
2861
2862 if (STACK_PUSH_CODE == POST_DEC)
38a448ca 2863 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427 2864 else if (STACK_PUSH_CODE == POST_INC)
38a448ca 2865 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2866 else
2867 temp = stack_pointer_rtx;
2868
c85f7c16 2869 return copy_to_reg (temp);
2870}
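/* The adjustment above undoes the post-update: with STACK_PUSH_CODE
   == POST_DEC the stack pointer has already moved past the pushed
   data, so the data itself lives at sp + SIZE; that address is copied
   into a register and returned.  */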
2871
2872/* Generate code to push X onto the stack, assuming it has mode MODE and
2873 type TYPE.
2874 MODE is redundant except when X is a CONST_INT (since they don't
2875 carry mode info).
2876 SIZE is an rtx for the size of data to be copied (in bytes),
2877 needed only if X is BLKmode.
2878
 2879 ALIGN (in bytes) is the maximum alignment we can assume.
2880
2881 If PARTIAL and REG are both nonzero, then copy that many of the first
2882 words of X into registers starting with REG, and push the rest of X.
2883 The amount of space pushed is decreased by PARTIAL words,
2884 rounded *down* to a multiple of PARM_BOUNDARY.
2885 REG must be a hard register in this case.
 2886 If REG is zero but PARTIAL is not, take all other actions for an
2887 argument partially in registers, but do not actually load any
2888 registers.
2889
2890 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 2891 This is ignored if an argument block has already been allocated.
2892
2893 On a machine that lacks real push insns, ARGS_ADDR is the address of
2894 the bottom of the argument block for this call. We use indexing off there
 2895 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2896 argument block has not been preallocated.
2897
2898 ARGS_SO_FAR is the size of args previously pushed for this call.
2899
2900 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2901 for arguments passed in registers. If nonzero, it will be the number
2902 of bytes required. */
2903
2904void
2905emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2906 args_addr, args_so_far, reg_parm_stack_space,
2907 alignment_pad)
2908 register rtx x;
2909 enum machine_mode mode;
2910 tree type;
2911 rtx size;
2912 int align;
2913 int partial;
2914 rtx reg;
2915 int extra;
2916 rtx args_addr;
2917 rtx args_so_far;
e5e809f4 2918 int reg_parm_stack_space;
4fc026cd 2919 rtx alignment_pad;
2920{
2921 rtx xinner;
2922 enum direction stack_direction
2923#ifdef STACK_GROWS_DOWNWARD
2924 = downward;
2925#else
2926 = upward;
2927#endif
2928
2929 /* Decide where to pad the argument: `downward' for below,
2930 `upward' for above, or `none' for don't pad it.
2931 Default is below for small data on big-endian machines; else above. */
2932 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2933
2934 /* Invert direction if stack is post-update. */
2935 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2936 if (where_pad != none)
2937 where_pad = (where_pad == downward ? upward : downward);
2938
2939 xinner = x = protect_from_queue (x, 0);
2940
2941 if (mode == BLKmode)
2942 {
2943 /* Copy a block into the stack, entirely or partially. */
2944
2945 register rtx temp;
2946 int used = partial * UNITS_PER_WORD;
2947 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2948 int skip;
2949
2950 if (size == 0)
2951 abort ();
2952
2953 used -= offset;
2954
2955 /* USED is now the # of bytes we need not copy to the stack
2956 because registers will take care of them. */
2957
2958 if (partial != 0)
2959 xinner = change_address (xinner, BLKmode,
2960 plus_constant (XEXP (xinner, 0), used));
2961
2962 /* If the partial register-part of the arg counts in its stack size,
2963 skip the part of stack space corresponding to the registers.
2964 Otherwise, start copying to the beginning of the stack space,
2965 by setting SKIP to 0. */
e5e809f4 2966 skip = (reg_parm_stack_space == 0) ? 0 : used;
2967
2968#ifdef PUSH_ROUNDING
2969 /* Do it with several push insns if that doesn't take lots of insns
2970 and if there is no difficulty with push insns that skip bytes
2971 on the stack for alignment purposes. */
2972 if (args_addr == 0
2973 && GET_CODE (size) == CONST_INT
2974 && skip == 0
15914757 2975 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2976 /* Here we avoid the case of a structure whose weak alignment
2977 forces many pushes of a small amount of data,
2978 and such small pushes do rounding that causes trouble. */
c7a7ac46 2979 && ((! SLOW_UNALIGNED_ACCESS)
e87b4f3f 2980 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 2981 || PUSH_ROUNDING (align) == align)
2982 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2983 {
2984 /* Push padding now if padding above and stack grows down,
2985 or if padding below and stack grows up.
2986 But if space already allocated, this has already been done. */
2987 if (extra && args_addr == 0
2988 && where_pad != none && where_pad != stack_direction)
906c4e36 2989 anti_adjust_stack (GEN_INT (extra));
bbf6f052 2990
38a448ca 2991 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
bbf6f052 2992 INTVAL (size) - used, align);
921b3427 2993
7d384cc0 2994 if (current_function_check_memory_usage && ! in_check_memory_usage)
2995 {
2996 rtx temp;
2997
956d6950 2998 in_check_memory_usage = 1;
921b3427 2999 temp = get_push_address (INTVAL(size) - used);
c85f7c16 3000 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427 3001 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3002 temp, Pmode,
3003 XEXP (xinner, 0), Pmode,
3004 GEN_INT (INTVAL(size) - used),
3005 TYPE_MODE (sizetype));
3006 else
3007 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 3008 temp, Pmode,
3009 GEN_INT (INTVAL(size) - used),
3010 TYPE_MODE (sizetype),
3011 GEN_INT (MEMORY_USE_RW),
3012 TYPE_MODE (integer_type_node));
3013 in_check_memory_usage = 0;
921b3427 3014 }
3015 }
3016 else
3017#endif /* PUSH_ROUNDING */
3018 {
3019 /* Otherwise make space on the stack and copy the data
3020 to the address of that space. */
3021
3022 /* Deduct words put into registers from the size we must copy. */
3023 if (partial != 0)
3024 {
3025 if (GET_CODE (size) == CONST_INT)
906c4e36 3026 size = GEN_INT (INTVAL (size) - used);
3027 else
3028 size = expand_binop (GET_MODE (size), sub_optab, size,
3029 GEN_INT (used), NULL_RTX, 0,
3030 OPTAB_LIB_WIDEN);
3031 }
3032
3033 /* Get the address of the stack space.
3034 In this case, we do not deal with EXTRA separately.
3035 A single stack adjust will do. */
3036 if (! args_addr)
3037 {
3038 temp = push_block (size, extra, where_pad == downward);
3039 extra = 0;
3040 }
3041 else if (GET_CODE (args_so_far) == CONST_INT)
3042 temp = memory_address (BLKmode,
3043 plus_constant (args_addr,
3044 skip + INTVAL (args_so_far)));
3045 else
3046 temp = memory_address (BLKmode,
3047 plus_constant (gen_rtx_PLUS (Pmode,
3048 args_addr,
3049 args_so_far),
bbf6f052 3050 skip));
7d384cc0 3051 if (current_function_check_memory_usage && ! in_check_memory_usage)
3052 {
3053 rtx target;
3054
956d6950 3055 in_check_memory_usage = 1;
921b3427 3056 target = copy_to_reg (temp);
c85f7c16 3057 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427 3058 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3059 target, Pmode,
3060 XEXP (xinner, 0), Pmode,
3061 size, TYPE_MODE (sizetype));
3062 else
3063 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 3064 target, Pmode,
921b3427 3065 size, TYPE_MODE (sizetype),
3066 GEN_INT (MEMORY_USE_RW),
3067 TYPE_MODE (integer_type_node));
3068 in_check_memory_usage = 0;
921b3427 3069 }
3070
3071 /* TEMP is the address of the block. Copy the data there. */
3072 if (GET_CODE (size) == CONST_INT
fbe1758d 3073 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
bbf6f052 3074 {
38a448ca 3075 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3076 INTVAL (size), align);
3077 goto ret;
3078 }
e5e809f4 3079 else
bbf6f052 3080 {
3081 rtx opalign = GEN_INT (align);
3082 enum machine_mode mode;
9e6a5703 3083 rtx target = gen_rtx_MEM (BLKmode, temp);
3084
3085 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3086 mode != VOIDmode;
3087 mode = GET_MODE_WIDER_MODE (mode))
c841050e 3088 {
e5e809f4 3089 enum insn_code code = movstr_optab[(int) mode];
a995e389 3090 insn_operand_predicate_fn pred;
3091
3092 if (code != CODE_FOR_nothing
3093 && ((GET_CODE (size) == CONST_INT
3094 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3095 <= (GET_MODE_MASK (mode) >> 1)))
3096 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3097 && (!(pred = insn_data[(int) code].operand[0].predicate)
3098 || ((*pred) (target, BLKmode)))
3099 && (!(pred = insn_data[(int) code].operand[1].predicate)
3100 || ((*pred) (xinner, BLKmode)))
3101 && (!(pred = insn_data[(int) code].operand[3].predicate)
3102 || ((*pred) (opalign, VOIDmode))))
3103 {
3104 rtx op2 = convert_to_mode (mode, size, 1);
3105 rtx last = get_last_insn ();
3106 rtx pat;
3107
3108 pred = insn_data[(int) code].operand[2].predicate;
3109 if (pred != 0 && ! (*pred) (op2, mode))
3110 op2 = copy_to_mode_reg (mode, op2);
3111
3112 pat = GEN_FCN ((int) code) (target, xinner,
3113 op2, opalign);
3114 if (pat)
3115 {
3116 emit_insn (pat);
3117 goto ret;
3118 }
3119 else
3120 delete_insns_since (last);
3121 }
c841050e 3122 }
bbf6f052 3123 }
3124
3125#ifndef ACCUMULATE_OUTGOING_ARGS
3126 /* If the source is referenced relative to the stack pointer,
3127 copy it to another register to stabilize it. We do not need
3128 to do this if we know that we won't be changing sp. */
3129
3130 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3131 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3132 temp = copy_to_reg (temp);
3133#endif
3134
3135 /* Make inhibit_defer_pop nonzero around the library call
3136 to force it to pop the bcopy-arguments right away. */
3137 NO_DEFER_POP;
3138#ifdef TARGET_MEM_FUNCTIONS
d562e42e 3139 emit_library_call (memcpy_libfunc, 0,
bbf6f052 3140 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3141 convert_to_mode (TYPE_MODE (sizetype),
3142 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3143 TYPE_MODE (sizetype));
bbf6f052 3144#else
d562e42e 3145 emit_library_call (bcopy_libfunc, 0,
bbf6f052 3146 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3147 convert_to_mode (TYPE_MODE (integer_type_node),
3148 size,
3149 TREE_UNSIGNED (integer_type_node)),
3150 TYPE_MODE (integer_type_node));
3151#endif
3152 OK_DEFER_POP;
3153 }
3154 }
3155 else if (partial > 0)
3156 {
3157 /* Scalar partly in registers. */
3158
3159 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3160 int i;
3161 int not_stack;
3162 /* # words of start of argument
3163 that we must make space for but need not store. */
3164 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3165 int args_offset = INTVAL (args_so_far);
3166 int skip;
3167
3168 /* Push padding now if padding above and stack grows down,
3169 or if padding below and stack grows up.
3170 But if space already allocated, this has already been done. */
3171 if (extra && args_addr == 0
3172 && where_pad != none && where_pad != stack_direction)
906c4e36 3173 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3174
3175 /* If we make space by pushing it, we might as well push
3176 the real data. Otherwise, we can leave OFFSET nonzero
3177 and leave the space uninitialized. */
3178 if (args_addr == 0)
3179 offset = 0;
3180
3181 /* Now NOT_STACK gets the number of words that we don't need to
3182 allocate on the stack. */
3183 not_stack = partial - offset;
3184
3185 /* If the partial register-part of the arg counts in its stack size,
3186 skip the part of stack space corresponding to the registers.
3187 Otherwise, start copying to the beginning of the stack space,
3188 by setting SKIP to 0. */
e5e809f4 3189 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3190
3191 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3192 x = validize_mem (force_const_mem (mode, x));
3193
3194 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3195 SUBREGs of such registers are not allowed. */
3196 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3197 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3198 x = copy_to_reg (x);
3199
3200 /* Loop over all the words allocated on the stack for this arg. */
3201 /* We can do it by words, because any scalar bigger than a word
3202 has a size a multiple of a word. */
3203#ifndef PUSH_ARGS_REVERSED
3204 for (i = not_stack; i < size; i++)
3205#else
3206 for (i = size - 1; i >= not_stack; i--)
3207#endif
3208 if (i >= not_stack + offset)
3209 emit_push_insn (operand_subword_force (x, i, mode),
3210 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3211 0, args_addr,
3212 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3213 * UNITS_PER_WORD)),
4fc026cd 3214 reg_parm_stack_space, alignment_pad);
3215 }
3216 else
3217 {
3218 rtx addr;
921b3427 3219 rtx target = NULL_RTX;
3220
3221 /* Push padding now if padding above and stack grows down,
3222 or if padding below and stack grows up.
3223 But if space already allocated, this has already been done. */
3224 if (extra && args_addr == 0
3225 && where_pad != none && where_pad != stack_direction)
906c4e36 3226 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3227
3228#ifdef PUSH_ROUNDING
3229 if (args_addr == 0)
3230 addr = gen_push_operand ();
3231 else
3232#endif
3233 {
3234 if (GET_CODE (args_so_far) == CONST_INT)
3235 addr
3236 = memory_address (mode,
3237 plus_constant (args_addr,
3238 INTVAL (args_so_far)));
3239 else
3240 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3241 args_so_far));
3242 target = addr;
3243 }
bbf6f052 3244
38a448ca 3245 emit_move_insn (gen_rtx_MEM (mode, addr), x);
921b3427 3246
7d384cc0 3247 if (current_function_check_memory_usage && ! in_check_memory_usage)
921b3427 3248 {
956d6950 3249 in_check_memory_usage = 1;
3250 if (target == 0)
3251 target = get_push_address (GET_MODE_SIZE (mode));
3252
c85f7c16 3253 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427 3254 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3255 target, Pmode,
3256 XEXP (x, 0), Pmode,
3257 GEN_INT (GET_MODE_SIZE (mode)),
3258 TYPE_MODE (sizetype));
3259 else
3260 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 3261 target, Pmode,
3262 GEN_INT (GET_MODE_SIZE (mode)),
3263 TYPE_MODE (sizetype),
3264 GEN_INT (MEMORY_USE_RW),
3265 TYPE_MODE (integer_type_node));
3266 in_check_memory_usage = 0;
921b3427 3267 }
3268 }
3269
3270 ret:
3271 /* If part should go in registers, copy that part
3272 into the appropriate registers. Do this now, at the end,
3273 since mem-to-mem copies above may do function calls. */
cd048831 3274 if (partial > 0 && reg != 0)
3275 {
3276 /* Handle calls that pass values in multiple non-contiguous locations.
3277 The Irix 6 ABI has examples of this. */
3278 if (GET_CODE (reg) == PARALLEL)
aac5cc16 3279 emit_group_load (reg, x, -1, align); /* ??? size? */
3280 else
3281 move_block_to_reg (REGNO (reg), x, partial, mode);
3282 }
3283
3284 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3285 anti_adjust_stack (GEN_INT (extra));
3286
3287 if (alignment_pad)
3288 anti_adjust_stack (alignment_pad);
3289}
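/* Illustrative sketch, not compiled: pushing a word-mode value X as an
   argument with no partial-register portion, no preallocated argument
   block and word alignment; the argument order follows the parameter
   list above, and X is hypothetical.  */
#if 0
  emit_push_insn (x, word_mode, NULL_TREE, NULL_RTX, UNITS_PER_WORD,
		  0, NULL_RTX, 0, NULL_RTX, const0_rtx, 0, NULL_RTX);
#endif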
3290\f
3291/* Expand an assignment that stores the value of FROM into TO.
3292 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3293 (This may contain a QUEUED rtx;
3294 if the value is constant, this rtx is a constant.)
3295 Otherwise, the returned value is NULL_RTX.
3296
3297 SUGGEST_REG is no longer actually used.
3298 It used to mean, copy the value through a register
3299 and return that register, if that is possible.
709f5be1 3300 We now use WANT_VALUE to decide whether to do this. */
3301
3302rtx
3303expand_assignment (to, from, want_value, suggest_reg)
3304 tree to, from;
3305 int want_value;
c5c76735 3306 int suggest_reg ATTRIBUTE_UNUSED;
3307{
3308 register rtx to_rtx = 0;
3309 rtx result;
3310
3311 /* Don't crash if the lhs of the assignment was erroneous. */
3312
3313 if (TREE_CODE (to) == ERROR_MARK)
3314 {
3315 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3316 return want_value ? result : NULL_RTX;
3317 }
3318
3319 /* Assignment of a structure component needs special treatment
3320 if the structure component's rtx is not simply a MEM.
3321 Assignment of an array element at a constant index, and assignment of
3322 an array element in an unaligned packed structure field, has the same
3323 problem. */
bbf6f052 3324
3325 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3326 || TREE_CODE (to) == ARRAY_REF)
3327 {
3328 enum machine_mode mode1;
3329 int bitsize;
3330 int bitpos;
7bb0943f 3331 tree offset;
3332 int unsignedp;
3333 int volatilep = 0;
0088fcb1 3334 tree tem;
d78d243c 3335 int alignment;
3336
3337 push_temp_slots ();
3338 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3339 &unsignedp, &volatilep, &alignment);
3340
3341 /* If we are going to use store_bit_field and extract_bit_field,
3342 make sure to_rtx will be safe for multiple use. */
3343
3344 if (mode1 == VOIDmode && want_value)
3345 tem = stabilize_reference (tem);
3346
921b3427 3347 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3348 if (offset != 0)
3349 {
906c4e36 3350 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3351
3352 if (GET_CODE (to_rtx) != MEM)
3353 abort ();
3354
3355 if (GET_MODE (offset_rtx) != ptr_mode)
3356 {
3357#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 3358 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3359#else
3360 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3361#endif
3362 }
3363
3364 /* A constant address in TO_RTX can have VOIDmode, we must not try
3365 to call force_reg for that case. Avoid that case. */
3366 if (GET_CODE (to_rtx) == MEM
3367 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 3368 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
89752202
HB
3369 && bitsize
3370 && (bitpos % bitsize) == 0
3371 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3372 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3373 {
3374 rtx temp = change_address (to_rtx, mode1,
3375 plus_constant (XEXP (to_rtx, 0),
3376 (bitpos /
3377 BITS_PER_UNIT)));
3378 if (GET_CODE (XEXP (temp, 0)) == REG)
3379 to_rtx = temp;
3380 else
3381 to_rtx = change_address (to_rtx, mode1,
3382 force_reg (GET_MODE (XEXP (temp, 0)),
3383 XEXP (temp, 0)));
3384 bitpos = 0;
3385 }
3386
7bb0943f 3387 to_rtx = change_address (to_rtx, VOIDmode,
38a448ca 3388 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
c5c76735
JL
3389 force_reg (ptr_mode,
3390 offset_rtx)));
7bb0943f 3391 }
c5c76735 3392
bbf6f052
RK
3393 if (volatilep)
3394 {
3395 if (GET_CODE (to_rtx) == MEM)
01188446
JW
3396 {
3397 /* When the offset is zero, to_rtx is the address of the
3398 structure we are storing into, and hence may be shared.
3399 We must make a new MEM before setting the volatile bit. */
3400 if (offset == 0)
effbcc6a
RK
3401 to_rtx = copy_rtx (to_rtx);
3402
01188446
JW
3403 MEM_VOLATILE_P (to_rtx) = 1;
3404 }
bbf6f052
RK
3405#if 0 /* This was turned off because, when a field is volatile
3406 in an object which is not volatile, the object may be in a register,
3407 and then we would abort here. */
3408 else
3409 abort ();
3410#endif
3411 }
3412
956d6950
JL
3413 if (TREE_CODE (to) == COMPONENT_REF
3414 && TREE_READONLY (TREE_OPERAND (to, 1)))
3415 {
8bd6ecc2 3416 if (offset == 0)
956d6950
JL
3417 to_rtx = copy_rtx (to_rtx);
3418
3419 RTX_UNCHANGING_P (to_rtx) = 1;
3420 }
3421
921b3427 3422 /* Check the access. */
7d384cc0 3423 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
921b3427
RK
3424 {
3425 rtx to_addr;
3426 int size;
3427 int best_mode_size;
3428 enum machine_mode best_mode;
3429
3430 best_mode = get_best_mode (bitsize, bitpos,
3431 TYPE_ALIGN (TREE_TYPE (tem)),
3432 mode1, volatilep);
3433 if (best_mode == VOIDmode)
3434 best_mode = QImode;
3435
3436 best_mode_size = GET_MODE_BITSIZE (best_mode);
3437 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3438 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3439 size *= GET_MODE_SIZE (best_mode);
3440
3441 /* Check the access right of the pointer. */
e9a25f70
JL
3442 if (size)
3443 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 3444 to_addr, Pmode,
e9a25f70 3445 GEN_INT (size), TYPE_MODE (sizetype),
956d6950
JL
3446 GEN_INT (MEMORY_USE_WO),
3447 TYPE_MODE (integer_type_node));
921b3427
RK
3448 }
3449
bbf6f052
RK
3450 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3451 (want_value
3452 /* Spurious cast makes HPUX compiler happy. */
3453 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3454 : VOIDmode),
3455 unsignedp,
3456 /* Required alignment of containing datum. */
d78d243c 3457 alignment,
ece32014
MM
3458 int_size_in_bytes (TREE_TYPE (tem)),
3459 get_alias_set (to));
bbf6f052
RK
3460 preserve_temp_slots (result);
3461 free_temp_slots ();
0088fcb1 3462 pop_temp_slots ();
bbf6f052 3463
709f5be1
RS
3464 /* If the value is meaningful, convert RESULT to the proper mode.
3465 Otherwise, return nothing. */
5ffe63ed
RS
3466 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3467 TYPE_MODE (TREE_TYPE (from)),
3468 result,
3469 TREE_UNSIGNED (TREE_TYPE (to)))
709f5be1 3470 : NULL_RTX);
bbf6f052
RK
3471 }
3472
cd1db108
RS
3473 /* If the rhs is a function call and its value is not an aggregate,
3474 call the function before we start to compute the lhs.
3475 This is needed for correct code for cases such as
3476 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
3477 requires loading up part of an address in a separate insn.
3478
3479 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3480 a promoted variable where the zero- or sign- extension needs to be done.
3481 Handling this in the normal way is safe because no computation is done
3482 before the call. */
3483 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 3484 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1ad87b63 3485 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3486 {
0088fcb1
RK
3487 rtx value;
3488
3489 push_temp_slots ();
3490 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3491 if (to_rtx == 0)
921b3427 3492 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
aaf87c45 3493
fffa9c1d
JW
3494 /* Handle calls that return values in multiple non-contiguous locations.
3495 The Irix 6 ABI has examples of this. */
3496 if (GET_CODE (to_rtx) == PARALLEL)
aac5cc16
RH
3497 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3498 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
fffa9c1d 3499 else if (GET_MODE (to_rtx) == BLKmode)
db3ec607 3500 emit_block_move (to_rtx, value, expr_size (from),
ff9b5bd8 3501 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
aaf87c45 3502 else
6419e5b0
DT
3503 {
3504#ifdef POINTERS_EXTEND_UNSIGNED
ab40f612
DT
3505 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3506 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
6419e5b0
DT
3507 value = convert_memory_address (GET_MODE (to_rtx), value);
3508#endif
3509 emit_move_insn (to_rtx, value);
3510 }
cd1db108
RS
3511 preserve_temp_slots (to_rtx);
3512 free_temp_slots ();
0088fcb1 3513 pop_temp_slots ();
709f5be1 3514 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3515 }
3516
bbf6f052
RK
3517 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3518 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3519
3520 if (to_rtx == 0)
41472af8
MM
3521 {
3522 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3523 if (GET_CODE (to_rtx) == MEM)
3524 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3525 }
bbf6f052 3526
86d38d25 3527 /* Don't move directly into a return register. */
14a774a9
RK
3528 if (TREE_CODE (to) == RESULT_DECL
3529 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
86d38d25 3530 {
0088fcb1
RK
3531 rtx temp;
3532
3533 push_temp_slots ();
3534 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
3535
3536 if (GET_CODE (to_rtx) == PARALLEL)
3537 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3538 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3539 else
3540 emit_move_insn (to_rtx, temp);
3541
86d38d25
RS
3542 preserve_temp_slots (to_rtx);
3543 free_temp_slots ();
0088fcb1 3544 pop_temp_slots ();
709f5be1 3545 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3546 }
3547
bbf6f052
RK
3548 /* In case we are returning the contents of an object which overlaps
3549 the place the value is being stored, use a safe function when copying
3550 a value through a pointer into a structure value return block. */
3551 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3552 && current_function_returns_struct
3553 && !current_function_returns_pcc_struct)
3554 {
0088fcb1
RK
3555 rtx from_rtx, size;
3556
3557 push_temp_slots ();
33a20d10 3558 size = expr_size (from);
921b3427
RK
3559 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3560 EXPAND_MEMORY_USE_DONT);
3561
3562 /* Copy the rights of the bitmap. */
7d384cc0 3563 if (current_function_check_memory_usage)
921b3427 3564 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
6a9c4aed
MK
3565 XEXP (to_rtx, 0), Pmode,
3566 XEXP (from_rtx, 0), Pmode,
921b3427
RK
3567 convert_to_mode (TYPE_MODE (sizetype),
3568 size, TREE_UNSIGNED (sizetype)),
3569 TYPE_MODE (sizetype));
bbf6f052
RK
3570
3571#ifdef TARGET_MEM_FUNCTIONS
d562e42e 3572 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
3573 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3574 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
3575 convert_to_mode (TYPE_MODE (sizetype),
3576 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3577 TYPE_MODE (sizetype));
bbf6f052 3578#else
d562e42e 3579 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
3580 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3581 XEXP (to_rtx, 0), Pmode,
3b6f75e2
JW
3582 convert_to_mode (TYPE_MODE (integer_type_node),
3583 size, TREE_UNSIGNED (integer_type_node)),
3584 TYPE_MODE (integer_type_node));
bbf6f052
RK
3585#endif
3586
3587 preserve_temp_slots (to_rtx);
3588 free_temp_slots ();
0088fcb1 3589 pop_temp_slots ();
709f5be1 3590 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3591 }
3592
3593 /* Compute FROM and store the value in the rtx we got. */
3594
0088fcb1 3595 push_temp_slots ();
bbf6f052
RK
3596 result = store_expr (from, to_rtx, want_value);
3597 preserve_temp_slots (result);
3598 free_temp_slots ();
0088fcb1 3599 pop_temp_slots ();
709f5be1 3600 return want_value ? result : NULL_RTX;
bbf6f052
RK
3601}
3602
3603/* Generate code for computing expression EXP,
3604 and storing the value into TARGET.
bbf6f052
RK
3605 TARGET may contain a QUEUED rtx.
3606
709f5be1
RS
3607 If WANT_VALUE is nonzero, return a copy of the value
3608 not in TARGET, so that we can be sure to use the proper
3609 value in a containing expression even if TARGET has something
3610 else stored in it. If possible, we copy the value through a pseudo
3611 and return that pseudo. Or, if the value is constant, we try to
3612 return the constant. In some cases, we return a pseudo
3613 copied *from* TARGET.
3614
3615 If the mode is BLKmode then we may return TARGET itself.
3616 It turns out that in BLKmode it doesn't cause a problem,
3617 because C has no operators that could combine two different
3618 assignments into the same BLKmode object with different values
3619 with no sequence point. Will other languages need this to
3620 be more thorough?
3621
3622 If WANT_VALUE is 0, we return NULL, to make sure
3623 to catch quickly any cases where the caller uses the value
3624 and fails to set WANT_VALUE. */
bbf6f052
RK
3625
3626rtx
709f5be1 3627store_expr (exp, target, want_value)
bbf6f052
RK
3628 register tree exp;
3629 register rtx target;
709f5be1 3630 int want_value;
bbf6f052
RK
3631{
3632 register rtx temp;
3633 int dont_return_target = 0;
3634
3635 if (TREE_CODE (exp) == COMPOUND_EXPR)
3636 {
3637 /* Perform first part of compound expression, then assign from second
3638 part. */
3639 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3640 emit_queue ();
709f5be1 3641 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
3642 }
3643 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3644 {
3645 /* For conditional expression, get safe form of the target. Then
3646 test the condition, doing the appropriate assignment on either
3647 side. This avoids the creation of unnecessary temporaries.
3648 For non-BLKmode, it is more efficient not to do this. */
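/* Illustrative example (added commentary): a BLKmode case this branch
   handles is

	struct big d;
	d = cond ? s1 : s2;

   where each arm is stored straight into D under its own branch,
   instead of first materializing both values in temporaries.  */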
3649
3650 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3651
3652 emit_queue ();
3653 target = protect_from_queue (target, 1);
3654
dabf8373 3655 do_pending_stack_adjust ();
bbf6f052
RK
3656 NO_DEFER_POP;
3657 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 3658 start_cleanup_deferral ();
709f5be1 3659 store_expr (TREE_OPERAND (exp, 1), target, 0);
956d6950 3660 end_cleanup_deferral ();
bbf6f052
RK
3661 emit_queue ();
3662 emit_jump_insn (gen_jump (lab2));
3663 emit_barrier ();
3664 emit_label (lab1);
956d6950 3665 start_cleanup_deferral ();
709f5be1 3666 store_expr (TREE_OPERAND (exp, 2), target, 0);
956d6950 3667 end_cleanup_deferral ();
bbf6f052
RK
3668 emit_queue ();
3669 emit_label (lab2);
3670 OK_DEFER_POP;
a3a58acc 3671
709f5be1 3672 return want_value ? target : NULL_RTX;
bbf6f052 3673 }
bbf6f052 3674 else if (queued_subexp_p (target))
709f5be1
RS
3675 /* If target contains a postincrement, let's not risk
3676 using it as the place to generate the rhs. */
bbf6f052
RK
3677 {
3678 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3679 {
3680 /* Expand EXP into a new pseudo. */
3681 temp = gen_reg_rtx (GET_MODE (target));
3682 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3683 }
3684 else
906c4e36 3685 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
3686
3687 /* If target is volatile, ANSI requires accessing the value
3688 *from* the target, if it is accessed. So make that happen.
3689 In no case return the target itself. */
3690 if (! MEM_VOLATILE_P (target) && want_value)
3691 dont_return_target = 1;
bbf6f052 3692 }
12f06d17
CH
3693 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3694 && GET_MODE (target) != BLKmode)
3695 /* If target is in memory and caller wants value in a register instead,
3696 arrange that. Pass TARGET as target for expand_expr so that,
3697 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3698 We know expand_expr will not use the target in that case.
3699 Don't do this if TARGET is volatile because we are supposed
3700 to write it and then read it. */
3701 {
1da93fe0 3702 temp = expand_expr (exp, target, GET_MODE (target), 0);
12f06d17
CH
3703 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3704 temp = copy_to_reg (temp);
3705 dont_return_target = 1;
3706 }
1499e0a8
RK
3707 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3708 /* If this is a scalar in a register that is stored in a wider mode
3709 than the declared mode, compute the result into its declared mode
3710 and then convert to the wider mode. Our value is the computed
3711 expression. */
3712 {
5a32d038 3713 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
3714 which will often result in some optimizations. Do the conversion
3715 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
3716 the extension. But don't do this if the type of EXP is a subtype
3717 of something else since then the conversion might involve
3718 more than just converting modes. */
3719 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3720 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
3721 {
3722 if (TREE_UNSIGNED (TREE_TYPE (exp))
3723 != SUBREG_PROMOTED_UNSIGNED_P (target))
3724 exp
3725 = convert
3726 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3727 TREE_TYPE (exp)),
3728 exp);
3729
3730 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3731 SUBREG_PROMOTED_UNSIGNED_P (target)),
3732 exp);
3733 }
5a32d038 3734
1499e0a8 3735 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
b258707c 3736
766f36c7 3737 /* If TEMP is a volatile MEM and we want a result value, make
f29369b9
RK
3738 the access now so it gets done only once. Likewise if
3739 it contains TARGET. */
3740 if (GET_CODE (temp) == MEM && want_value
3741 && (MEM_VOLATILE_P (temp)
3742 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
766f36c7
RK
3743 temp = copy_to_reg (temp);
3744
b258707c
RS
3745 /* If TEMP is a VOIDmode constant, use convert_modes to make
3746 sure that we properly convert it. */
3747 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3748 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3749 TYPE_MODE (TREE_TYPE (exp)), temp,
3750 SUBREG_PROMOTED_UNSIGNED_P (target));
3751
1499e0a8
RK
3752 convert_move (SUBREG_REG (target), temp,
3753 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
3754
3755 /* If we promoted a constant, change the mode back down to match
3756 target. Otherwise, the caller might get confused by a result whose
3757 mode is larger than expected. */
3758
3759 if (want_value && GET_MODE (temp) != GET_MODE (target)
3760 && GET_MODE (temp) != VOIDmode)
3761 {
3762 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3763 SUBREG_PROMOTED_VAR_P (temp) = 1;
3764 SUBREG_PROMOTED_UNSIGNED_P (temp)
3765 = SUBREG_PROMOTED_UNSIGNED_P (target);
3766 }
3767
709f5be1 3768 return want_value ? temp : NULL_RTX;
1499e0a8 3769 }
bbf6f052
RK
3770 else
3771 {
3772 temp = expand_expr (exp, target, GET_MODE (target), 0);
766f36c7 3773 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
3774 If TARGET is a volatile mem ref, either return TARGET
3775 or return a reg copied *from* TARGET; ANSI requires this.
3776
3777 Otherwise, if TEMP is not TARGET, return TEMP
3778 if it is constant (for efficiency),
3779 or if we really want the correct value. */
bbf6f052
RK
3780 if (!(target && GET_CODE (target) == REG
3781 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 3782 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 3783 && ! rtx_equal_p (temp, target)
709f5be1 3784 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
3785 dont_return_target = 1;
3786 }
3787
b258707c
RS
3788 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3789 the same as that of TARGET, adjust the constant. This is needed, for
3790 example, in case it is a CONST_DOUBLE and we want only a word-sized
3791 value. */
3792 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 3793 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
3794 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3795 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3796 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3797
7d384cc0 3798 if (current_function_check_memory_usage
921b3427
RK
3799 && GET_CODE (target) == MEM
3800 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3801 {
3802 if (GET_CODE (temp) == MEM)
3803 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
6a9c4aed
MK
3804 XEXP (target, 0), Pmode,
3805 XEXP (temp, 0), Pmode,
921b3427
RK
3806 expr_size (exp), TYPE_MODE (sizetype));
3807 else
3808 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 3809 XEXP (target, 0), Pmode,
921b3427 3810 expr_size (exp), TYPE_MODE (sizetype),
956d6950
JL
3811 GEN_INT (MEMORY_USE_WO),
3812 TYPE_MODE (integer_type_node));
921b3427
RK
3813 }
3814
bbf6f052
RK
3815 /* If value was not generated in the target, store it there.
3816 Convert the value to TARGET's type first if necessary. */
f3f2255a
R
3817 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3818 one or both of them are volatile memory refs, we have to distinguish
3819 two cases:
3820 - expand_expr has used TARGET. In this case, we must not generate
3821 another copy. This can be detected by TARGET being equal according
3822 to == .
3823 - expand_expr has not used TARGET - that means that the source just
3824 happens to have the same RTX form. Since temp will have been created
3825 by expand_expr, it will compare unequal according to == .
3826 We must generate a copy in this case, to reach the correct number
3827 of volatile memory references. */
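/* Concrete illustration (added commentary): when expand_expr returned
   TARGET itself, TEMP == TARGET and no copy is emitted; when the
   source merely happens to be a volatile MEM with the same address,
   TEMP is a distinct rtx that is rtx_equal_p to TARGET, and a copy is
   still emitted so that both volatile accesses actually occur.  */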
bbf6f052 3828
6036acbb 3829 if ((! rtx_equal_p (temp, target)
f3f2255a
R
3830 || (temp != target && (side_effects_p (temp)
3831 || side_effects_p (target))))
6036acbb 3832 && TREE_CODE (exp) != ERROR_MARK)
bbf6f052
RK
3833 {
3834 target = protect_from_queue (target, 1);
3835 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 3836 && GET_MODE (temp) != VOIDmode)
bbf6f052
RK
3837 {
3838 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3839 if (dont_return_target)
3840 {
3841 /* In this case, we will return TEMP,
3842 so make sure it has the proper mode.
3843 But don't forget to store the value into TARGET. */
3844 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3845 emit_move_insn (target, temp);
3846 }
3847 else
3848 convert_move (target, temp, unsignedp);
3849 }
3850
3851 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3852 {
3853 /* Handle copying a string constant into an array.
3854 The string constant may be shorter than the array.
3855 So copy just the string's actual length, and clear the rest. */
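/* E.g. (added for clarity): for  char buf[8] = "hi";  the string
   constant supplies TREE_STRING_LENGTH == 3 bytes ("hi" plus the
   terminating null), so 3 bytes are block-moved and the remaining
   5 bytes of BUF are cleared by the code below.  */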
3856 rtx size;
22619c3f 3857 rtx addr;
bbf6f052 3858
e87b4f3f
RS
3859 /* Get the size of the data type of the string,
3860 which is actually the size of the target. */
3861 size = expr_size (exp);
3862 if (GET_CODE (size) == CONST_INT
3863 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3864 emit_block_move (target, temp, size,
3865 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3866 else
bbf6f052 3867 {
e87b4f3f
RS
3868 /* Compute the size of the data to copy from the string. */
3869 tree copy_size
c03b7665 3870 = size_binop (MIN_EXPR,
b50d17a1 3871 make_tree (sizetype, size),
c03b7665
RK
3872 convert (sizetype,
3873 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
906c4e36
RK
3874 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3875 VOIDmode, 0);
e87b4f3f
RS
3876 rtx label = 0;
3877
3878 /* Copy that much. */
3879 emit_block_move (target, temp, copy_size_rtx,
3880 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3881
88f63c77
RK
3882 /* Figure out how much is left in TARGET that we have to clear.
3883 Do all calculations in ptr_mode. */
3884
3885 addr = XEXP (target, 0);
3886 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3887
e87b4f3f
RS
3888 if (GET_CODE (copy_size_rtx) == CONST_INT)
3889 {
88f63c77 3890 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
22619c3f 3891 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
e87b4f3f
RS
3892 }
3893 else
3894 {
88f63c77
RK
3895 addr = force_reg (ptr_mode, addr);
3896 addr = expand_binop (ptr_mode, add_optab, addr,
906c4e36
RK
3897 copy_size_rtx, NULL_RTX, 0,
3898 OPTAB_LIB_WIDEN);
e87b4f3f 3899
88f63c77 3900 size = expand_binop (ptr_mode, sub_optab, size,
906c4e36
RK
3901 copy_size_rtx, NULL_RTX, 0,
3902 OPTAB_LIB_WIDEN);
e87b4f3f 3903
e87b4f3f 3904 label = gen_label_rtx ();
c5d5d461
JL
3905 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3906 GET_MODE (size), 0, 0, label);
e87b4f3f
RS
3907 }
3908
3909 if (size != const0_rtx)
3910 {
921b3427 3911 /* Be sure we can write on ADDR. */
7d384cc0 3912 if (current_function_check_memory_usage)
921b3427 3913 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 3914 addr, Pmode,
921b3427 3915 size, TYPE_MODE (sizetype),
956d6950
JL
3916 GEN_INT (MEMORY_USE_WO),
3917 TYPE_MODE (integer_type_node));
bbf6f052 3918#ifdef TARGET_MEM_FUNCTIONS
3b6f75e2 3919 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
86242483 3920 addr, ptr_mode,
3b6f75e2
JW
3921 const0_rtx, TYPE_MODE (integer_type_node),
3922 convert_to_mode (TYPE_MODE (sizetype),
3923 size,
3924 TREE_UNSIGNED (sizetype)),
3925 TYPE_MODE (sizetype));
bbf6f052 3926#else
d562e42e 3927 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
86242483 3928 addr, ptr_mode,
3b6f75e2
JW
3929 convert_to_mode (TYPE_MODE (integer_type_node),
3930 size,
3931 TREE_UNSIGNED (integer_type_node)),
3932 TYPE_MODE (integer_type_node));
bbf6f052 3933#endif
e87b4f3f 3934 }
22619c3f 3935
e87b4f3f
RS
3936 if (label)
3937 emit_label (label);
bbf6f052
RK
3938 }
3939 }
fffa9c1d
JW
3940 /* Handle calls that return values in multiple non-contiguous locations.
3941 The Irix 6 ABI has examples of this. */
3942 else if (GET_CODE (target) == PARALLEL)
aac5cc16
RH
3943 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3944 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
bbf6f052
RK
3945 else if (GET_MODE (temp) == BLKmode)
3946 emit_block_move (target, temp, expr_size (exp),
3947 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3948 else
3949 emit_move_insn (target, temp);
3950 }
709f5be1 3951
766f36c7
RK
3952 /* If we don't want a value, return NULL_RTX. */
3953 if (! want_value)
3954 return NULL_RTX;
3955
3956 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3957 ??? The latter test doesn't seem to make sense. */
3958 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 3959 return temp;
766f36c7
RK
3960
3961 /* Return TARGET itself if it is a hard register. */
3962 else if (want_value && GET_MODE (target) != BLKmode
3963 && ! (GET_CODE (target) == REG
3964 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 3965 return copy_to_reg (target);
766f36c7
RK
3966
3967 else
709f5be1 3968 return target;
bbf6f052
RK
3969}
3970\f
9de08200
RK
3971/* Return 1 if EXP just contains zeros. */
3972
3973static int
3974is_zeros_p (exp)
3975 tree exp;
3976{
3977 tree elt;
3978
3979 switch (TREE_CODE (exp))
3980 {
3981 case CONVERT_EXPR:
3982 case NOP_EXPR:
3983 case NON_LVALUE_EXPR:
3984 return is_zeros_p (TREE_OPERAND (exp, 0));
3985
3986 case INTEGER_CST:
3987 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3988
3989 case COMPLEX_CST:
3990 return
3991 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3992
3993 case REAL_CST:
41c9120b 3994 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200
RK
3995
3996 case CONSTRUCTOR:
e1a43f73
PB
3997 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3998 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
3999 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4000 if (! is_zeros_p (TREE_VALUE (elt)))
4001 return 0;
4002
4003 return 1;
e9a25f70
JL
4004
4005 default:
4006 return 0;
9de08200 4007 }
9de08200
RK
4008}
4009
4010/* Return 1 if EXP contains mostly (3/4) zeros. */
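/* Worked example (added for clarity): an initializer with 8 elements
   of which 7 are zero satisfies 4*7 = 28 >= 3*8 = 24, so it counts as
   mostly zero; one with 2 zeros out of 4 gives 4*2 = 8 < 3*4 = 12 and
   does not.  */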
4011
4012static int
4013mostly_zeros_p (exp)
4014 tree exp;
4015{
9de08200
RK
4016 if (TREE_CODE (exp) == CONSTRUCTOR)
4017 {
e1a43f73
PB
4018 int elts = 0, zeros = 0;
4019 tree elt = CONSTRUCTOR_ELTS (exp);
4020 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4021 {
4022 /* If there are no ranges of true bits, it is all zero. */
4023 return elt == NULL_TREE;
4024 }
4025 for (; elt; elt = TREE_CHAIN (elt))
4026 {
4027 /* We do not handle the case where the index is a RANGE_EXPR,
4028 so the statistic will be somewhat inaccurate.
4029 We do make a more accurate count in store_constructor itself,
4030 but since this function is only used for nested array elements,
0f41302f 4031 this should be close enough. */
e1a43f73
PB
4032 if (mostly_zeros_p (TREE_VALUE (elt)))
4033 zeros++;
4034 elts++;
4035 }
9de08200
RK
4036
4037 return 4 * zeros >= 3 * elts;
4038 }
4039
4040 return is_zeros_p (exp);
4041}
4042\f
e1a43f73
PB
4043/* Helper function for store_constructor.
4044 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4045 TYPE is the type of the CONSTRUCTOR, not the element type.
c5c76735 4046 ALIGN and CLEARED are as for store_constructor.
23ccec44
JW
4047
4048 This provides a recursive shortcut back to store_constructor when it isn't
4049 necessary to go through store_field. This is so that we can pass through
4050 the cleared field to let store_constructor know that we may not have to
4051 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4052
4053static void
4054store_constructor_field (target, bitsize, bitpos,
c5c76735 4055 mode, exp, type, align, cleared)
e1a43f73
PB
4056 rtx target;
4057 int bitsize, bitpos;
4058 enum machine_mode mode;
4059 tree exp, type;
c5c76735 4060 int align;
e1a43f73
PB
4061 int cleared;
4062{
4063 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44
JW
4064 && bitpos % BITS_PER_UNIT == 0
4065 /* If we have a non-zero bitpos for a register target, then we just
4066 let store_field do the bitfield handling. This is unlikely to
4067 generate unnecessary clear instructions anyway. */
4068 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 4069 {
126e5b0d 4070 if (bitpos != 0)
ce64861e
RK
4071 target
4072 = change_address (target,
4073 GET_MODE (target) == BLKmode
4074 || 0 != (bitpos
4075 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4076 ? BLKmode : VOIDmode,
4077 plus_constant (XEXP (target, 0),
4078 bitpos / BITS_PER_UNIT));
c5c76735 4079 store_constructor (exp, target, align, cleared);
e1a43f73
PB
4080 }
4081 else
c5c76735
JL
4082 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4083 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
4084 int_size_in_bytes (type), cleared);
e1a43f73
PB
4085}
4086
bbf6f052 4087/* Store the value of constructor EXP into the rtx TARGET.
e1a43f73 4088 TARGET is either a REG or a MEM.
c5c76735 4089 ALIGN is the maximum known alignment for TARGET, in bits.
0f41302f 4090 CLEARED is true if TARGET is known to have been zeroed. */
bbf6f052
RK
4091
4092static void
c5c76735 4093store_constructor (exp, target, align, cleared)
bbf6f052
RK
4094 tree exp;
4095 rtx target;
c5c76735 4096 int align;
e1a43f73 4097 int cleared;
bbf6f052 4098{
4af3895e 4099 tree type = TREE_TYPE (exp);
a5efcd63 4100#ifdef WORD_REGISTER_OPERATIONS
34c73909 4101 rtx exp_size = expr_size (exp);
a5efcd63 4102#endif
4af3895e 4103
bbf6f052
RK
4104 /* We know our target cannot conflict, since safe_from_p has been called. */
4105#if 0
4106 /* Don't try copying piece by piece into a hard register
4107 since that is vulnerable to being clobbered by EXP.
4108 Instead, construct in a pseudo register and then copy it all. */
4109 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4110 {
4111 rtx temp = gen_reg_rtx (GET_MODE (target));
e1a43f73 4112 store_constructor (exp, temp, 0);
bbf6f052
RK
4113 emit_move_insn (target, temp);
4114 return;
4115 }
4116#endif
4117
e44842fe
RK
4118 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4119 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
4120 {
4121 register tree elt;
4122
4af3895e 4123 /* Inform later passes that the whole union value is dead. */
dd1db5ec
RK
4124 if ((TREE_CODE (type) == UNION_TYPE
4125 || TREE_CODE (type) == QUAL_UNION_TYPE)
4126 && ! cleared)
a59f8640
R
4127 {
4128 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4129
4130 /* If the constructor is empty, clear the union. */
4131 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4132 clear_storage (target, expr_size (exp),
4133 TYPE_ALIGN (type) / BITS_PER_UNIT);
4134 }
4af3895e
JVA
4135
4136 /* If we are building a static constructor into a register,
4137 set the initial value as zero so we can fold the value into
67225c15
RK
4138 a constant. But if more than one register is involved,
4139 this probably loses. */
4140 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4141 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200
RK
4142 {
4143 if (! cleared)
e9a25f70 4144 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4af3895e 4145
9de08200
RK
4146 cleared = 1;
4147 }
4148
4149 /* If the constructor has fewer fields than the structure
4150 or if we are initializing the structure to mostly zeros,
bbf6f052 4151 clear the whole structure first. */
9de08200
RK
4152 else if ((list_length (CONSTRUCTOR_ELTS (exp))
4153 != list_length (TYPE_FIELDS (type)))
4154 || mostly_zeros_p (exp))
4155 {
4156 if (! cleared)
4157 clear_storage (target, expr_size (exp),
c5c76735 4158 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
9de08200
RK
4159
4160 cleared = 1;
4161 }
dd1db5ec 4162 else if (! cleared)
bbf6f052 4163 /* Inform later passes that the old value is dead. */
38a448ca 4164 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4165
4166 /* Store each element of the constructor into
4167 the corresponding field of TARGET. */
4168
4169 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4170 {
4171 register tree field = TREE_PURPOSE (elt);
c5c76735 4172#ifdef WORD_REGISTER_OPERATIONS
34c73909 4173 tree value = TREE_VALUE (elt);
c5c76735 4174#endif
bbf6f052
RK
4175 register enum machine_mode mode;
4176 int bitsize;
b50d17a1 4177 int bitpos = 0;
bbf6f052 4178 int unsignedp;
b50d17a1
RK
4179 tree pos, constant = 0, offset = 0;
4180 rtx to_rtx = target;
bbf6f052 4181
f32fd778
RS
4182 /* Just ignore missing fields.
4183 We cleared the whole structure, above,
4184 if any fields are missing. */
4185 if (field == 0)
4186 continue;
4187
e1a43f73
PB
4188 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4189 continue;
9de08200 4190
14a774a9
RK
4191 if (TREE_CODE (DECL_SIZE (field)) == INTEGER_CST)
4192 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4193 else
4194 bitsize = -1;
4195
bbf6f052
RK
4196 unsignedp = TREE_UNSIGNED (field);
4197 mode = DECL_MODE (field);
4198 if (DECL_BIT_FIELD (field))
4199 mode = VOIDmode;
4200
b50d17a1
RK
4201 pos = DECL_FIELD_BITPOS (field);
4202 if (TREE_CODE (pos) == INTEGER_CST)
4203 constant = pos;
4204 else if (TREE_CODE (pos) == PLUS_EXPR
4205 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4206 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4207 else
4208 offset = pos;
4209
4210 if (constant)
cd11b87e 4211 bitpos = TREE_INT_CST_LOW (constant);
b50d17a1
RK
4212
4213 if (offset)
4214 {
4215 rtx offset_rtx;
4216
4217 if (contains_placeholder_p (offset))
4218 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 4219 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 4220
9f887d05 4221 offset = size_binop (EXACT_DIV_EXPR, offset,
b50d17a1 4222 size_int (BITS_PER_UNIT));
bbf6f052 4223
b50d17a1
RK
4224 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4225 if (GET_CODE (to_rtx) != MEM)
4226 abort ();
4227
bd070e1a
RH
4228 if (GET_MODE (offset_rtx) != ptr_mode)
4229 {
4230#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 4231 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
4232#else
4233 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4234#endif
4235 }
4236
b50d17a1
RK
4237 to_rtx
4238 = change_address (to_rtx, VOIDmode,
38a448ca 4239 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
c5c76735
JL
4240 force_reg (ptr_mode,
4241 offset_rtx)));
b50d17a1 4242 }
c5c76735 4243
cf04eb80
RK
4244 if (TREE_READONLY (field))
4245 {
9151b3bf 4246 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
4247 to_rtx = copy_rtx (to_rtx);
4248
cf04eb80
RK
4249 RTX_UNCHANGING_P (to_rtx) = 1;
4250 }
4251
34c73909
R
4252#ifdef WORD_REGISTER_OPERATIONS
4253 /* If this initializes a field that is smaller than a word, at the
4254 start of a word, try to widen it to a full word.
4255 This special case allows us to output C++ member function
4256 initializations in a form that the optimizers can understand. */
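/* Illustrative sketch (added commentary): if a constructor initializes
   a char field lying at the start of a word of a REG target to 1, the
   QImode store is widened here into a word_mode store of the value 1
   (shifted to the top of the word on big-endian targets), which
   word-oriented optimizers understand far better than a bit-field
   insertion.  */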
4257 if (constant
4258 && GET_CODE (target) == REG
4259 && bitsize < BITS_PER_WORD
4260 && bitpos % BITS_PER_WORD == 0
4261 && GET_MODE_CLASS (mode) == MODE_INT
4262 && TREE_CODE (value) == INTEGER_CST
4263 && GET_CODE (exp_size) == CONST_INT
4264 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4265 {
4266 tree type = TREE_TYPE (value);
4267 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4268 {
4269 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4270 value = convert (type, value);
4271 }
4272 if (BYTES_BIG_ENDIAN)
4273 value
4274 = fold (build (LSHIFT_EXPR, type, value,
4275 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4276 bitsize = BITS_PER_WORD;
4277 mode = word_mode;
4278 }
4279#endif
c5c76735
JL
4280 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4281 TREE_VALUE (elt), type,
4282 MIN (align,
4283 DECL_ALIGN (TREE_PURPOSE (elt))),
4284 cleared);
bbf6f052
RK
4285 }
4286 }
4af3895e 4287 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
4288 {
4289 register tree elt;
4290 register int i;
e1a43f73 4291 int need_to_clear;
4af3895e 4292 tree domain = TYPE_DOMAIN (type);
906c4e36
RK
4293 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4294 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 4295 tree elttype = TREE_TYPE (type);
bbf6f052 4296
e1a43f73 4297 /* If the constructor has fewer elements than the array,
38e01259 4298 clear the whole array first. Similarly if this is
e1a43f73
PB
4299 a static constructor of a non-BLKmode object. */
4300 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4301 need_to_clear = 1;
4302 else
4303 {
4304 HOST_WIDE_INT count = 0, zero_count = 0;
4305 need_to_clear = 0;
4306 /* This loop is a more accurate version of the loop in
4307 mostly_zeros_p (it handles RANGE_EXPR in an index).
4308 It is also needed to check for missing elements. */
4309 for (elt = CONSTRUCTOR_ELTS (exp);
4310 elt != NULL_TREE;
df0faff1 4311 elt = TREE_CHAIN (elt))
e1a43f73
PB
4312 {
4313 tree index = TREE_PURPOSE (elt);
4314 HOST_WIDE_INT this_node_count;
4315 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4316 {
4317 tree lo_index = TREE_OPERAND (index, 0);
4318 tree hi_index = TREE_OPERAND (index, 1);
4319 if (TREE_CODE (lo_index) != INTEGER_CST
4320 || TREE_CODE (hi_index) != INTEGER_CST)
4321 {
4322 need_to_clear = 1;
4323 break;
4324 }
4325 this_node_count = TREE_INT_CST_LOW (hi_index)
4326 - TREE_INT_CST_LOW (lo_index) + 1;
4327 }
4328 else
4329 this_node_count = 1;
4330 count += this_node_count;
4331 if (mostly_zeros_p (TREE_VALUE (elt)))
4332 zero_count += this_node_count;
4333 }
8e958f70 4334 /* Clear the entire array first if there are any missing elements,
0f41302f 4335 or if the incidence of zero elements is >= 75%. */
8e958f70
PB
4336 if (count < maxelt - minelt + 1
4337 || 4 * zero_count >= 3 * count)
e1a43f73
PB
4338 need_to_clear = 1;
4339 }
4340 if (need_to_clear)
9de08200
RK
4341 {
4342 if (! cleared)
4343 clear_storage (target, expr_size (exp),
c5c76735 4344 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
9de08200
RK
4345 cleared = 1;
4346 }
bbf6f052
RK
4347 else
4348 /* Inform later passes that the old value is dead. */
38a448ca 4349 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4350
4351 /* Store each element of the constructor into
4352 the corresponding element of TARGET, determined
4353 by counting the elements. */
4354 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4355 elt;
4356 elt = TREE_CHAIN (elt), i++)
4357 {
4358 register enum machine_mode mode;
4359 int bitsize;
4360 int bitpos;
4361 int unsignedp;
e1a43f73 4362 tree value = TREE_VALUE (elt);
c5c76735 4363 int align = TYPE_ALIGN (TREE_TYPE (value));
03dc44a6
RS
4364 tree index = TREE_PURPOSE (elt);
4365 rtx xtarget = target;
bbf6f052 4366
e1a43f73
PB
4367 if (cleared && is_zeros_p (value))
4368 continue;
9de08200 4369
bbf6f052 4370 unsignedp = TREE_UNSIGNED (elttype);
14a774a9
RK
4371 mode = TYPE_MODE (elttype);
4372 if (mode == BLKmode)
4373 {
4374 if (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4375 && TREE_INT_CST_HIGH (TYPE_SIZE (elttype)) == 0)
4376 bitsize = TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4377 else
4378 bitsize = -1;
4379 }
4380 else
4381 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 4382
e1a43f73
PB
4383 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4384 {
4385 tree lo_index = TREE_OPERAND (index, 0);
4386 tree hi_index = TREE_OPERAND (index, 1);
4387 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4388 struct nesting *loop;
05c0b405
PB
4389 HOST_WIDE_INT lo, hi, count;
4390 tree position;
e1a43f73 4391
0f41302f 4392 /* If the range is constant and "small", unroll the loop. */
e1a43f73 4393 if (TREE_CODE (lo_index) == INTEGER_CST
05c0b405
PB
4394 && TREE_CODE (hi_index) == INTEGER_CST
4395 && (lo = TREE_INT_CST_LOW (lo_index),
4396 hi = TREE_INT_CST_LOW (hi_index),
4397 count = hi - lo + 1,
4398 (GET_CODE (target) != MEM
4399 || count <= 2
4400 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4401 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4402 <= 40 * 8))))
e1a43f73 4403 {
05c0b405
PB
4404 lo -= minelt; hi -= minelt;
4405 for (; lo <= hi; lo++)
e1a43f73 4406 {
05c0b405 4407 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
c5c76735
JL
4408 store_constructor_field (target, bitsize, bitpos, mode,
4409 value, type, align, cleared);
e1a43f73
PB
4410 }
4411 }
4412 else
4413 {
4414 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4415 loop_top = gen_label_rtx ();
4416 loop_end = gen_label_rtx ();
4417
4418 unsignedp = TREE_UNSIGNED (domain);
4419
4420 index = build_decl (VAR_DECL, NULL_TREE, domain);
4421
4422 DECL_RTL (index) = index_r
4423 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4424 &unsignedp, 0));
4425
4426 if (TREE_CODE (value) == SAVE_EXPR
4427 && SAVE_EXPR_RTL (value) == 0)
4428 {
0f41302f
MS
4429 /* Make sure value gets expanded once before the
4430 loop. */
e1a43f73
PB
4431 expand_expr (value, const0_rtx, VOIDmode, 0);
4432 emit_queue ();
4433 }
4434 store_expr (lo_index, index_r, 0);
4435 loop = expand_start_loop (0);
4436
0f41302f 4437 /* Assign value to element index. */
e1a43f73
PB
4438 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4439 size_int (BITS_PER_UNIT));
4440 position = size_binop (MULT_EXPR,
4441 size_binop (MINUS_EXPR, index,
4442 TYPE_MIN_VALUE (domain)),
4443 position);
4444 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4445 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
e1a43f73
PB
4446 xtarget = change_address (target, mode, addr);
4447 if (TREE_CODE (value) == CONSTRUCTOR)
c5c76735 4448 store_constructor (value, xtarget, align, cleared);
e1a43f73
PB
4449 else
4450 store_expr (value, xtarget, 0);
4451
4452 expand_exit_loop_if_false (loop,
4453 build (LT_EXPR, integer_type_node,
4454 index, hi_index));
4455
4456 expand_increment (build (PREINCREMENT_EXPR,
4457 TREE_TYPE (index),
7b8b9722 4458 index, integer_one_node), 0, 0);
e1a43f73
PB
4459 expand_end_loop ();
4460 emit_label (loop_end);
4461
4462 /* Needed by stupid register allocation, to extend the
4463 lifetime of pseudo-regs used by target past the end
4464 of the loop. */
38a448ca 4465 emit_insn (gen_rtx_USE (GET_MODE (target), target));
e1a43f73
PB
4466 }
4467 }
4468 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
5b6c44ff 4469 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
03dc44a6 4470 {
e1a43f73 4471 rtx pos_rtx, addr;
03dc44a6
RS
4472 tree position;
4473
5b6c44ff
RK
4474 if (index == 0)
4475 index = size_int (i);
4476
e1a43f73
PB
4477 if (minelt)
4478 index = size_binop (MINUS_EXPR, index,
4479 TYPE_MIN_VALUE (domain));
5b6c44ff
RK
4480 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4481 size_int (BITS_PER_UNIT));
4482 position = size_binop (MULT_EXPR, index, position);
03dc44a6 4483 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4484 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
03dc44a6 4485 xtarget = change_address (target, mode, addr);
e1a43f73 4486 store_expr (value, xtarget, 0);
03dc44a6
RS
4487 }
4488 else
4489 {
4490 if (index != 0)
7c314719 4491 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
03dc44a6
RS
4492 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4493 else
4494 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
c5c76735
JL
4495 store_constructor_field (target, bitsize, bitpos, mode, value,
4496 type, align, cleared);
03dc44a6 4497 }
bbf6f052
RK
4498 }
4499 }
071a6595
PB
4500 /* set constructor assignments */
4501 else if (TREE_CODE (type) == SET_TYPE)
4502 {
e1a43f73 4503 tree elt = CONSTRUCTOR_ELTS (exp);
e1a43f73 4504 int nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
4505 tree domain = TYPE_DOMAIN (type);
4506 tree domain_min, domain_max, bitlength;
4507
9faa82d8 4508 /* The default implementation strategy is to extract the constant
071a6595
PB
4509 parts of the constructor, use that to initialize the target,
4510 and then "or" in whatever non-constant ranges we need in addition.
4511
4512 If a large set is all zero or all ones, it is
4513 probably better to set it using memset (if available) or bzero.
4514 Also, if a large set has just a single range, it may also be
4515 better to first clear the whole set (using
0f41302f 4516 bzero/memset), and then set the bits we want. */
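/* Illustrative example (added commentary; SET_TYPE constructors come
   from front ends with Pascal-style sets): a constant set over the
   domain [0..31] holding the elements 1 and 3..5 occupies one word,
   and its image -- bits 1, 3, 4 and 5, i.e. the value 0x3a -- is
   simply stored; non-constant ranges would then be or'ed in by the
   code further below.  */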
071a6595 4517
0f41302f 4518 /* Check for all zeros. */
e1a43f73 4519 if (elt == NULL_TREE)
071a6595 4520 {
e1a43f73
PB
4521 if (!cleared)
4522 clear_storage (target, expr_size (exp),
4523 TYPE_ALIGN (type) / BITS_PER_UNIT);
071a6595
PB
4524 return;
4525 }
4526
071a6595
PB
4527 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4528 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4529 bitlength = size_binop (PLUS_EXPR,
4530 size_binop (MINUS_EXPR, domain_max, domain_min),
4531 size_one_node);
4532
e1a43f73
PB
4533 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4534 abort ();
4535 nbits = TREE_INT_CST_LOW (bitlength);
4536
4537 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4538 are "complicated" (more than one range), initialize (the
4539 constant parts) by copying from a constant. */
4540 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4541 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 4542 {
b4ee5a72
PB
4543 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4544 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 4545 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72
PB
4546 HOST_WIDE_INT word = 0;
4547 int bit_pos = 0;
4548 int ibit = 0;
0f41302f 4549 int offset = 0; /* In bytes from beginning of set. */
e1a43f73 4550 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 4551 for (;;)
071a6595 4552 {
b4ee5a72
PB
4553 if (bit_buffer[ibit])
4554 {
b09f3348 4555 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
4556 word |= (1 << (set_word_size - 1 - bit_pos));
4557 else
4558 word |= 1 << bit_pos;
4559 }
4560 bit_pos++; ibit++;
4561 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 4562 {
e1a43f73
PB
4563 if (word != 0 || ! cleared)
4564 {
4565 rtx datum = GEN_INT (word);
4566 rtx to_rtx;
0f41302f
MS
4567 /* The assumption here is that it is safe to use
4568 XEXP if the set is multi-word, but not if
4569 it's single-word. */
e1a43f73
PB
4570 if (GET_CODE (target) == MEM)
4571 {
4572 to_rtx = plus_constant (XEXP (target, 0), offset);
4573 to_rtx = change_address (target, mode, to_rtx);
4574 }
4575 else if (offset == 0)
4576 to_rtx = target;
4577 else
4578 abort ();
4579 emit_move_insn (to_rtx, datum);
4580 }
b4ee5a72
PB
4581 if (ibit == nbits)
4582 break;
4583 word = 0;
4584 bit_pos = 0;
4585 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
4586 }
4587 }
071a6595 4588 }
e1a43f73
PB
4589 else if (!cleared)
4590 {
0f41302f 4591 /* Don't bother clearing storage if the set is all ones. */
e1a43f73
PB
4592 if (TREE_CHAIN (elt) != NULL_TREE
4593 || (TREE_PURPOSE (elt) == NULL_TREE
4594 ? nbits != 1
4595 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4596 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4597 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4598 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4599 != nbits))))
4600 clear_storage (target, expr_size (exp),
4601 TYPE_ALIGN (type) / BITS_PER_UNIT);
4602 }
4603
4604 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595
PB
4605 {
4606 /* start of range of element or NULL */
4607 tree startbit = TREE_PURPOSE (elt);
4608 /* end of range of element, or element value */
4609 tree endbit = TREE_VALUE (elt);
381127e8 4610#ifdef TARGET_MEM_FUNCTIONS
071a6595 4611 HOST_WIDE_INT startb, endb;
381127e8 4612#endif
071a6595
PB
4613 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4614
4615 bitlength_rtx = expand_expr (bitlength,
4616 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4617
4618 /* handle non-range tuple element like [ expr ] */
4619 if (startbit == NULL_TREE)
4620 {
4621 startbit = save_expr (endbit);
4622 endbit = startbit;
4623 }
4624 startbit = convert (sizetype, startbit);
4625 endbit = convert (sizetype, endbit);
4626 if (! integer_zerop (domain_min))
4627 {
4628 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4629 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4630 }
4631 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4632 EXPAND_CONST_ADDRESS);
4633 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4634 EXPAND_CONST_ADDRESS);
4635
4636 if (REG_P (target))
4637 {
4638 targetx = assign_stack_temp (GET_MODE (target),
4639 GET_MODE_SIZE (GET_MODE (target)),
4640 0);
4641 emit_move_insn (targetx, target);
4642 }
4643 else if (GET_CODE (target) == MEM)
4644 targetx = target;
4645 else
4646 abort ();
4647
4648#ifdef TARGET_MEM_FUNCTIONS
4649 /* Optimization: If startbit and endbit are
9faa82d8 4650 constants divisible by BITS_PER_UNIT,
0f41302f 4651 call memset instead. */
071a6595
PB
4652 if (TREE_CODE (startbit) == INTEGER_CST
4653 && TREE_CODE (endbit) == INTEGER_CST
4654 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 4655 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 4656 {
071a6595
PB
4657 emit_library_call (memset_libfunc, 0,
4658 VOIDmode, 3,
e1a43f73
PB
4659 plus_constant (XEXP (targetx, 0),
4660 startb / BITS_PER_UNIT),
071a6595 4661 Pmode,
3b6f75e2 4662 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 4663 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 4664 TYPE_MODE (sizetype));
071a6595
PB
4665 }
4666 else
4667#endif
4668 {
38a448ca 4669 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
071a6595
PB
4670 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4671 bitlength_rtx, TYPE_MODE (sizetype),
4672 startbit_rtx, TYPE_MODE (sizetype),
4673 endbit_rtx, TYPE_MODE (sizetype));
4674 }
4675 if (REG_P (target))
4676 emit_move_insn (target, targetx);
4677 }
4678 }
bbf6f052
RK
4679
4680 else
4681 abort ();
4682}
4683
4684/* Store the value of EXP (an expression tree)
4685 into a subfield of TARGET which has mode MODE and occupies
4686 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4687 If MODE is VOIDmode, it means that we are storing into a bit-field.
4688
4689 If VALUE_MODE is VOIDmode, return nothing in particular.
4690 UNSIGNEDP is not used in this case.
4691
4692 Otherwise, return an rtx for the value stored. This rtx
4693 has mode VALUE_MODE if that is convenient to do.
4694 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4695
4696 ALIGN is the alignment that TARGET is known to have, measured in bytes.
ece32014
MM
4697 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4698
4699 ALIAS_SET is the alias set for the destination. This value will
4700 (in general) be different from that for TARGET, since TARGET is a
4701 reference to the containing structure. */
bbf6f052
RK
4702
4703static rtx
4704store_field (target, bitsize, bitpos, mode, exp, value_mode,
ece32014 4705 unsignedp, align, total_size, alias_set)
bbf6f052
RK
4706 rtx target;
4707 int bitsize, bitpos;
4708 enum machine_mode mode;
4709 tree exp;
4710 enum machine_mode value_mode;
4711 int unsignedp;
4712 int align;
4713 int total_size;
ece32014 4714 int alias_set;
bbf6f052 4715{
906c4e36 4716 HOST_WIDE_INT width_mask = 0;
bbf6f052 4717
e9a25f70
JL
4718 if (TREE_CODE (exp) == ERROR_MARK)
4719 return const0_rtx;
4720
906c4e36
RK
4721 if (bitsize < HOST_BITS_PER_WIDE_INT)
4722 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
4723
4724 /* If we are storing into an unaligned field of an aligned union that is
4725 in a register, we may have the mode of TARGET being an integer mode but
4726 MODE == BLKmode. In that case, get an aligned object whose size and
4727 alignment are the same as TARGET and store TARGET into it (we can avoid
4728 the store if the field being stored is the entire width of TARGET). Then
4729 call ourselves recursively to store the field into a BLKmode version of
4730 that object. Finally, load from the object into TARGET. This is not
4731 very efficient in general, but should only be slightly more expensive
4732 than the otherwise-required unaligned accesses. Perhaps this can be
4733 cleaned up later. */
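/* Sketch of the scheme just described (added commentary):

	object = target;	-- copy into an aligned stack temp
	store into the BLKmode view of OBJECT (recursive call below)
	target = object;	-- copy the updated value back

   so the misaligned BLKmode store happens in memory, where it is
   always possible, at the cost of two extra moves.  */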
4734
4735 if (mode == BLKmode
4736 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4737 {
4738 rtx object = assign_stack_temp (GET_MODE (target),
4739 GET_MODE_SIZE (GET_MODE (target)), 0);
4740 rtx blk_object = copy_rtx (object);
4741
c6df88cb
MM
4742 MEM_SET_IN_STRUCT_P (object, 1);
4743 MEM_SET_IN_STRUCT_P (blk_object, 1);
bbf6f052
RK
4744 PUT_MODE (blk_object, BLKmode);
4745
4746 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4747 emit_move_insn (object, target);
4748
4749 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
ece32014 4750 align, total_size, alias_set);
bbf6f052 4751
46093b97
RS
4752 /* Even though we aren't returning target, we need to
4753 give it the updated value. */
bbf6f052
RK
4754 emit_move_insn (target, object);
4755
46093b97 4756 return blk_object;
bbf6f052
RK
4757 }
4758
4759 /* If the structure is in a register or if the component
4760 is a bit field, we cannot use addressing to access it.
4761 Use bit-field techniques or SUBREG to store in it. */
4762
4fa52007 4763 if (mode == VOIDmode
6ab06cbb
JW
4764 || (mode != BLKmode && ! direct_store[(int) mode]
4765 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4766 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4fa52007 4767 || GET_CODE (target) == REG
c980ac49 4768 || GET_CODE (target) == SUBREG
ccc98036
RS
4769 /* If the field isn't aligned enough to store as an ordinary memref,
4770 store it as a bit field. */
14a774a9
RK
4771 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS
4772 && (align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
4773 || bitpos % GET_MODE_ALIGNMENT (mode)))
4774 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS
4775 && (TYPE_ALIGN (TREE_TYPE (exp)) > align * BITS_PER_UNIT
4776 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4777 /* If the RHS and field are a constant size and the size of the
4778 RHS isn't the same size as the bitfield, we must use bitfield
4779 operations. */
4780 || ((bitsize >= 0
4781 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
4782 && (TREE_INT_CST_HIGH (TYPE_SIZE (TREE_TYPE (exp))) != 0
4783 || TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))) != bitsize)))
bbf6f052 4784 {
906c4e36 4785 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 4786
ef19912d
RK
4787 /* If BITSIZE is narrower than the size of the type of EXP
4788 we will be narrowing TEMP. Normally, what's wanted are the
4789 low-order bits. However, if EXP's type is a record and this is
4790 a big-endian machine, we want the upper BITSIZE bits. */
4791 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4792 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4793 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4794 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4795 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4796 - bitsize),
4797 temp, 1);
4798
bbd6cf73
RK
4799 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4800 MODE. */
4801 if (mode != VOIDmode && mode != BLKmode
4802 && mode != TYPE_MODE (TREE_TYPE (exp)))
4803 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4804
a281e72d
RK
4805 /* If the modes of TARGET and TEMP are both BLKmode, both
4806 must be in memory and BITPOS must be aligned on a byte
4807 boundary. If so, we simply do a block copy. */
4808 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4809 {
4810 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4811 || bitpos % BITS_PER_UNIT != 0)
4812 abort ();
4813
0086427c
RK
4814 target = change_address (target, VOIDmode,
4815 plus_constant (XEXP (target, 0),
a281e72d
RK
4816 bitpos / BITS_PER_UNIT));
4817
14a774a9
RK
4818 /* Find an alignment that is consistent with the bit position. */
4819 while ((bitpos % (align * BITS_PER_UNIT)) != 0)
4820 align >>= 1;
4821
a281e72d
RK
4822 emit_block_move (target, temp,
4823 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4824 / BITS_PER_UNIT),
14a774a9 4825 align);
a281e72d
RK
4826
4827 return value_mode == VOIDmode ? const0_rtx : target;
4828 }
4829
bbf6f052
RK
4830 /* Store the value in the bitfield. */
4831 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4832 if (value_mode != VOIDmode)
4833 {
4834 /* The caller wants an rtx for the value. */
4835 /* If possible, avoid refetching from the bitfield itself. */
4836 if (width_mask != 0
4837 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 4838 {
9074de27 4839 tree count;
5c4d7cfb 4840 enum machine_mode tmode;
86a2c12a 4841
5c4d7cfb
RS
4842 if (unsignedp)
4843 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4844 tmode = GET_MODE (temp);
86a2c12a
RS
4845 if (tmode == VOIDmode)
4846 tmode = value_mode;
5c4d7cfb
RS
4847 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4848 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4849 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4850 }
bbf6f052 4851 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
4852 NULL_RTX, value_mode, 0, align,
4853 total_size);
bbf6f052
RK
4854 }
4855 return const0_rtx;
4856 }
4857 else
4858 {
4859 rtx addr = XEXP (target, 0);
4860 rtx to_rtx;
4861
4862 /* If a value is wanted, it must be the lhs;
4863 so make the address stable for multiple use. */
4864
4865 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4866 && ! CONSTANT_ADDRESS_P (addr)
4867 /* A frame-pointer reference is already stable. */
4868 && ! (GET_CODE (addr) == PLUS
4869 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4870 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4871 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4872 addr = copy_to_reg (addr);
4873
4874 /* Now build a reference to just the desired component. */
4875
effbcc6a
RK
4876 to_rtx = copy_rtx (change_address (target, mode,
4877 plus_constant (addr,
4878 (bitpos
4879 / BITS_PER_UNIT))));
c6df88cb 4880 MEM_SET_IN_STRUCT_P (to_rtx, 1);
ece32014 4881 MEM_ALIAS_SET (to_rtx) = alias_set;
bbf6f052
RK
4882
4883 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4884 }
4885}
4886\f
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.

   We set *PALIGNMENT to the alignment in bytes of the address that will be
   computed.  This is the alignment of the thing we return if *POFFSET
   is zero, but can be less strictly aligned if *POFFSET is nonzero.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep, palignment)
     tree exp;
     int *pbitsize;
     int *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
     int *palignment;
{
  tree orig_exp = exp;
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = integer_zero_node;
  unsigned int alignment = BIGGEST_ALIGNMENT;

  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));
      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));

      *pbitsize = GET_MODE_BITSIZE (mode);
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
    }

  if (size_tree)
    {
      if (TREE_CODE (size_tree) != INTEGER_CST)
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */

  *pbitpos = 0;

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
	{
	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
		      : TREE_OPERAND (exp, 2));
	  tree constant = integer_zero_node, var = pos;

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (pos == 0)
	    break;

	  /* Assume here that the offset is a multiple of a unit.
	     If not, there should be an explicitly added constant.  */
	  if (TREE_CODE (pos) == PLUS_EXPR
	      && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
	    constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
	  else if (TREE_CODE (pos) == INTEGER_CST)
	    constant = pos, var = integer_zero_node;

	  *pbitpos += TREE_INT_CST_LOW (constant);
	  offset = size_binop (PLUS_EXPR, offset,
			       size_binop (EXACT_DIV_EXPR, var,
					   size_int (BITS_PER_UNIT)));
	}

      else if (TREE_CODE (exp) == ARRAY_REF)
	{
	  /* This code is based on the code in case ARRAY_REF in expand_expr
	     below.  We assume here that the size of an array element is
	     always an integral multiple of BITS_PER_UNIT.  */

	  tree index = TREE_OPERAND (exp, 1);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  tree low_bound
	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	  tree index_type = TREE_TYPE (index);
	  tree xindex;

	  if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
	    {
	      index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
			       index);
	      index_type = TREE_TYPE (index);
	    }

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

	     But sizetype isn't quite right either (especially if
	     the lowbound is negative).  FIXME */

	  if (! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, index_type, index,
				 convert (sizetype, low_bound)));

	  if (TREE_CODE (index) == INTEGER_CST)
	    {
	      index = convert (sbitsizetype, index);
	      index_type = TREE_TYPE (index);
	    }

	  xindex = fold (build (MULT_EXPR, sbitsizetype, index,
				convert (sbitsizetype,
					 TYPE_SIZE (TREE_TYPE (exp)))));

	  if (TREE_CODE (xindex) == INTEGER_CST
	      && TREE_INT_CST_HIGH (xindex) == 0)
	    *pbitpos += TREE_INT_CST_LOW (xindex);
	  else
	    {
	      /* Either the bit offset calculated above is not constant, or
		 it overflowed.  In either case, redo the multiplication
		 against the size in units.  This is especially important
		 in the non-constant case to avoid a division at runtime.  */
	      xindex = fold (build (MULT_EXPR, ssizetype, index,
				    convert (ssizetype,
					     TYPE_SIZE_UNIT (TREE_TYPE (exp)))));

	      if (contains_placeholder_p (xindex))
		xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);

	      offset = size_binop (PLUS_EXPR, offset, xindex);
	    }
	}
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      /* If the offset is non-constant already, then we can't assume any
	 alignment more than the alignment here.  */
      if (! integer_zerop (offset))
	alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

      exp = TREE_OPERAND (exp, 0);
    }

  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
    alignment = MIN (alignment, DECL_ALIGN (exp));
  else if (TREE_TYPE (exp) != 0)
    alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

  if (integer_zerop (offset))
    offset = 0;

  if (offset != 0 && contains_placeholder_p (offset))
    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);

  *pmode = mode;
  *poffset = offset;
  *palignment = alignment / BITS_PER_UNIT;
  return exp;
}
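
/* Illustrative sketch (not in the original source): for a C reference
   like `s.f', where F is a 3-bit signed bit-field placed 10 bits into
   S, a call such as

     tree inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
				       &mode, &unsignedp, &volatilep,
				       &alignment);

   would hand back the tree for S with bitsize == 3, bitpos == 10,
   offset == 0 (the position is constant) and mode == VOIDmode, the
   bit-field case described in the comment before the function.  */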

/* Subroutine of expand_expr: compute memory_usage from modifier.  */

static enum memory_use_mode
get_memory_usage_from_modifier (modifier)
     enum expand_modifier modifier;
{
  switch (modifier)
    {
    case EXPAND_NORMAL:
    case EXPAND_SUM:
      return MEMORY_USE_RO;
      break;
    case EXPAND_MEMORY_USE_WO:
      return MEMORY_USE_WO;
      break;
    case EXPAND_MEMORY_USE_RW:
      return MEMORY_USE_RW;
      break;
    case EXPAND_MEMORY_USE_DONT:
      /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
	 MEMORY_USE_DONT, because they are modifiers to a call of
	 expand_expr in the ADDR_EXPR case of expand_expr.  */
    case EXPAND_CONST_ADDRESS:
    case EXPAND_INITIALIZER:
      return MEMORY_USE_DONT;
    case EXPAND_MEMORY_USE_BAD:
    default:
      abort ();
    }
}
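
/* Typical caller pattern (a paraphrase of the uses later in this file,
   e.g. the VAR_DECL and INDIRECT_REF cases of expand_expr):

     enum memory_use_mode memory_usage
       = get_memory_usage_from_modifier (modifier);
     if (memory_usage != MEMORY_USE_DONT)
       ... emit a call to the -fcheck-memory-usage checker ...  */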
\f
/* Given an rtx VALUE that may contain additions and multiplications,
   return an equivalent value that just refers to a register or memory.
   This is done by generating instructions to perform the arithmetic
   and returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);

  /* Check for a PIC address load.  */
  if (flag_pic
      && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
			  force_operand (op2, NULL_RTX),
			  target, 0);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
	{
	  binoptab = add_optab;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_binop (GET_MODE (value), binoptab,
				   XEXP (XEXP (value, 0), 0), op2,
				   subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_binop (GET_MODE (value), binoptab, temp,
			       force_operand (XEXP (XEXP (value, 0), 1), 0),
			       target, 0, OPTAB_LIB_WIDEN);
	}

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
			   force_operand (op2, NULL_RTX),
			   target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
	 because the only operations we are expanding here are signed ones.  */
    }
  return value;
}
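
/* Illustrative sketch (not in the original source): given the rtx

     (plus:SI (mult:SI (reg:SI 60) (const_int 4)) (const_int 8))

   force_operand recurses on the MULT first, emitting a multiply into a
   pseudo via expand_mult, then emits the addition with expand_binop and
   returns a pseudo (or TARGET) holding the whole sum.  */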
\f
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
	rtx target = assign_temp (part_type, 0, 1, 1);
	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
	parts = tree_cons (to_be_saved,
			   build (RTL_EXPR, part_type, NULL_TREE,
				  (tree) target),
			   parts);
	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}

/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */

static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else if (TREE_PURPOSE (tail))
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}
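
/* Editorial note (not in the original source): both routines return a
   TREE_LIST.  For save_noncopied_parts, each TREE_PURPOSE is the
   COMPONENT_REF to be saved and each TREE_VALUE is an RTL_EXPR whose
   RTL is the temporary that now holds the old contents; a caller can
   therefore store each TREE_VALUE back through its TREE_PURPOSE to
   restore the parts.  */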

/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

static int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static int save_expr_count;
  static int save_expr_size = 0;
  static tree *save_expr_rewritten;
  static tree save_expr_trees[256];

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
	      != INTEGER_CST)
	  && GET_MODE (x) == BLKmode))
    return 1;

  if (top_p && save_expr_size == 0)
    {
      int rtn;

      save_expr_count = 0;
      save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
      save_expr_rewritten = &save_expr_trees[0];

      rtn = safe_from_p (x, exp, 1);

      for (i = 0; i < save_expr_count; ++i)
	{
	  if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
	    abort ();
	  TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
	}

      save_expr_size = 0;

      return rtn;
    }

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
	  || (GET_CODE (XEXP (x, 0)) == PLUS
	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	return ((TREE_VALUE (exp) == 0
		 || safe_from_p (x, TREE_VALUE (exp), 0))
		&& (TREE_CHAIN (exp) == 0
		    || safe_from_p (x, TREE_CHAIN (exp), 0)));
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR? */
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  return (staticp (TREE_OPERAND (exp, 0))
		  || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
		  || TREE_STATIC (exp));

	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM)
	    return 0;
	  break;

	case CALL_EXPR:
	  exp_rtl = CALL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    {
	      /* Assume that the call will clobber all hard registers and
		 all of memory.  */
	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
		  || GET_CODE (x) == MEM)
		return 0;
	    }

	  break;

	case RTL_EXPR:
	  /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;

	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  if (exp_rtl)
	    break;

	  /* This SAVE_EXPR might appear many times in the top-level
	     safe_from_p() expression, and if it has a complex
	     subexpression, examining it multiple times could result
	     in a combinatorial explosion.  E.g. on an Alpha
	     running at least 200MHz, a Fortran test case compiled with
	     optimization took about 28 minutes to compile -- even though
	     it was only a few lines long, and the complicated line causing
	     so much time to be spent in the earlier version of safe_from_p()
	     had only 293 or so unique nodes.

	     So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
	     where it is so we can turn it back in the top-level safe_from_p()
	     when we're done.  */

	  /* For now, don't bother re-sizing the array.  */
	  if (save_expr_count >= save_expr_size)
	    return 0;
	  save_expr_rewritten[save_expr_count++] = exp;

	  nops = tree_code_length[(int) SAVE_EXPR];
	  for (i = 0; i < nops; i++)
	    {
	      tree operand = TREE_OPERAND (exp, i);
	      if (operand == NULL_TREE)
		continue;
	      TREE_SET_CODE (exp, ERROR_MARK);
	      if (!safe_from_p (x, operand, 0))
		return 0;
	      TREE_SET_CODE (exp, SAVE_EXPR);
	    }
	  TREE_SET_CODE (exp, ERROR_MARK);
	  return 1;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

	case METHOD_CALL_EXPR:
	  /* This takes an rtx argument, but shouldn't appear here.  */
	  abort ();

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
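
/* Illustrative sketch (not in the original source): if X is the MEM
   holding a variable `a' and EXP is the expression `a + 1', the 'd'
   case sets EXP_RTL to that same MEM when the operands are scanned, so
   rtx_equal_p detects the conflict and safe_from_p returns 0; a caller
   about to store into X before evaluating EXP would then grab a
   temporary instead.  */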

/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}

/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}

#ifdef MAX_INTEGER_COMPUTATION_MODE
void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code;
  enum machine_mode mode;

  /* Strip any NOPs that don't change the mode.  */
  STRIP_NOPS (exp);
  code = TREE_CODE (exp);

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

  /* First check the type of the overall operation.  We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }
}
#endif
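
/* Editorial note (not in the original source): integer modes are
   enumerated in order of increasing width in machmode.def, so the
   `mode > MAX_INTEGER_COMPUTATION_MODE' tests above compare widths.
   E.g. if a target defines MAX_INTEGER_COMPUTATION_MODE as DImode, a
   TImode addition reaches fatal ("unsupported wide integer
   operation").  */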

\f
/* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
   has any readonly fields.  If any of the fields have types that
   contain readonly fields, return true as well.  */

static int
readonly_fields_p (type)
     tree type;
{
  tree field;

  for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL
	&& (TREE_READONLY (field)
	    || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
		&& readonly_fields_p (TREE_TYPE (field)))))
      return 1;

  return 0;
}
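
/* Illustrative sketch (not in the original source): for

     struct s { const int id; int val; };

   readonly_fields_p returns 1 because `id' is a FIELD_DECL with
   TREE_READONLY set; it also returns 1 for a record that merely embeds
   such a struct, via the recursive call.  */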
\f
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */

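/* Illustrative sketch (not in the original source): the common way to
   evaluate an expression for its value is

     rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   letting expand_expr pick both the target and the mode, while a caller
   that only wants side effects passes const0_rtx as TARGET, which sets
   IGNORE below.  */
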
rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode;
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;
  /* Used by check-memory-usage to make modifier read only.  */
  enum expand_modifier ro_modifier;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;
    }

  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
		 || code == COND_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* Make a read-only version of the modifier.  */
  if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
      || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
    ro_modifier = modifier;
  else
    ro_modifier = EXPAND_NORMAL;

  /* Don't use hard regs as subtargets, because the combiner
     can only handle pseudo regs.  */
  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
    subtarget = 0;
  /* Avoid subtargets inside loops,
     since they hide some invariant expressions.  */
  if (preserve_subexpressions_p ())
    subtarget = 0;

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
	  || code == INDIRECT_REF || code == BUFFER_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, ro_modifier);
      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
	       || code == ARRAY_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, ro_modifier);
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
	  return const0_rtx;
	}

      target = 0;
    }

#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

  if (target
      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    fatal ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif

  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Handle using a label in a containing function.  */
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  {
	    struct function *p = find_function_data (function);
	    /* Allocate in the memory associated with the function
	       that the label is in.  */
	    push_obstacks (p->function_obstack,
			   p->function_maybepermanent_obstack);

	    p->expr->x_forced_labels
	      = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
				   p->expr->x_forced_labels);
	    pop_obstacks ();
	  }
	else
	  {
	    if (modifier == EXPAND_INITIALIZER)
	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
						 label_rtx (exp),
						 forced_labels);
	  }

	temp = gen_rtx_MEM (FUNCTION_MODE,
			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}

      /* ... fall through ...  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	{
	  push_obstacks_nochange ();
	  end_temporary_allocation ();
	  layout_decl (exp, 0);
	  PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
	  pop_obstacks ();
	}

      /* Although static-storage variables start off initialized, according to
	 ANSI C, a memcpy could overwrite them with uninitialized values.  So
	 we check them too.  This also lets us check for read-only variables
	 accessed via a non-const declaration, in case it won't be detected
	 any other way (e.g., in an embedded system or OS kernel without
	 memory protection).

	 Aggregates are not checked here; they're handled elsewhere.  */
      if (current_function && current_function_check_memory_usage
	  && code == VAR_DECL
	  && GET_CODE (DECL_RTL (exp)) == MEM
	  && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	{
	  enum memory_use_mode memory_usage;
	  memory_usage = get_memory_usage_from_modifier (modifier);

	  if (memory_usage != MEMORY_USE_DONT)
	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			       XEXP (DECL_RTL (exp), 0), Pmode,
			       GEN_INT (int_size_in_bytes (type)),
			       TYPE_MODE (sizetype),
			       GEN_INT (memory_usage),
			       TYPE_MODE (integer_type_node));
	}

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
	  && context != inline_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  mark_addressable (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr = gen_rtx_MEM (Pmode,
				fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);
	  temp = change_address (DECL_RTL (exp), mode, addr);
	}

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
	temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
			       XEXP (DECL_RTL (exp), 0));

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && modifier != EXPAND_CONST_ADDRESS
	       && modifier != EXPAND_SUM
	       && modifier != EXPAND_INITIALIZER
	       && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
	temp = change_address (DECL_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0),
			      DECL_ALIGN (exp) / BITS_PER_UNIT);

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
	  && GET_MODE (DECL_RTL (exp)) != mode)
	{
	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
	    abort ();

	  temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return DECL_RTL (exp);

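      /* Editorial note (not in the original source): on a target whose
	 PROMOTE_MODE widens SImode variables to DImode registers,
	 DECL_RTL above is the DImode pseudo; the code wraps it as
	 (subreg:SI (reg:DI ...)) with SUBREG_PROMOTED_VAR_P set, so
	 later passes know the upper bits already hold a valid extension
	 and can skip re-extending.  */
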
    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp),
				 mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
			  EXPAND_MEMORY_USE_BAD);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
	output_constant_def (exp);

      /* TREE_CST_RTL probably contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
	      || (flag_force_addr
		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
	return change_address (TREE_CST_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);

    case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
	char *saved_input_filename = input_filename;
	int saved_lineno = lineno;
	input_filename = EXPR_WFL_FILENAME (exp);
	lineno = EXPR_WFL_LINENO (exp);
	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
	  emit_line_note (input_filename, lineno);
	/* Possibly avoid switching back and forth here.  */
	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
	input_filename = saved_input_filename;
	lineno = saved_lineno;
	return to_return;
      }

    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
	 initialization function and move it into our context.  */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  /* The following call just exists to abort if the context is
	     not of a containing function.  */
	  find_function_data (context);

	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return change_address (temp, mode,
				 fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (type, 3, 0, 0);

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp, 0);

	  TREE_USED (exp) = 1;
	}

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);

    case UNSAVE_EXPR:
      {
	rtx temp;
	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
	return temp;
      }

    case PLACEHOLDER_EXPR:
      {
	tree placeholder_expr;

	/* If there is an object on the head of the placeholder list,
	   see if some object in it is of type TYPE or a pointer to it.  For
	   further information, see tree.def.  */
	for (placeholder_expr = placeholder_list;
	     placeholder_expr != 0;
	     placeholder_expr = TREE_CHAIN (placeholder_expr))
	  {
	    tree need_type = TYPE_MAIN_VARIANT (type);
	    tree object = 0;
	    tree old_list = placeholder_list;
	    tree elt;

	    /* Find the outermost reference that is of the type we want.
	       If none, see if any object has a type that is a pointer to
	       the type we want.  */
	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
		object = elt;

	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (POINTER_TYPE_P (TREE_TYPE (elt))
		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		      == need_type))
		object = build1 (INDIRECT_REF, need_type, elt);

	    if (object != 0)
	      {
		/* Expand this object skipping the list entries before
		   it was found in case it is also a PLACEHOLDER_EXPR.
		   In that case, we want to translate it using subsequent
		   entries.  */
		placeholder_list = TREE_CHAIN (placeholder_expr);
		temp = expand_expr (object, original_target, tmode,
				    ro_modifier);
		placeholder_list = old_list;
		return temp;
	      }
	  }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
	 and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
			    tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;

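      /* Editorial note (not in the original source): per tree.def,
	 WITH_RECORD_EXPR and PLACEHOLDER_EXPR cooperate.  Expanding
	 (WITH_RECORD_EXPR size-expr obj) pushes OBJ onto
	 placeholder_list; any PLACEHOLDER_EXPR of OBJ's type inside
	 SIZE-EXPR is then expanded as a reference to OBJ.  Fields of
	 self-referential record types (as in Ada) are the typical
	 source of these nodes.  */
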
    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL_PTR,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
	expand_expr_stmt (LABELED_BLOCK_BODY (exp));
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
	sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;

    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (2);

	/* Mark the corresponding BLOCK for output in its proper place.  */
	if (TREE_OPERAND (exp, 2) != 0
	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
	  insert_block (TREE_OPERAND (exp, 2));

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (DECL_RTL (vars) == 0)
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insns (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;
	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
			&& (!MOVE_BY_PIECES_P
			    (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
			     TYPE_ALIGN (type) / BITS_PER_UNIT))
			&& ! mostly_zeros_p (exp))))
	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM
	      && (! memory_address_p (GET_MODE (constructor),
				      XEXP (constructor, 0))
		  || (flag_force_addr
		      && GET_CODE (XEXP (constructor, 0)) != REG)))
	    constructor = change_address (constructor, VOIDmode,
					  XEXP (constructor, 0));
	  return constructor;
	}

      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL)
	    {
	      if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (TREE_READONLY (exp))
	    {
	      if (GET_CODE (target) == MEM)
		target = copy_rtx (target);

	      RTX_UNCHANGING_P (target) = 1;
	    }

	  store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0);
	  return target;
	}

    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree exp2;
	tree index;
	tree string = string_constant (exp1, &index);
	int i;

	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && !TREE_INT_CST_HIGH (index)
	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1
	    && modifier != EXPAND_MEMORY_USE_WO)
	  return GEN_INT (TREE_STRING_POINTER (string)[i]);

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);

	if (current_function && current_function_check_memory_usage
	    && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		in_check_memory_usage = 1;
		emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				   op0, Pmode,
				   GEN_INT (int_size_in_bytes (type)),
				   TYPE_MODE (sizetype),
				   GEN_INT (memory_usage),
				   TYPE_MODE (integer_type_node));
		in_check_memory_usage = 0;
	      }
	  }

	temp = gen_rtx_MEM (mode, op0);
	/* If address was computed by addition,
	   mark this as an element of an aggregate.  */
	if (TREE_CODE (exp1) == PLUS_EXPR
	    || (TREE_CODE (exp1) == SAVE_EXPR
		&& TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
	    || AGGREGATE_TYPE_P (TREE_TYPE (exp))
	    || (TREE_CODE (exp1) == ADDR_EXPR
		&& (exp2 = TREE_OPERAND (exp1, 0))
		&& AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
	  MEM_SET_IN_STRUCT_P (temp, 1);

	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
	MEM_ALIAS_SET (temp) = get_alias_set (exp);

	/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
	   here, because, in C and C++, the fact that a location is accessed
	   through a pointer to const does not mean that the value there can
	   never change.  Languages where it can never change should
	   also set TREE_STATIC.  */
	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);

	/* If we are writing to this object and its type is a record with
	   readonly fields, we must mark it as readonly so it will
	   conflict with readonly references to those fields.  */
	if (modifier == EXPAND_MEMORY_USE_WO
	    && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
	  RTX_UNCHANGING_P (temp) = 1;

	return temp;
      }

6445 case ARRAY_REF:
742920c7
RK
6446 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6447 abort ();
bbf6f052 6448
bbf6f052 6449 {
742920c7
RK
6450 tree array = TREE_OPERAND (exp, 0);
6451 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6452 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6453 tree index = TREE_OPERAND (exp, 1);
6454 tree index_type = TREE_TYPE (index);
08293add 6455 HOST_WIDE_INT i;
b50d17a1 6456
d4c89139
PB
6457 /* Optimize the special-case of a zero lower bound.
6458
6459 We convert the low_bound to sizetype to avoid some problems
6460 with constant folding. (E.g. suppose the lower bound is 1,
6461 and its mode is QI. Without the conversion, (ARRAY
6462 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6463 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6464
6465 But sizetype isn't quite right either (especially if
6466 the lowbound is negative). FIXME */
6467
742920c7 6468 if (! integer_zerop (low_bound))
d4c89139
PB
6469 index = fold (build (MINUS_EXPR, index_type, index,
6470 convert (sizetype, low_bound)));
742920c7 6471
742920c7 6472 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
6473 This is not done in fold so it won't happen inside &.
6474 Don't fold if this is for wide characters since it's too
6475 difficult to do correctly and this is a very rare case. */
742920c7
RK
6476
6477 if (TREE_CODE (array) == STRING_CST
6478 && TREE_CODE (index) == INTEGER_CST
6479 && !TREE_INT_CST_HIGH (index)
307b821c 6480 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
ad2e7dd0
RK
6481 && GET_MODE_CLASS (mode) == MODE_INT
6482 && GET_MODE_SIZE (mode) == 1)
307b821c 6483 return GEN_INT (TREE_STRING_POINTER (array)[i]);
bbf6f052 6484
742920c7
RK
6485 /* If this is a constant index into a constant array,
6486 just get the value from the array. Handle both the cases when
6487 we have an explicit constructor and when our operand is a variable
6488 that was declared const. */
4af3895e 6489
742920c7
RK
6490 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6491 {
6492 if (TREE_CODE (index) == INTEGER_CST
6493 && TREE_INT_CST_HIGH (index) == 0)
6494 {
6495 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6496
6497 i = TREE_INT_CST_LOW (index);
6498 while (elem && i--)
6499 elem = TREE_CHAIN (elem);
6500 if (elem)
6501 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 6502 tmode, ro_modifier);
742920c7
RK
6503 }
6504 }
4af3895e 6505
742920c7
RK
6506 else if (optimize >= 1
6507 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6508 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6509 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6510 {
08293add 6511 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
6512 {
6513 tree init = DECL_INITIAL (array);
6514
6515 i = TREE_INT_CST_LOW (index);
6516 if (TREE_CODE (init) == CONSTRUCTOR)
6517 {
6518 tree elem = CONSTRUCTOR_ELTS (init);
6519
03dc44a6
RS
6520 while (elem
6521 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
742920c7
RK
6522 elem = TREE_CHAIN (elem);
6523 if (elem)
6524 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 6525 tmode, ro_modifier);
742920c7
RK
6526 }
6527 else if (TREE_CODE (init) == STRING_CST
08293add
RK
6528 && TREE_INT_CST_HIGH (index) == 0
6529 && (TREE_INT_CST_LOW (index)
6530 < TREE_STRING_LENGTH (init)))
6531 return (GEN_INT
6532 (TREE_STRING_POINTER
6533 (init)[TREE_INT_CST_LOW (index)]));
742920c7
RK
6534 }
6535 }
6536 }
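      /* For example, with "static const int tbl[3] = {10, 20, 30};", a read
	 of tbl[1] is folded above (at -O1 and higher) to the constant 20
	 taken straight from DECL_INITIAL, so no memory load is generated.  */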

      /* ... fall through ...  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  Don't do this if we have
	 already written the data since we want to refer to that copy
	 and varasm.c assumes that's what we'll do.  */
      if (code != ARRAY_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));

		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (op0, op1, target);
		      }
		    else
		      {
			enum machine_mode imode
			  = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
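      /* For example, a 3-bit unsigned bitfield taken from a CONSTRUCTOR is
	 masked with ((HOST_WIDE_INT) 1 << 3) - 1 == 7; a 3-bit signed
	 bitfield in SImode (32 bits) is instead shifted left by 29 and
	 arithmetically back right by 29, replicating the sign bit.  */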

      {
	enum machine_mode mode1;
	int bitsize;
	int bitpos;
	tree offset;
	int volatilep = 0;
	int alignment;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep,
					&alignment);

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();
	/* If TEM's type is a union of variable size, pass TARGET to the inner
	   computation, since it will need a temporary and TARGET is known
	   to suffice.  This occurs in unchecked conversion in Ada.  */

	op0 = expand_expr (tem,
			   (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			    && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
				!= INTEGER_CST)
			    ? target : NULL_RTX),
			   VOIDmode,
			   (modifier == EXPAND_INITIALIZER
			    || modifier == EXPAND_CONST_ADDRESS)
			   ? modifier : EXPAND_NORMAL);

	/* If this is a constant, put it into a register if it is a
	   legitimate constant and OFFSET is 0; put it into memory if
	   it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	    /* If this object is in memory, put it into a register.
	       This case can't occur in C, but can in Ada if we have
	       unchecked conversion of an expression from a scalar type to
	       an array or record type.  */
	    if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
	      {
		rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);

		mark_temp_addr_taken (memloc);
		emit_move_insn (memloc, op0);
		op0 = memloc;
	      }

	    if (GET_CODE (op0) != MEM)
	      abort ();

	    if (GET_MODE (offset_rtx) != ptr_mode)
	      {
#ifdef POINTERS_EXTEND_UNSIGNED
		offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
	      }

	    /* A constant address in OP0 can have VOIDmode; we must not
	       call force_reg in that case.  */
	    if (GET_CODE (op0) == MEM
		&& GET_MODE (op0) == BLKmode
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
	      {
		rtx temp = change_address (op0, mode1,
					   plus_constant (XEXP (op0, 0),
							  (bitpos /
							   BITS_PER_UNIT)));
		if (GET_CODE (XEXP (temp, 0)) == REG)
		  op0 = temp;
		else
		  op0 = change_address (op0, mode1,
					force_reg (GET_MODE (XEXP (temp, 0)),
						   XEXP (temp, 0)));
		bitpos = 0;
	      }

	    op0 = change_address (op0, VOIDmode,
				  gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
						force_reg (ptr_mode,
							   offset_rtx)));
	  }

	/* Don't forget about volatility even if this is a bitfield.  */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    op0 = copy_rtx (op0);
	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* Check the access.  */
	if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		rtx to;
		int size;

		to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
		size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;

		/* Check the access right of the pointer.  */
		if (size > BITS_PER_UNIT)
		  emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				     to, Pmode,
				     GEN_INT (size / BITS_PER_UNIT),
				     TYPE_MODE (sizetype),
				     GEN_INT (memory_usage),
				     TYPE_MODE (integer_type_node));
	      }
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.
	   If we ultimately want the address (EXPAND_CONST_ADDRESS or
	   EXPAND_INITIALIZER), then we must not copy to a temporary.  */
	if (mode1 == VOIDmode
	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
	    || (modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER
		&& ((mode1 != BLKmode && ! direct_load[(int) mode1]
		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
		    /* If the field isn't aligned enough to fetch as a memref,
		       fetch it as a bit field.  */
		    || (mode1 != BLKmode && SLOW_UNALIGNED_ACCESS
			&& ((TYPE_ALIGN (TREE_TYPE (tem))
			     < (unsigned int) GET_MODE_ALIGNMENT (mode))
			    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))))
	    || (modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER
		&& mode == BLKmode
		&& SLOW_UNALIGNED_ACCESS
		&& (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
		    || bitpos % TYPE_ALIGN (type) != 0)))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && GET_CODE (op0) == MEM
		      && GET_CODE (target) == MEM
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		if (GET_CODE (op0) != MEM
		    || (target != 0 && GET_CODE (target) != MEM)
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		op0 = change_address (op0, VOIDmode,
				      plus_constant (XEXP (op0, 0),
						     bitpos / BITS_PER_UNIT));
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		emit_block_move (target, op0,
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 1);

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
	      mark_reg_pointer (XEXP (op0, 0), alignment);

	    op0 = extract_bit_field (op0, bitsize, bitpos,
				     unsignedp, target, ext_mode, ext_mode,
				     alignment,
				     int_size_in_bytes (TREE_TYPE (tem)));

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);
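	    /* E.g., on a 32-bit big-endian target, an 8-bit field fetched
	       into an SImode register is shifted left by 32 - 8 == 24 bits
	       here so that it ends up in the high-order byte, where a
	       BLKmode record value expects it.  */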

	    if (mode == BLKmode)
	      {
		rtx new = assign_stack_temp (ext_mode,
					     bitsize / BITS_PER_UNIT, 0);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		MEM_SET_IN_STRUCT_P (op0, 1);
	      }

	    return op0;
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
						   (bitpos / BITS_PER_UNIT)));
	else
	  op0 = change_address (op0, mode1,
				plus_constant (XEXP (op0, 0),
					       (bitpos / BITS_PER_UNIT)));

	if (GET_CODE (op0) == MEM)
	  MEM_ALIAS_SET (op0) = get_alias_set (exp);

	if (GET_CODE (XEXP (op0, 0)) == REG)
	  mark_reg_pointer (XEXP (op0, 0), alignment);

	MEM_SET_IN_STRUCT_P (op0, 1);
	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }

      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
	       rlo       = set_low - (set_low%bits_per_word);
	       the_word  = set [ (index - rlo)/bits_per_word ];
	       bit_index = index % bits_per_word;
	       bitmask   = 1 << bit_index;
	       return !!(the_word & bitmask);  */
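	/* A worked instance of the algorithm, taking bits_per_word == 8:
	   for a set with set_low == 0 and the test "13 IN s", rlo = 0,
	   the_word = set[13/8] = set[1], bit_index = 13 % 8 = 5,
	   bitmask = 1 << 5 = 32, and the result is !!(set[1] & 32).  */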

	tree set = TREE_OPERAND (exp, 0);
	tree index = TREE_OPERAND (exp, 1);
	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
	tree set_type = TREE_TYPE (set);
	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
	rtx setval = expand_expr (set, 0, VOIDmode, 0);
	rtx setaddr = XEXP (setval, 0);
	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;

	preexpand_calls (exp);

	/* If domain is empty, answer is no.  Likewise if index is constant
	   and out of bounds.  */
	if (((TREE_CODE (set_high_bound) == INTEGER_CST
	      && TREE_CODE (set_low_bound) == INTEGER_CST
	      && tree_int_cst_lt (set_high_bound, set_low_bound))
	     || (TREE_CODE (index) == INTEGER_CST
		 && TREE_CODE (set_low_bound) == INTEGER_CST
		 && tree_int_cst_lt (index, set_low_bound))
	     || (TREE_CODE (set_high_bound) == INTEGER_CST
		 && TREE_CODE (index) == INTEGER_CST
		 && tree_int_cst_lt (set_high_bound, index))))
	  return const0_rtx;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	/* If we get here, we have to generate the code for both cases
	   (in range and out of range).  */

	op0 = gen_label_rtx ();
	op1 = gen_label_rtx ();

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (lo_r) == CONST_INT))
	  {
	    emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
				     GET_MODE (index_val), iunsignedp, 0, op1);
	  }

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (hi_r) == CONST_INT))
	  {
	    emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
				     GET_MODE (index_val), iunsignedp, 0, op1);
	  }

	/* Calculate the element number of bit zero in the first word
	   of the set.  */
	if (GET_CODE (lo_r) == CONST_INT)
	  rlow = GEN_INT (INTVAL (lo_r)
			  & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

	addr = memory_address (byte_mode,
			       expand_binop (index_mode, add_optab, diff,
					     setaddr, NULL_RTX, iunsignedp,
					     OPTAB_LIB_WIDEN));

	/* Extract the bit we want to examine.  */
	bit = expand_shift (RSHIFT_EXPR, byte_mode,
			    gen_rtx_MEM (byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),
			    NULL_RTX, 1);
	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
			       GET_MODE (target) == byte_mode ? target : 0,
			       1, OPTAB_LIB_WIDEN);

	if (result != target)
	  convert_move (target, result, 1);

	/* Output the code to handle the out-of-range case.  */
	emit_jump (op0);
	emit_label (op1);
	emit_move_insn (target, const0_rtx);
	emit_label (op0);
	return target;
      }

    case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
	{
	  RTL_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
	  expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));

	  /* That's it for this cleanup.  */
	  TREE_OPERAND (exp, 2) = 0;
	}
      return RTL_EXPR_RTL (exp);

    case CLEANUP_POINT_EXPR:
      {
	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (2);

	target_temp_slot_level = temp_slot_level;

	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
	/* If we're going to use this value, load it up now.  */
	if (! ignore)
	  op0 = force_not_mem (op0);
	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	return expand_builtin (exp, target, subtarget, tmode, ignore);

      /* If this call was expanded already by preexpand_calls,
	 just return the result we got.  */
      if (CALL_EXPR_RTL (exp) != 0)
	return CALL_EXPR_RTL (exp);

      return expand_call (exp, target, ignore);

    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

	  /* If both input and output are BLKmode, this conversion
	     isn't actually doing anything unless we need to make the
	     alignment stricter.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
	      && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
		  || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
	    return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
				modifier);

	  if (target == 0)
	    {
	      if (mode != BLKmode)
		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (GET_CODE (target) == MEM)
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			change_address (target, TYPE_MODE (valtype), 0), 0);

	  else if (GET_CODE (target) == REG)
	    /* Store this field into a union of the proper type.  */
	    store_field (target,
			 MIN ((int_size_in_bytes (TREE_TYPE
						  (TREE_OPERAND (exp, 0)))
			       * BITS_PER_UNIT),
			      GET_MODE_BITSIZE (mode)),
			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			 VOIDmode, 0, 1, int_size_in_bytes (type), 0);
	  else
	    abort ();

	  /* Return the entire union.  */
	  return target;
	}
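      /* For example, the GNU C cast-to-union extension, (union u) x, comes
	 through here: x is stored at offset 0 of a temporary of the union
	 type (or into a register of the union's mode) and the whole union
	 is returned.  */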

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     ro_modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
	return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
	return
	  convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
			 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

      if (modifier == EXPAND_INITIALIZER)
	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */
    plus_expr:
      this_optab = add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || mode == ptr_mode)
	{
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }
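	  /* Concretely: on a 64-bit host targeting a 32-bit machine, the
	     tree constant 0x80000000 must become the canonical SImode
	     CONST_INT -0x80000000 (truncated to 32 bits, then sign
	     extended to the host word); immed_double_const performs
	     exactly that truncation and extension.  */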

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 EXPAND_SUM);
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Don't go to both_summands if modifier
		     says it's not right to return a PLUS.  */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		    goto binop2;
		  goto both_summands;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);

    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}
      /* If adding to a sum including a constant,
	 associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
	  rtx constant_term = const0_rtx;

	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
	  if (temp != 0)
	    op0 = temp;
	  /* Ensure that MULT comes first if there is one.  */
	  else if (GET_CODE (op0) == MULT)
	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

	  /* Let's also eliminate constants from op0 if possible.  */
	  op0 = eliminate_constant_term (op0, &constant_term);

	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
	     their sum should be a constant.  Form it into OP1, since the
	     result we want will then be OP0 + OP1.  */

	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  if (temp != 0)
	    op1 = temp;
	  else
	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
	}

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
	temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
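      /* For instance, (x + 4) + (y + 8) is reassociated here so that the
	 two constants are folded together, yielding ((x + y) + 12) -- a
	 shape that suits base + displacement addressing.  */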

    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      /* Handle difference of two symbolic constants,
	 for the sake of an initializer.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
				 VOIDmode, ro_modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				 VOIDmode, ro_modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	{
	  tree negated = fold (build1 (NEGATE_EXPR, type,
				       TREE_OPERAND (exp, 1)));

	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
	    /* If we can't negate the constant in TYPE, leave it alone and
	       expand_binop will negate it for us.  We used to try to do it
	       here in the signed version of TYPE, but that doesn't work
	       on POINTER_TYPEs.  */;
	  else
	    {
	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
	      goto plus_expr;
	    }
	}
      this_optab = sub_optab;
      goto binop;
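      /* So "a - 5" is rewritten as "a + (-5)" and re-dispatched through
	 plus_expr, letting the PLUS_EXPR path fold the constant into an
	 address or use plus_constant where it can.  */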

    case MULT_EXPR:
      preexpand_calls (exp);
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  register tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  /* Apply distributive law if OP0 is x+c.  */
	  if (GET_CODE (op0) == PLUS
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
	    return
	      gen_rtx_PLUS
		(mode,
		 gen_rtx_MULT
		   (mode, XEXP (op0, 0),
		    GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
		 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
			  * INTVAL (XEXP (op0, 1))));

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return
	    gen_rtx_MULT (mode, op0,
			  GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
	}
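      /* E.g., under EXPAND_SUM, (x + 3) * 4 becomes (PLUS (MULT x 4) 12),
	 which an address-generation caller can often fold into a single
	 base + index * scale + displacement operand.  */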

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			       ? smul_widen_optab : umul_widen_optab);
	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			? umul_widen_optab : smul_widen_optab);
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  goto binop2;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem;
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  htem = expand_mult_highpart_adjust (innermode,
						      gen_highpart (innermode, temp),
						      op0, op1,
						      gen_highpart (innermode, temp),
						      unsignedp);
		  emit_move_insn (gen_highpart (innermode, temp), htem);
		  return temp;
		}
	    }
	}
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
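      /* Typical case: on a 32-bit target, (int) a * (int) b with a and b
	 of type short can use a widening HImode-to-SImode multiply
	 (smul_widen_optab) instead of extending both operands and doing a
	 full SImode multiply.  */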

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM;
	 then, if the divisor is constant, we can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      this_optab = flodiv_optab;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));

    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (GET_CODE (target) == REG
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = (TREE_UNSIGNED (type)
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (GET_CODE (target) == MEM)
	target = gen_reg_rtx (mode);

      if (target != op0)
	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT && ! can_compare_p (mode, ccp_jump))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  target, op1, NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  op1, target, NULL_RTX, op0);
	}
      else
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				   unsignedp, mode, NULL_RTX, 0, NULL_RTX,
				   op0);
	}
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;

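      /* Without a min/max instruction, MAX (a, b) is thus emitted as:
	     target = a;
	     if (target >= b) goto done;
	     target = b;
	   done:
	 with the compare done word-by-word when the mode is too wide.  */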

    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      preexpand_calls (exp);
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  if (temp != original_target)
	    temp = copy_to_reg (temp);

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, 0, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}
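      /* That is, "foo != 0" becomes:
	     temp = foo;
	     if (temp == 0) goto L;
	     temp = 1;
	   L:
	 which avoids a store-flag sequence when none is available.  */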

      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0 || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;

    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, 0);

    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
	 conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	{
	  tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	  tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	  if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
	       && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
		  && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
	    return expand_expr (build1 (NOP_EXPR, type,
					build (COND_EXPR, TREE_TYPE (true),
					       TREE_OPERAND (exp, 0),
					       true, false)),
				target, tmode, modifier);
	}

      {
	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	/* If an arm of the branch requires a cleanup,
	   only that cleanup is performed.  */

	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     ro_modifier);
		return const0_rtx;
	      }

	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
	    if (GET_MODE (op0) == mode)
	      return op0;

	    if (target == 0)
	      target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	    return target;
	  }

	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */

	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (ignore)
	  temp = 0;
	else if (original_target
		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
		     || (singleton && GET_CODE (original_target) == REG
			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			 && original_target == var_rtx (singleton)))
		 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
		 && (! can_conditionally_move_p (mode)
		     || GET_CODE (original_target) == REG
		     || TREE_ADDRESSABLE (type))
#endif
		 && ! (GET_CODE (original_target) == MEM
		       && MEM_VOLATILE_P (original_target)))
	  temp = original_target;
	else if (TREE_ADDRESSABLE (type))
	  abort ();
	else
	  temp = assign_temp (type, 0, 0, 1);

	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
	if (temp && singleton && binary_op
	    && (TREE_CODE (binary_op) == PLUS_EXPR
		|| TREE_CODE (binary_op) == MINUS_EXPR
		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		: integer_onep (TREE_OPERAND (binary_op, 1)))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    rtx result;
	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
			    : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
			    : xor_optab);

	    /* If we had X ? A : A + 1, do this as A + (X == 0).

	       We have to invert the truth value here and then put it
	       back later if do_store_flag fails.  We cannot simply copy
	       TREE_OPERAND (exp, 0) to another variable and modify that
	       because invert_truthvalue can modify the tree pointed to
	       by its argument.  */
	    if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));

	    result = do_store_flag (TREE_OPERAND (exp, 0),
				    (safe_from_p (temp, singleton, 1)
				     ? temp : NULL_RTX),
				    mode, BRANCH_COST <= 1);

	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
	      result = expand_shift (LSHIFT_EXPR, mode, result,
				     build_int_2 (tree_log2
						  (TREE_OPERAND
						   (binary_op, 1)),
						  0),
				     (safe_from_p (temp, singleton, 1)
				      ? temp : NULL_RTX), 0);

	    if (result)
	      {
		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		return expand_binop (mode, boptab, op1, result, temp,
				     unsignedp, OPTAB_LIB_WIDEN);
	      }
	    else if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));
	  }
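	/* For example, with BRANCH_COST >= 3, "x ? a + 4 : a" is expanded
	   branch-free as a + ((x != 0) << 2): do_store_flag yields the
	   0-or-1 value of the condition, which is shifted by log2 (4) == 2
	   and added to a.  */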
7850
dabf8373 7851 do_pending_stack_adjust ();
bbf6f052
RK
7852 NO_DEFER_POP;
7853 op0 = gen_label_rtx ();
7854
7855 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7856 {
7857 if (temp != 0)
7858 {
7859 /* If the target conflicts with the other operand of the
7860 binary op, we can't use it. Also, we can't use the target
7861 if it is a hard register, because evaluating the condition
7862 might clobber it. */
7863 if ((binary_op
e5e809f4 7864 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
bbf6f052
RK
7865 || (GET_CODE (temp) == REG
7866 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7867 temp = gen_reg_rtx (mode);
7868 store_expr (singleton, temp, 0);
7869 }
7870 else
906c4e36 7871 expand_expr (singleton,
2937cf87 7872 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7873 if (singleton == TREE_OPERAND (exp, 1))
7874 jumpif (TREE_OPERAND (exp, 0), op0);
7875 else
7876 jumpifnot (TREE_OPERAND (exp, 0), op0);
7877
956d6950 7878 start_cleanup_deferral ();
bbf6f052
RK
7879 if (binary_op && temp == 0)
7880 /* Just touch the other operand. */
7881 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 7882 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7883 else if (binary_op)
7884 store_expr (build (TREE_CODE (binary_op), type,
7885 make_tree (type, temp),
7886 TREE_OPERAND (binary_op, 1)),
7887 temp, 0);
7888 else
7889 store_expr (build1 (TREE_CODE (unary_op), type,
7890 make_tree (type, temp)),
7891 temp, 0);
7892 op1 = op0;
bbf6f052 7893 }
bbf6f052
RK
7894 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7895 comparison operator. If we have one of these cases, set the
7896 output to A, branch on A (cse will merge these two references),
7897 then set the output to FOO. */
7898 else if (temp
7899 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7900 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7901 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7902 TREE_OPERAND (exp, 1), 0)
e9a25f70
JL
7903 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7904 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 7905 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
bbf6f052
RK
7906 {
7907 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7908 temp = gen_reg_rtx (mode);
7909 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7910 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 7911
956d6950 7912 start_cleanup_deferral ();
bbf6f052
RK
7913 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7914 op1 = op0;
7915 }
7916 else if (temp
7917 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7918 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7919 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7920 TREE_OPERAND (exp, 2), 0)
e9a25f70
JL
7921 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7922 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 7923 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7924 {
7925 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7926 temp = gen_reg_rtx (mode);
7927 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7928 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 7929
956d6950 7930 start_cleanup_deferral ();
bbf6f052
RK
7931 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7932 op1 = op0;
7933 }
7934 else
7935 {
7936 op1 = gen_label_rtx ();
7937 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 7938
956d6950 7939 start_cleanup_deferral ();
2ac84cfe
NS
7940
 7941 /* One branch of the conditional can be void if it never
 7942 returns.  For example, A ? throw : E.  */
7943 if (temp != 0
7944 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
bbf6f052
RK
7945 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7946 else
906c4e36
RK
7947 expand_expr (TREE_OPERAND (exp, 1),
7948 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 7949 end_cleanup_deferral ();
bbf6f052
RK
7950 emit_queue ();
7951 emit_jump_insn (gen_jump (op1));
7952 emit_barrier ();
7953 emit_label (op0);
956d6950 7954 start_cleanup_deferral ();
2ac84cfe
NS
7955 if (temp != 0
7956 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
bbf6f052
RK
7957 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7958 else
906c4e36
RK
7959 expand_expr (TREE_OPERAND (exp, 2),
7960 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7961 }
7962
956d6950 7963 end_cleanup_deferral ();
bbf6f052
RK
7964
7965 emit_queue ();
7966 emit_label (op1);
7967 OK_DEFER_POP;
5dab5552 7968
bbf6f052
RK
7969 return temp;
7970 }
7971
7972 case TARGET_EXPR:
7973 {
7974 /* Something needs to be initialized, but we didn't know
7975 where that thing was when building the tree. For example,
 7976 it could be the return value of a function, or a parameter
 7977 to a function that is laid out on the stack, or a temporary
7978 variable which must be passed by reference.
7979
7980 We guarantee that the expression will either be constructed
7981 or copied into our original target. */
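 /* For instance, a C++ declaration such as `T t = f ();' can be
    represented by a TARGET_EXPR whose slot is T's storage, so that
    F's return value is constructed in place instead of being
    copied.  (Illustrative example only.)  */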
7982
7983 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 7984 tree cleanups = NULL_TREE;
5c062816 7985 tree exp1;
bbf6f052
RK
7986
7987 if (TREE_CODE (slot) != VAR_DECL)
7988 abort ();
7989
9c51f375
RK
7990 if (! ignore)
7991 target = original_target;
7992
6fbfac92
JM
7993 /* Set this here so that if we get a target that refers to a
7994 register variable that's already been used, put_reg_into_stack
7995 knows that it should fix up those uses. */
7996 TREE_USED (slot) = 1;
7997
bbf6f052
RK
7998 if (target == 0)
7999 {
8000 if (DECL_RTL (slot) != 0)
ac993f4f
MS
8001 {
8002 target = DECL_RTL (slot);
5c062816 8003 /* If we have already expanded the slot, don't do
ac993f4f 8004 it again.  (mrs) */
5c062816
MS
8005 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8006 return target;
ac993f4f 8007 }
bbf6f052
RK
8008 else
8009 {
e9a25f70 8010 target = assign_temp (type, 2, 0, 1);
bbf6f052
RK
8011 /* All temp slots at this level must not conflict. */
8012 preserve_temp_slots (target);
8013 DECL_RTL (slot) = target;
e9a25f70
JL
8014 if (TREE_ADDRESSABLE (slot))
8015 {
8016 TREE_ADDRESSABLE (slot) = 0;
8017 mark_addressable (slot);
8018 }
bbf6f052 8019
e287fd6e
RK
8020 /* Since SLOT is not known to the called function
8021 to belong to its stack frame, we must build an explicit
 8022 cleanup.  This case occurs when we must build up a reference
 8023 to pass as an argument.  In this case,
8024 it is very likely that such a reference need not be
8025 built here. */
8026
8027 if (TREE_OPERAND (exp, 2) == 0)
8028 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
2a888d4c 8029 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 8030 }
bbf6f052
RK
8031 }
8032 else
8033 {
 8034 /* This case does occur when expanding a parameter which
8035 needs to be constructed on the stack. The target
8036 is the actual stack address that we want to initialize.
8037 The function we call will perform the cleanup in this case. */
8038
8c042b47
RS
8039 /* If we have already assigned it space, use that space,
8040 not target that we were passed in, as our target
8041 parameter is only a hint. */
8042 if (DECL_RTL (slot) != 0)
8043 {
8044 target = DECL_RTL (slot);
 8045 /* If we have already expanded the slot, don't do
 8046 it again.  (mrs) */
8047 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8048 return target;
8049 }
21002281
JW
8050 else
8051 {
8052 DECL_RTL (slot) = target;
8053 /* If we must have an addressable slot, then make sure that
8054 the RTL that we just stored in slot is OK. */
8055 if (TREE_ADDRESSABLE (slot))
8056 {
8057 TREE_ADDRESSABLE (slot) = 0;
8058 mark_addressable (slot);
8059 }
8060 }
bbf6f052
RK
8061 }
8062
4847c938 8063 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
8064 /* Mark it as expanded. */
8065 TREE_OPERAND (exp, 1) = NULL_TREE;
8066
41531e5b 8067 store_expr (exp1, target, 0);
61d6b1cc 8068
e976b8b2 8069 expand_decl_cleanup (NULL_TREE, cleanups);
61d6b1cc 8070
41531e5b 8071 return target;
bbf6f052
RK
8072 }
8073
8074 case INIT_EXPR:
8075 {
8076 tree lhs = TREE_OPERAND (exp, 0);
8077 tree rhs = TREE_OPERAND (exp, 1);
8078 tree noncopied_parts = 0;
8079 tree lhs_type = TREE_TYPE (lhs);
8080
8081 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8082 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8083 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8084 TYPE_NONCOPIED_PARTS (lhs_type));
8085 while (noncopied_parts != 0)
8086 {
8087 expand_assignment (TREE_VALUE (noncopied_parts),
8088 TREE_PURPOSE (noncopied_parts), 0, 0);
8089 noncopied_parts = TREE_CHAIN (noncopied_parts);
8090 }
8091 return temp;
8092 }
8093
8094 case MODIFY_EXPR:
8095 {
8096 /* If lhs is complex, expand calls in rhs before computing it.
8097 That's so we don't compute a pointer and save it over a call.
8098 If lhs is simple, compute it first so we can give it as a
8099 target if the rhs is just a call. This avoids an extra temp and copy
 8100 and prevents a partial subsumption that makes bad code.
8101 Actually we could treat component_ref's of vars like vars. */
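 /* For instance, for `*p = f (x)' the call to F is expanded first so
    that the address computed for *P is not held live across the call,
    while for plain `i = f (x)' the rtx for I can itself serve as the
    target when the call is expanded, saving a temporary and a copy.
    (Illustrative sketch of the cases described above.)  */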
8102
8103 tree lhs = TREE_OPERAND (exp, 0);
8104 tree rhs = TREE_OPERAND (exp, 1);
8105 tree noncopied_parts = 0;
8106 tree lhs_type = TREE_TYPE (lhs);
8107
8108 temp = 0;
8109
8110 if (TREE_CODE (lhs) != VAR_DECL
8111 && TREE_CODE (lhs) != RESULT_DECL
b60334e8
RK
8112 && TREE_CODE (lhs) != PARM_DECL
8113 && ! (TREE_CODE (lhs) == INDIRECT_REF
8114 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
bbf6f052
RK
8115 preexpand_calls (exp);
8116
8117 /* Check for |= or &= of a bitfield of size one into another bitfield
8118 of size 1. In this case, (unless we need the result of the
8119 assignment) we can do this more efficiently with a
8120 test followed by an assignment, if necessary.
8121
8122 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8123 things change so we do, this code should be enhanced to
8124 support it. */
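 /* As an illustration, with one-bit fields a.x and b.y the statement
    `a.x |= b.y;' (result unused) is expanded roughly as
      if (b.y) a.x = 1;
    i.e. a jump plus a store, instead of a read-modify-write of the
    destination bitfield.  */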
8125 if (ignore
8126 && TREE_CODE (lhs) == COMPONENT_REF
8127 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8128 || TREE_CODE (rhs) == BIT_AND_EXPR)
8129 && TREE_OPERAND (rhs, 0) == lhs
8130 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8131 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
8132 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
8133 {
8134 rtx label = gen_label_rtx ();
8135
8136 do_jump (TREE_OPERAND (rhs, 1),
8137 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8138 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8139 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8140 (TREE_CODE (rhs) == BIT_IOR_EXPR
8141 ? integer_one_node
8142 : integer_zero_node)),
8143 0, 0);
e7c33f54 8144 do_pending_stack_adjust ();
bbf6f052
RK
8145 emit_label (label);
8146 return const0_rtx;
8147 }
8148
8149 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8150 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8151 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8152 TYPE_NONCOPIED_PARTS (lhs_type));
8153
8154 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8155 while (noncopied_parts != 0)
8156 {
8157 expand_assignment (TREE_PURPOSE (noncopied_parts),
8158 TREE_VALUE (noncopied_parts), 0, 0);
8159 noncopied_parts = TREE_CHAIN (noncopied_parts);
8160 }
8161 return temp;
8162 }
8163
6e7f84a7
APB
8164 case RETURN_EXPR:
8165 if (!TREE_OPERAND (exp, 0))
8166 expand_null_return ();
8167 else
8168 expand_return (TREE_OPERAND (exp, 0));
8169 return const0_rtx;
8170
bbf6f052
RK
8171 case PREINCREMENT_EXPR:
8172 case PREDECREMENT_EXPR:
7b8b9722 8173 return expand_increment (exp, 0, ignore);
bbf6f052
RK
8174
8175 case POSTINCREMENT_EXPR:
8176 case POSTDECREMENT_EXPR:
8177 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 8178 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
8179
8180 case ADDR_EXPR:
987c71d9 8181 /* If nonzero, TEMP will be set to the address of something that might
0f41302f 8182 be a MEM corresponding to a stack slot. */
987c71d9
RK
8183 temp = 0;
8184
bbf6f052
RK
8185 /* Are we taking the address of a nested function? */
8186 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 8187 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
8188 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8189 && ! TREE_STATIC (exp))
bbf6f052
RK
8190 {
8191 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8192 op0 = force_operand (op0, target);
8193 }
682ba3a6
RK
8194 /* If we are taking the address of something erroneous, just
8195 return a zero. */
8196 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8197 return const0_rtx;
bbf6f052
RK
8198 else
8199 {
e287fd6e
RK
8200 /* We make sure to pass const0_rtx down if we came in with
8201 ignore set, to avoid doing the cleanups twice for something. */
8202 op0 = expand_expr (TREE_OPERAND (exp, 0),
8203 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
8204 (modifier == EXPAND_INITIALIZER
8205 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 8206
119af78a
RK
8207 /* If we are going to ignore the result, OP0 will have been set
8208 to const0_rtx, so just return it. Don't get confused and
8209 think we are taking the address of the constant. */
8210 if (ignore)
8211 return op0;
8212
3539e816
MS
8213 op0 = protect_from_queue (op0, 0);
8214
c5c76735
JL
8215 /* We would like the object in memory. If it is a constant, we can
8216 have it be statically allocated into memory. For a non-constant,
8217 we need to allocate some memory and store the value into it. */
896102d0
RK
8218
8219 if (CONSTANT_P (op0))
8220 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8221 op0);
987c71d9 8222 else if (GET_CODE (op0) == MEM)
af5b53ed
RK
8223 {
8224 mark_temp_addr_taken (op0);
8225 temp = XEXP (op0, 0);
8226 }
896102d0 8227
682ba3a6 8228 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6c8538cc 8229 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
896102d0
RK
8230 {
 8231 /* If this object is in a register, it must not
0f41302f 8232 be BLKmode. */
896102d0 8233 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
06089a8b 8234 rtx memloc = assign_temp (inner_type, 1, 1, 1);
896102d0 8235
7a0b7b9a 8236 mark_temp_addr_taken (memloc);
896102d0
RK
8237 emit_move_insn (memloc, op0);
8238 op0 = memloc;
8239 }
8240
bbf6f052
RK
8241 if (GET_CODE (op0) != MEM)
8242 abort ();
8243
8244 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77
RK
8245 {
8246 temp = XEXP (op0, 0);
8247#ifdef POINTERS_EXTEND_UNSIGNED
8248 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8249 && mode == ptr_mode)
9fcfcce7 8250 temp = convert_memory_address (ptr_mode, temp);
88f63c77
RK
8251#endif
8252 return temp;
8253 }
987c71d9 8254
bbf6f052
RK
8255 op0 = force_operand (XEXP (op0, 0), target);
8256 }
987c71d9 8257
bbf6f052 8258 if (flag_force_addr && GET_CODE (op0) != REG)
987c71d9
RK
8259 op0 = force_reg (Pmode, op0);
8260
dc6d66b3
RK
8261 if (GET_CODE (op0) == REG
8262 && ! REG_USERVAR_P (op0))
8263 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
987c71d9
RK
8264
8265 /* If we might have had a temp slot, add an equivalent address
8266 for it. */
8267 if (temp != 0)
8268 update_temp_slot_address (temp, op0);
8269
88f63c77
RK
8270#ifdef POINTERS_EXTEND_UNSIGNED
8271 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8272 && mode == ptr_mode)
9fcfcce7 8273 op0 = convert_memory_address (ptr_mode, op0);
88f63c77
RK
8274#endif
8275
bbf6f052
RK
8276 return op0;
8277
8278 case ENTRY_VALUE_EXPR:
8279 abort ();
8280
7308a047
RS
8281 /* COMPLEX type for Extended Pascal & Fortran */
8282 case COMPLEX_EXPR:
8283 {
8284 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 8285 rtx insns;
7308a047
RS
8286
8287 /* Get the rtx code of the operands. */
8288 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8289 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8290
8291 if (! target)
8292 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8293
6551fa4d 8294 start_sequence ();
7308a047
RS
8295
8296 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
8297 emit_move_insn (gen_realpart (mode, target), op0);
8298 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 8299
6551fa4d
JW
8300 insns = get_insns ();
8301 end_sequence ();
8302
7308a047 8303 /* Complex construction should appear as a single unit. */
6551fa4d
JW
8304 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8305 each with a separate pseudo as destination.
8306 It's not correct for flow to treat them as a unit. */
6d6e61ce 8307 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8308 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8309 else
8310 emit_insns (insns);
7308a047
RS
8311
8312 return target;
8313 }
8314
8315 case REALPART_EXPR:
2d7050fd
RS
8316 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8317 return gen_realpart (mode, op0);
7308a047
RS
8318
8319 case IMAGPART_EXPR:
2d7050fd
RS
8320 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8321 return gen_imagpart (mode, op0);
7308a047
RS
8322
8323 case CONJ_EXPR:
8324 {
62acb978 8325 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 8326 rtx imag_t;
6551fa4d 8327 rtx insns;
7308a047
RS
8328
8329 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8330
8331 if (! target)
d6a5ac33 8332 target = gen_reg_rtx (mode);
7308a047 8333
6551fa4d 8334 start_sequence ();
7308a047
RS
8335
8336 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
8337 emit_move_insn (gen_realpart (partmode, target),
8338 gen_realpart (partmode, op0));
7308a047 8339
62acb978
RK
8340 imag_t = gen_imagpart (partmode, target);
8341 temp = expand_unop (partmode, neg_optab,
8342 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
8343 if (temp != imag_t)
8344 emit_move_insn (imag_t, temp);
8345
6551fa4d
JW
8346 insns = get_insns ();
8347 end_sequence ();
8348
d6a5ac33
RK
 8349 /* Conjugate should appear as a single unit.
8350 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
8351 each with a separate pseudo as destination.
8352 It's not correct for flow to treat them as a unit. */
6d6e61ce 8353 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8354 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8355 else
8356 emit_insns (insns);
7308a047
RS
8357
8358 return target;
8359 }
8360
e976b8b2
MS
8361 case TRY_CATCH_EXPR:
8362 {
8363 tree handler = TREE_OPERAND (exp, 1);
8364
8365 expand_eh_region_start ();
8366
8367 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8368
8369 expand_eh_region_end (handler);
8370
8371 return op0;
8372 }
8373
b335b813
PB
8374 case TRY_FINALLY_EXPR:
8375 {
8376 tree try_block = TREE_OPERAND (exp, 0);
8377 tree finally_block = TREE_OPERAND (exp, 1);
8378 rtx finally_label = gen_label_rtx ();
8379 rtx done_label = gen_label_rtx ();
8380 rtx return_link = gen_reg_rtx (Pmode);
8381 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8382 (tree) finally_label, (tree) return_link);
8383 TREE_SIDE_EFFECTS (cleanup) = 1;
8384
8385 /* Start a new binding layer that will keep track of all cleanup
8386 actions to be performed. */
8e91754e 8387 expand_start_bindings (2);
b335b813
PB
8388
8389 target_temp_slot_level = temp_slot_level;
8390
8391 expand_decl_cleanup (NULL_TREE, cleanup);
8392 op0 = expand_expr (try_block, target, tmode, modifier);
8393
8394 preserve_temp_slots (op0);
8395 expand_end_bindings (NULL_TREE, 0, 0);
8396 emit_jump (done_label);
8397 emit_label (finally_label);
8398 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8399 emit_indirect_jump (return_link);
8400 emit_label (done_label);
8401 return op0;
8402 }
8403
8404 case GOTO_SUBROUTINE_EXPR:
8405 {
8406 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8407 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8408 rtx return_address = gen_label_rtx ();
8409 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8410 emit_jump (subr);
8411 emit_label (return_address);
8412 return const0_rtx;
8413 }
8414
e976b8b2
MS
8415 case POPDCC_EXPR:
8416 {
8417 rtx dcc = get_dynamic_cleanup_chain ();
38a448ca 8418 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
e976b8b2
MS
8419 return const0_rtx;
8420 }
8421
8422 case POPDHC_EXPR:
8423 {
8424 rtx dhc = get_dynamic_handler_chain ();
38a448ca 8425 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
e976b8b2
MS
8426 return const0_rtx;
8427 }
8428
d3707adb
RH
8429 case VA_ARG_EXPR:
8430 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8431
bbf6f052 8432 default:
90764a87 8433 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
8434 }
8435
8436 /* Here to do an ordinary binary operator, generating an instruction
8437 from the optab already placed in `this_optab'. */
8438 binop:
8439 preexpand_calls (exp);
e5e809f4 8440 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
8441 subtarget = 0;
8442 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 8443 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8444 binop2:
8445 temp = expand_binop (mode, this_optab, op0, op1, target,
8446 unsignedp, OPTAB_LIB_WIDEN);
8447 if (temp == 0)
8448 abort ();
8449 return temp;
8450}
b93a436e 8451\f
14a774a9
RK
8452/* Similar to expand_expr, except that we don't specify a target, target
8453 mode, or modifier and we return the alignment of the inner type. This is
8454 used in cases where it is not necessary to align the result to the
8455 alignment of its type as long as we know the alignment of the result, for
8456 example for comparisons of BLKmode values. */
8457
8458static rtx
8459expand_expr_unaligned (exp, palign)
8460 register tree exp;
8461 int *palign;
8462{
8463 register rtx op0;
8464 tree type = TREE_TYPE (exp);
8465 register enum machine_mode mode = TYPE_MODE (type);
8466
8467 /* Default the alignment we return to that of the type. */
8468 *palign = TYPE_ALIGN (type);
8469
8470 /* The only cases in which we do anything special is if the resulting mode
8471 is BLKmode. */
8472 if (mode != BLKmode)
8473 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8474
8475 switch (TREE_CODE (exp))
8476 {
8477 case CONVERT_EXPR:
8478 case NOP_EXPR:
8479 case NON_LVALUE_EXPR:
8480 /* Conversions between BLKmode values don't change the underlying
8481 alignment or value. */
8482 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8483 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8484 break;
8485
8486 case ARRAY_REF:
8487 /* Much of the code for this case is copied directly from expand_expr.
8488 We need to duplicate it here because we will do something different
8489 in the fall-through case, so we need to handle the same exceptions
8490 it does. */
8491 {
8492 tree array = TREE_OPERAND (exp, 0);
8493 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8494 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8495 tree index = TREE_OPERAND (exp, 1);
8496 tree index_type = TREE_TYPE (index);
8497 HOST_WIDE_INT i;
8498
8499 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8500 abort ();
8501
8502 /* Optimize the special-case of a zero lower bound.
8503
8504 We convert the low_bound to sizetype to avoid some problems
8505 with constant folding. (E.g. suppose the lower bound is 1,
8506 and its mode is QI. Without the conversion, (ARRAY
8507 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8508 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
8509
8510 But sizetype isn't quite right either (especially if
8511 the lowbound is negative). FIXME */
8512
8513 if (! integer_zerop (low_bound))
8514 index = fold (build (MINUS_EXPR, index_type, index,
8515 convert (sizetype, low_bound)));
8516
8517 /* If this is a constant index into a constant array,
8518 just get the value from the array. Handle both the cases when
8519 we have an explicit constructor and when our operand is a variable
8520 that was declared const. */
8521
8522 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
8523 {
8524 if (TREE_CODE (index) == INTEGER_CST
8525 && TREE_INT_CST_HIGH (index) == 0)
8526 {
8527 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
8528
8529 i = TREE_INT_CST_LOW (index);
8530 while (elem && i--)
8531 elem = TREE_CHAIN (elem);
8532 if (elem)
8533 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8534 palign);
8535 }
8536 }
8537
8538 else if (optimize >= 1
8539 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8540 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8541 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8542 {
8543 if (TREE_CODE (index) == INTEGER_CST)
8544 {
8545 tree init = DECL_INITIAL (array);
8546
8547 i = TREE_INT_CST_LOW (index);
8548 if (TREE_CODE (init) == CONSTRUCTOR)
8549 {
8550 tree elem = CONSTRUCTOR_ELTS (init);
8551
8552 while (elem
8553 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
8554 elem = TREE_CHAIN (elem);
8555 if (elem)
8556 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8557 palign);
8558 }
8559 }
8560 }
8561 }
8562
8563 /* ... fall through ... */
8564
8565 case COMPONENT_REF:
8566 case BIT_FIELD_REF:
8567 /* If the operand is a CONSTRUCTOR, we can just extract the
8568 appropriate field if it is present. Don't do this if we have
8569 already written the data since we want to refer to that copy
8570 and varasm.c assumes that's what we'll do. */
8571 if (TREE_CODE (exp) != ARRAY_REF
8572 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8573 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8574 {
8575 tree elt;
8576
8577 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8578 elt = TREE_CHAIN (elt))
8579 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8580 /* Note that unlike the case in expand_expr, we know this is
8581 BLKmode and hence not an integer. */
8582 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8583 }
8584
8585 {
8586 enum machine_mode mode1;
8587 int bitsize;
8588 int bitpos;
8589 tree offset;
8590 int volatilep = 0;
8591 int alignment;
8592 int unsignedp;
8593 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8594 &mode1, &unsignedp, &volatilep,
8595 &alignment);
8596
8597 /* If we got back the original object, something is wrong. Perhaps
8598 we are evaluating an expression too early. In any event, don't
8599 infinitely recurse. */
8600 if (tem == exp)
8601 abort ();
8602
8603 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8604
8605 /* If this is a constant, put it into a register if it is a
8606 legitimate constant and OFFSET is 0 and memory if it isn't. */
8607 if (CONSTANT_P (op0))
8608 {
8609 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8610
8611 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8612 && offset == 0)
8613 op0 = force_reg (inner_mode, op0);
8614 else
8615 op0 = validize_mem (force_const_mem (inner_mode, op0));
8616 }
8617
8618 if (offset != 0)
8619 {
8620 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8621
8622 /* If this object is in a register, put it into memory.
8623 This case can't occur in C, but can in Ada if we have
8624 unchecked conversion of an expression from a scalar type to
8625 an array or record type. */
8626 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8627 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8628 {
8629 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8630
8631 mark_temp_addr_taken (memloc);
8632 emit_move_insn (memloc, op0);
8633 op0 = memloc;
8634 }
8635
8636 if (GET_CODE (op0) != MEM)
8637 abort ();
8638
8639 if (GET_MODE (offset_rtx) != ptr_mode)
8640 {
8641#ifdef POINTERS_EXTEND_UNSIGNED
8642 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8643#else
8644 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8645#endif
8646 }
8647
8648 op0 = change_address (op0, VOIDmode,
8649 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8650 force_reg (ptr_mode,
8651 offset_rtx)));
8652 }
8653
8654 /* Don't forget about volatility even if this is a bitfield. */
8655 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8656 {
8657 op0 = copy_rtx (op0);
8658 MEM_VOLATILE_P (op0) = 1;
8659 }
8660
8661 /* Check the access. */
8662 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8663 {
8664 rtx to;
8665 int size;
8666
8667 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8668 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8669
8670 /* Check the access right of the pointer. */
8671 if (size > BITS_PER_UNIT)
8672 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8673 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8674 TYPE_MODE (sizetype),
8675 GEN_INT (MEMORY_USE_RO),
8676 TYPE_MODE (integer_type_node));
8677 }
8678
a2b99161
RK
8679 /* In cases where an aligned union has an unaligned object
8680 as a field, we might be extracting a BLKmode value from
8681 an integer-mode (e.g., SImode) object. Handle this case
8682 by doing the extract into an object as wide as the field
8683 (which we know to be the width of a basic mode), then
8684 storing into memory, and changing the mode to BLKmode.
8685 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8686 EXPAND_INITIALIZER), then we must not copy to a temporary. */
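 /* E.g., a BLKmode field living inside an SImode union member: the
    bits are extracted in SImode, spilled to a fresh stack slot, and
    that slot is returned with its mode changed to BLKmode.  */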
8687 if (mode1 == VOIDmode
8688 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8689 || (SLOW_UNALIGNED_ACCESS
8690 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
8691 || bitpos % TYPE_ALIGN (type) != 0)))
8692 {
8693 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8694
8695 if (ext_mode == BLKmode)
8696 {
8697 /* In this case, BITPOS must start at a byte boundary. */
8698 if (GET_CODE (op0) != MEM
8699 || bitpos % BITS_PER_UNIT != 0)
8700 abort ();
8701
8702 op0 = change_address (op0, VOIDmode,
8703 plus_constant (XEXP (op0, 0),
8704 bitpos / BITS_PER_UNIT));
8705 }
8706 else
8707 {
8708 rtx new = assign_stack_temp (ext_mode,
8709 bitsize / BITS_PER_UNIT, 0);
8710
8711 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8712 unsignedp, NULL_RTX, ext_mode,
8713 ext_mode, alignment,
8714 int_size_in_bytes (TREE_TYPE (tem)));
8715
8716 /* If the result is a record type and BITSIZE is narrower than
8717 the mode of OP0, an integral mode, and this is a big endian
8718 machine, we must put the field into the high-order bits. */
8719 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8720 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8721 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8722 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8723 size_int (GET_MODE_BITSIZE
8724 (GET_MODE (op0))
8725 - bitsize),
8726 op0, 1);
8727
8728
8729 emit_move_insn (new, op0);
8730 op0 = copy_rtx (new);
8731 PUT_MODE (op0, BLKmode);
8732 }
8733 }
8734 else
8735 /* Get a reference to just this component. */
8736 op0 = change_address (op0, mode1,
8737 plus_constant (XEXP (op0, 0),
8738 (bitpos / BITS_PER_UNIT)));
14a774a9
RK
8739
8740 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8741
8742 /* Adjust the alignment in case the bit position is not
8743 a multiple of the alignment of the inner object. */
8744 while (bitpos % alignment != 0)
8745 alignment >>= 1;
8746
8747 if (GET_CODE (XEXP (op0, 0)) == REG)
8748 mark_reg_pointer (XEXP (op0, 0), alignment);
8749
8750 MEM_IN_STRUCT_P (op0) = 1;
8751 MEM_VOLATILE_P (op0) |= volatilep;
8752
8753 *palign = alignment;
8754 return op0;
8755 }
8756
8757 default:
8758 break;
8759
8760 }
8761
8762 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8763}
8764\f
b93a436e
JL
8765/* Return the tree node and offset if a given argument corresponds to
8766 a string constant. */
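/* For example, given the tree for ("abc" + 4), string_constant returns
   the STRING_CST "abc" and sets *PTR_OFFSET to the tree for 4; for a
   plain "abc" the offset is integer_zero_node.  */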
8767
28f4ec01 8768tree
b93a436e
JL
8769string_constant (arg, ptr_offset)
8770 tree arg;
8771 tree *ptr_offset;
8772{
8773 STRIP_NOPS (arg);
8774
8775 if (TREE_CODE (arg) == ADDR_EXPR
8776 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8777 {
8778 *ptr_offset = integer_zero_node;
8779 return TREE_OPERAND (arg, 0);
8780 }
8781 else if (TREE_CODE (arg) == PLUS_EXPR)
8782 {
8783 tree arg0 = TREE_OPERAND (arg, 0);
8784 tree arg1 = TREE_OPERAND (arg, 1);
8785
8786 STRIP_NOPS (arg0);
8787 STRIP_NOPS (arg1);
8788
8789 if (TREE_CODE (arg0) == ADDR_EXPR
8790 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 8791 {
b93a436e
JL
8792 *ptr_offset = arg1;
8793 return TREE_OPERAND (arg0, 0);
bbf6f052 8794 }
b93a436e
JL
8795 else if (TREE_CODE (arg1) == ADDR_EXPR
8796 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 8797 {
b93a436e
JL
8798 *ptr_offset = arg0;
8799 return TREE_OPERAND (arg1, 0);
bbf6f052 8800 }
b93a436e 8801 }
ca695ac9 8802
b93a436e
JL
8803 return 0;
8804}
ca695ac9 8805\f
b93a436e
JL
8806/* Expand code for a post- or pre- increment or decrement
8807 and return the RTX for the result.
8808 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
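/* E.g., `i++' whose value is used reaches here with POST == 1, while a
   bare `--i;' statement reaches here with POST == 0 and IGNORE nonzero
   (see the POSTINCREMENT_EXPR handling in expand_expr above).  */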
1499e0a8 8809
b93a436e
JL
8810static rtx
8811expand_increment (exp, post, ignore)
8812 register tree exp;
8813 int post, ignore;
ca695ac9 8814{
b93a436e
JL
8815 register rtx op0, op1;
8816 register rtx temp, value;
8817 register tree incremented = TREE_OPERAND (exp, 0);
8818 optab this_optab = add_optab;
8819 int icode;
8820 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8821 int op0_is_copy = 0;
8822 int single_insn = 0;
8823 /* 1 means we can't store into OP0 directly,
8824 because it is a subreg narrower than a word,
8825 and we don't dare clobber the rest of the word. */
8826 int bad_subreg = 0;
1499e0a8 8827
b93a436e
JL
8828 /* Stabilize any component ref that might need to be
8829 evaluated more than once below. */
8830 if (!post
8831 || TREE_CODE (incremented) == BIT_FIELD_REF
8832 || (TREE_CODE (incremented) == COMPONENT_REF
8833 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8834 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8835 incremented = stabilize_reference (incremented);
8836 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8837 ones into save exprs so that they don't accidentally get evaluated
8838 more than once by the code below. */
8839 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8840 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8841 incremented = save_expr (incremented);
e9a25f70 8842
b93a436e
JL
8843 /* Compute the operands as RTX.
8844 Note whether OP0 is the actual lvalue or a copy of it:
8845 I believe it is a copy iff it is a register or subreg
8846 and insns were generated in computing it. */
e9a25f70 8847
b93a436e
JL
8848 temp = get_last_insn ();
8849 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
e9a25f70 8850
b93a436e
JL
8851 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8852 in place but instead must do sign- or zero-extension during assignment,
8853 so we copy it into a new register and let the code below use it as
8854 a copy.
e9a25f70 8855
b93a436e
JL
 8856 Note that we can safely modify this SUBREG since it is known not to be
8857 shared (it was made by the expand_expr call above). */
8858
8859 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8860 {
8861 if (post)
8862 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8863 else
8864 bad_subreg = 1;
8865 }
8866 else if (GET_CODE (op0) == SUBREG
8867 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8868 {
8869 /* We cannot increment this SUBREG in place. If we are
8870 post-incrementing, get a copy of the old value. Otherwise,
8871 just mark that we cannot increment in place. */
8872 if (post)
8873 op0 = copy_to_reg (op0);
8874 else
8875 bad_subreg = 1;
e9a25f70
JL
8876 }
8877
b93a436e
JL
8878 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8879 && temp != get_last_insn ());
8880 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8881 EXPAND_MEMORY_USE_BAD);
1499e0a8 8882
b93a436e
JL
8883 /* Decide whether incrementing or decrementing. */
8884 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8885 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8886 this_optab = sub_optab;
8887
8888 /* Convert decrement by a constant into a negative increment. */
8889 if (this_optab == sub_optab
8890 && GET_CODE (op1) == CONST_INT)
ca695ac9 8891 {
b93a436e
JL
8892 op1 = GEN_INT (- INTVAL (op1));
8893 this_optab = add_optab;
ca695ac9 8894 }
1499e0a8 8895
b93a436e
JL
8896 /* For a preincrement, see if we can do this with a single instruction. */
8897 if (!post)
8898 {
8899 icode = (int) this_optab->handlers[(int) mode].insn_code;
8900 if (icode != (int) CODE_FOR_nothing
8901 /* Make sure that OP0 is valid for operands 0 and 1
8902 of the insn we want to queue. */
a995e389
RH
8903 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8904 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8905 && (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e
JL
8906 single_insn = 1;
8907 }
bbf6f052 8908
b93a436e
JL
8909 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8910 then we cannot just increment OP0. We must therefore contrive to
8911 increment the original value. Then, for postincrement, we can return
8912 OP0 since it is a copy of the old value. For preincrement, expand here
8913 unless we can do it with a single insn.
bbf6f052 8914
b93a436e
JL
8915 Likewise if storing directly into OP0 would clobber high bits
8916 we need to preserve (bad_subreg). */
8917 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 8918 {
b93a436e
JL
8919 /* This is the easiest way to increment the value wherever it is.
8920 Problems with multiple evaluation of INCREMENTED are prevented
8921 because either (1) it is a component_ref or preincrement,
8922 in which case it was stabilized above, or (2) it is an array_ref
8923 with constant index in an array in a register, which is
8924 safe to reevaluate. */
8925 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8926 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8927 ? MINUS_EXPR : PLUS_EXPR),
8928 TREE_TYPE (exp),
8929 incremented,
8930 TREE_OPERAND (exp, 1));
a358cee0 8931
b93a436e
JL
8932 while (TREE_CODE (incremented) == NOP_EXPR
8933 || TREE_CODE (incremented) == CONVERT_EXPR)
8934 {
8935 newexp = convert (TREE_TYPE (incremented), newexp);
8936 incremented = TREE_OPERAND (incremented, 0);
8937 }
bbf6f052 8938
b93a436e
JL
 8939 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8940 return post ? op0 : temp;
8941 }
bbf6f052 8942
b93a436e
JL
8943 if (post)
8944 {
8945 /* We have a true reference to the value in OP0.
8946 If there is an insn to add or subtract in this mode, queue it.
8947 Queueing the increment insn avoids the register shuffling
8948 that often results if we must increment now and first save
8949 the old value for subsequent use. */
bbf6f052 8950
b93a436e
JL
8951#if 0 /* Turned off to avoid making extra insn for indexed memref. */
8952 op0 = stabilize (op0);
8953#endif
41dfd40c 8954
b93a436e
JL
8955 icode = (int) this_optab->handlers[(int) mode].insn_code;
8956 if (icode != (int) CODE_FOR_nothing
8957 /* Make sure that OP0 is valid for operands 0 and 1
8958 of the insn we want to queue. */
a995e389
RH
8959 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8960 && (*insn_data[icode].operand[1].predicate) (op0, mode))
b93a436e 8961 {
a995e389 8962 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 8963 op1 = force_reg (mode, op1);
bbf6f052 8964
b93a436e
JL
8965 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8966 }
8967 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8968 {
8969 rtx addr = (general_operand (XEXP (op0, 0), mode)
8970 ? force_reg (Pmode, XEXP (op0, 0))
8971 : copy_to_reg (XEXP (op0, 0)));
8972 rtx temp, result;
ca695ac9 8973
b93a436e
JL
8974 op0 = change_address (op0, VOIDmode, addr);
8975 temp = force_reg (GET_MODE (op0), op0);
a995e389 8976 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 8977 op1 = force_reg (mode, op1);
ca695ac9 8978
b93a436e
JL
8979 /* The increment queue is LIFO, thus we have to `queue'
8980 the instructions in reverse order. */
8981 enqueue_insn (op0, gen_move_insn (op0, temp));
8982 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8983 return result;
bbf6f052
RK
8984 }
8985 }
ca695ac9 8986
b93a436e
JL
8987 /* Preincrement, or we can't increment with one simple insn. */
8988 if (post)
8989 /* Save a copy of the value before inc or dec, to return it later. */
8990 temp = value = copy_to_reg (op0);
8991 else
8992 /* Arrange to return the incremented value. */
8993 /* Copy the rtx because expand_binop will protect from the queue,
8994 and the results of that would be invalid for us to return
8995 if our caller does emit_queue before using our result. */
8996 temp = copy_rtx (value = op0);
bbf6f052 8997
b93a436e
JL
8998 /* Increment however we can. */
8999 op1 = expand_binop (mode, this_optab, value, op1,
7d384cc0 9000 current_function_check_memory_usage ? NULL_RTX : op0,
b93a436e
JL
9001 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9002 /* Make sure the value is stored into OP0. */
9003 if (op1 != op0)
9004 emit_move_insn (op0, op1);
5718612f 9005
b93a436e
JL
9006 return temp;
9007}
9008\f
9009/* Expand all function calls contained within EXP, innermost ones first.
9010 But don't look within expressions that have sequence points.
9011 For each CALL_EXPR, record the rtx for its value
9012 in the CALL_EXPR_RTL field. */
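/* For example, when expanding `a + f (x)', preexpand_calls expands the
   call to F (recording its value rtx) before any code for the addition
   is generated.  */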
5718612f 9013
b93a436e
JL
9014static void
9015preexpand_calls (exp)
9016 tree exp;
9017{
9018 register int nops, i;
9019 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5718612f 9020
b93a436e
JL
9021 if (! do_preexpand_calls)
9022 return;
5718612f 9023
b93a436e 9024 /* Only expressions and references can contain calls. */
bbf6f052 9025
b93a436e
JL
9026 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9027 return;
bbf6f052 9028
b93a436e
JL
9029 switch (TREE_CODE (exp))
9030 {
9031 case CALL_EXPR:
9032 /* Do nothing if already expanded. */
9033 if (CALL_EXPR_RTL (exp) != 0
9034 /* Do nothing if the call returns a variable-sized object. */
9035 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
9036 /* Do nothing to built-in functions. */
9037 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9038 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9039 == FUNCTION_DECL)
9040 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9041 return;
bbf6f052 9042
b93a436e
JL
9043 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9044 return;
bbf6f052 9045
b93a436e
JL
9046 case COMPOUND_EXPR:
9047 case COND_EXPR:
9048 case TRUTH_ANDIF_EXPR:
9049 case TRUTH_ORIF_EXPR:
9050 /* If we find one of these, then we can be sure
9051 the adjust will be done for it (since it makes jumps).
9052 Do it now, so that if this is inside an argument
9053 of a function, we don't get the stack adjustment
9054 after some other args have already been pushed. */
9055 do_pending_stack_adjust ();
9056 return;
bbf6f052 9057
b93a436e
JL
9058 case BLOCK:
9059 case RTL_EXPR:
9060 case WITH_CLEANUP_EXPR:
9061 case CLEANUP_POINT_EXPR:
9062 case TRY_CATCH_EXPR:
9063 return;
bbf6f052 9064
b93a436e
JL
9065 case SAVE_EXPR:
9066 if (SAVE_EXPR_RTL (exp) != 0)
9067 return;
9068
9069 default:
9070 break;
ca695ac9 9071 }
bbf6f052 9072
b93a436e
JL
9073 nops = tree_code_length[(int) TREE_CODE (exp)];
9074 for (i = 0; i < nops; i++)
9075 if (TREE_OPERAND (exp, i) != 0)
9076 {
19832c77
MM
9077 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9078 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9079 It doesn't happen before the call is made. */
9080 ;
9081 else
9082 {
9083 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9084 if (type == 'e' || type == '<' || type == '1' || type == '2'
9085 || type == 'r')
9086 preexpand_calls (TREE_OPERAND (exp, i));
9087 }
b93a436e
JL
9088 }
9089}
9090\f
9091/* At the start of a function, record that we have no previously-pushed
9092 arguments waiting to be popped. */
bbf6f052 9093
b93a436e
JL
9094void
9095init_pending_stack_adjust ()
9096{
9097 pending_stack_adjust = 0;
9098}
bbf6f052 9099
b93a436e 9100/* When exiting from function, if safe, clear out any pending stack adjust
060fbabf
JL
9101 so the adjustment won't get done.
9102
9103 Note, if the current function calls alloca, then it must have a
9104 frame pointer regardless of the value of flag_omit_frame_pointer. */
bbf6f052 9105
b93a436e
JL
9106void
9107clear_pending_stack_adjust ()
9108{
9109#ifdef EXIT_IGNORE_STACK
9110 if (optimize > 0
060fbabf
JL
9111 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9112 && EXIT_IGNORE_STACK
b93a436e
JL
9113 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9114 && ! flag_inline_functions)
9115 pending_stack_adjust = 0;
9116#endif
9117}
bbf6f052 9118
b93a436e
JL
9119/* Pop any previously-pushed arguments that have not been popped yet. */
9120
9121void
9122do_pending_stack_adjust ()
9123{
9124 if (inhibit_defer_pop == 0)
ca695ac9 9125 {
b93a436e
JL
9126 if (pending_stack_adjust != 0)
9127 adjust_stack (GEN_INT (pending_stack_adjust));
9128 pending_stack_adjust = 0;
bbf6f052 9129 }
bbf6f052
RK
9130}
9131\f
b93a436e 9132/* Expand conditional expressions. */
bbf6f052 9133
b93a436e
JL
9134/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9135 LABEL is an rtx of code CODE_LABEL, in this function and all the
9136 functions here. */
bbf6f052 9137
b93a436e
JL
9138void
9139jumpifnot (exp, label)
ca695ac9 9140 tree exp;
b93a436e 9141 rtx label;
bbf6f052 9142{
b93a436e
JL
9143 do_jump (exp, label, NULL_RTX);
9144}
bbf6f052 9145
b93a436e 9146/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
ca695ac9 9147
b93a436e
JL
9148void
9149jumpif (exp, label)
9150 tree exp;
9151 rtx label;
9152{
9153 do_jump (exp, NULL_RTX, label);
9154}
ca695ac9 9155
b93a436e
JL
9156/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9157 the result is zero, or IF_TRUE_LABEL if the result is one.
9158 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9159 meaning fall through in that case.
ca695ac9 9160
b93a436e
JL
9161 do_jump always does any pending stack adjust except when it does not
9162 actually perform a jump. An example where there is no jump
9163 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
ca695ac9 9164
b93a436e
JL
9165 This function is responsible for optimizing cases such as
9166 &&, || and comparison operators in EXP. */
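/* For instance, for `if (a && b) ...' do_jump receives the
   TRUTH_ANDIF_EXPR with the else-part's label as IF_FALSE_LABEL: A is
   tested first and jumps there directly when false, so B is never
   evaluated in that case.  */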
5718612f 9167
b93a436e
JL
9168void
9169do_jump (exp, if_false_label, if_true_label)
9170 tree exp;
9171 rtx if_false_label, if_true_label;
9172{
9173 register enum tree_code code = TREE_CODE (exp);
9174 /* Some cases need to create a label to jump to
9175 in order to properly fall through.
9176 These cases set DROP_THROUGH_LABEL nonzero. */
9177 rtx drop_through_label = 0;
9178 rtx temp;
b93a436e
JL
9179 int i;
9180 tree type;
9181 enum machine_mode mode;
ca695ac9 9182
dbecbbe4
JL
9183#ifdef MAX_INTEGER_COMPUTATION_MODE
9184 check_max_integer_computation_mode (exp);
9185#endif
9186
b93a436e 9187 emit_queue ();
ca695ac9 9188
b93a436e 9189 switch (code)
ca695ac9 9190 {
b93a436e 9191 case ERROR_MARK:
ca695ac9 9192 break;
bbf6f052 9193
b93a436e
JL
9194 case INTEGER_CST:
9195 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9196 if (temp)
9197 emit_jump (temp);
9198 break;
bbf6f052 9199
b93a436e
JL
9200#if 0
9201 /* This is not true with #pragma weak */
9202 case ADDR_EXPR:
9203 /* The address of something can never be zero. */
9204 if (if_true_label)
9205 emit_jump (if_true_label);
9206 break;
9207#endif
bbf6f052 9208
b93a436e
JL
9209 case NOP_EXPR:
9210 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9211 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9212 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9213 goto normal;
9214 case CONVERT_EXPR:
9215 /* If we are narrowing the operand, we have to do the compare in the
9216 narrower mode. */
9217 if ((TYPE_PRECISION (TREE_TYPE (exp))
9218 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9219 goto normal;
9220 case NON_LVALUE_EXPR:
9221 case REFERENCE_EXPR:
9222 case ABS_EXPR:
9223 case NEGATE_EXPR:
9224 case LROTATE_EXPR:
9225 case RROTATE_EXPR:
9226 /* These cannot change zero->non-zero or vice versa. */
9227 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9228 break;
bbf6f052 9229
14a774a9
RK
9230 case WITH_RECORD_EXPR:
9231 /* Put the object on the placeholder list, recurse through our first
9232 operand, and pop the list. */
9233 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9234 placeholder_list);
9235 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9236 placeholder_list = TREE_CHAIN (placeholder_list);
9237 break;
9238
b93a436e
JL
9239#if 0
9240 /* This is never less insns than evaluating the PLUS_EXPR followed by
9241 a test and can be longer if the test is eliminated. */
9242 case PLUS_EXPR:
9243 /* Reduce to minus. */
9244 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9245 TREE_OPERAND (exp, 0),
9246 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9247 TREE_OPERAND (exp, 1))));
9248 /* Process as MINUS. */
ca695ac9 9249#endif
bbf6f052 9250
b93a436e
JL
9251 case MINUS_EXPR:
9252 /* Non-zero iff operands of minus differ. */
b30f05db
BS
9253 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9254 TREE_OPERAND (exp, 0),
9255 TREE_OPERAND (exp, 1)),
9256 NE, NE, if_false_label, if_true_label);
b93a436e 9257 break;
bbf6f052 9258
b93a436e
JL
9259 case BIT_AND_EXPR:
9260 /* If we are AND'ing with a small constant, do this comparison in the
9261 smallest type that fits. If the machine doesn't have comparisons
9262 that small, it will be converted back to the wider comparison.
9263 This helps if we are testing the sign bit of a narrower object.
9264 combine can't do this for us because it can't know whether a
9265 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
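 /* E.g., for `if (x & 0x80)' with X an int, floor_log2 gives 7, so
    the test is retried as a QImode comparison, where it is just a
    sign-bit test of the low byte.  */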
bbf6f052 9266
b93a436e
JL
9267 if (! SLOW_BYTE_ACCESS
9268 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9269 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9270 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9271 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9272 && (type = type_for_mode (mode, 1)) != 0
9273 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9274 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9275 != CODE_FOR_nothing))
9276 {
9277 do_jump (convert (type, exp), if_false_label, if_true_label);
9278 break;
9279 }
9280 goto normal;
bbf6f052 9281
b93a436e
JL
9282 case TRUTH_NOT_EXPR:
9283 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9284 break;
bbf6f052 9285
b93a436e
JL
9286 case TRUTH_ANDIF_EXPR:
9287 if (if_false_label == 0)
9288 if_false_label = drop_through_label = gen_label_rtx ();
9289 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9290 start_cleanup_deferral ();
9291 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9292 end_cleanup_deferral ();
9293 break;
bbf6f052 9294
b93a436e
JL
9295 case TRUTH_ORIF_EXPR:
9296 if (if_true_label == 0)
9297 if_true_label = drop_through_label = gen_label_rtx ();
9298 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9299 start_cleanup_deferral ();
9300 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9301 end_cleanup_deferral ();
9302 break;
bbf6f052 9303
b93a436e
JL
9304 case COMPOUND_EXPR:
9305 push_temp_slots ();
9306 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9307 preserve_temp_slots (NULL_RTX);
9308 free_temp_slots ();
9309 pop_temp_slots ();
9310 emit_queue ();
9311 do_pending_stack_adjust ();
9312 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9313 break;
bbf6f052 9314
b93a436e
JL
9315 case COMPONENT_REF:
9316 case BIT_FIELD_REF:
9317 case ARRAY_REF:
9318 {
9319 int bitsize, bitpos, unsignedp;
9320 enum machine_mode mode;
9321 tree type;
9322 tree offset;
9323 int volatilep = 0;
9324 int alignment;
bbf6f052 9325
b93a436e
JL
9326 /* Get description of this reference. We don't actually care
9327 about the underlying object here. */
9328 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9329 &mode, &unsignedp, &volatilep,
9330 &alignment);
bbf6f052 9331
b93a436e
JL
9332 type = type_for_size (bitsize, unsignedp);
9333 if (! SLOW_BYTE_ACCESS
9334 && type != 0 && bitsize >= 0
9335 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9336 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9337 != CODE_FOR_nothing))
9338 {
9339 do_jump (convert (type, exp), if_false_label, if_true_label);
9340 break;
9341 }
9342 goto normal;
9343 }
bbf6f052 9344
b93a436e
JL
9345 case COND_EXPR:
9346 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9347 if (integer_onep (TREE_OPERAND (exp, 1))
9348 && integer_zerop (TREE_OPERAND (exp, 2)))
9349 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
bbf6f052 9350
b93a436e
JL
9351 else if (integer_zerop (TREE_OPERAND (exp, 1))
9352 && integer_onep (TREE_OPERAND (exp, 2)))
9353 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
bbf6f052 9354
b93a436e
JL
9355 else
9356 {
9357 register rtx label1 = gen_label_rtx ();
9358 drop_through_label = gen_label_rtx ();
bbf6f052 9359
b93a436e 9360 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
bbf6f052 9361
b93a436e
JL
9362 start_cleanup_deferral ();
9363 /* Now the THEN-expression. */
9364 do_jump (TREE_OPERAND (exp, 1),
9365 if_false_label ? if_false_label : drop_through_label,
9366 if_true_label ? if_true_label : drop_through_label);
9367 /* In case the do_jump just above never jumps. */
9368 do_pending_stack_adjust ();
9369 emit_label (label1);
bbf6f052 9370
b93a436e
JL
9371 /* Now the ELSE-expression. */
9372 do_jump (TREE_OPERAND (exp, 2),
9373 if_false_label ? if_false_label : drop_through_label,
9374 if_true_label ? if_true_label : drop_through_label);
9375 end_cleanup_deferral ();
9376 }
9377 break;
bbf6f052 9378
b93a436e
JL
9379 case EQ_EXPR:
9380 {
9381 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 9382
9ec36da5
JL
9383 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9384 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
9385 {
9386 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9387 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9388 do_jump
9389 (fold
9390 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9391 fold (build (EQ_EXPR, TREE_TYPE (exp),
9392 fold (build1 (REALPART_EXPR,
9393 TREE_TYPE (inner_type),
9394 exp0)),
9395 fold (build1 (REALPART_EXPR,
9396 TREE_TYPE (inner_type),
9397 exp1)))),
9398 fold (build (EQ_EXPR, TREE_TYPE (exp),
9399 fold (build1 (IMAGPART_EXPR,
9400 TREE_TYPE (inner_type),
9401 exp0)),
9402 fold (build1 (IMAGPART_EXPR,
9403 TREE_TYPE (inner_type),
9404 exp1)))))),
9405 if_false_label, if_true_label);
9406 }
9ec36da5
JL
9407
9408 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9409 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9410
b93a436e 9411 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
1c0290ea 9412 && !can_compare_p (TYPE_MODE (inner_type), ccp_jump))
b93a436e
JL
9413 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9414 else
b30f05db 9415 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
b93a436e
JL
9416 break;
9417 }
bbf6f052 9418
b93a436e
JL
9419 case NE_EXPR:
9420 {
9421 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 9422
9ec36da5
JL
9423 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9424 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
9425 {
9426 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9427 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9428 do_jump
9429 (fold
9430 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9431 fold (build (NE_EXPR, TREE_TYPE (exp),
9432 fold (build1 (REALPART_EXPR,
9433 TREE_TYPE (inner_type),
9434 exp0)),
9435 fold (build1 (REALPART_EXPR,
9436 TREE_TYPE (inner_type),
9437 exp1)))),
9438 fold (build (NE_EXPR, TREE_TYPE (exp),
9439 fold (build1 (IMAGPART_EXPR,
9440 TREE_TYPE (inner_type),
9441 exp0)),
9442 fold (build1 (IMAGPART_EXPR,
9443 TREE_TYPE (inner_type),
9444 exp1)))))),
9445 if_false_label, if_true_label);
9446 }
9ec36da5
JL
9447
9448 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9449 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9450
b93a436e 9451 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
1c0290ea 9452 && !can_compare_p (TYPE_MODE (inner_type), ccp_jump))
b93a436e
JL
9453 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9454 else
b30f05db 9455 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
b93a436e
JL
9456 break;
9457 }
bbf6f052 9458
b93a436e 9459 case LT_EXPR:
1c0290ea
BS
9460 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9461 if (GET_MODE_CLASS (mode) == MODE_INT
9462 && ! can_compare_p (mode, ccp_jump))
b93a436e
JL
9463 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9464 else
b30f05db 9465 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
b93a436e 9466 break;
bbf6f052 9467
b93a436e 9468 case LE_EXPR:
1c0290ea
BS
9469 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9470 if (GET_MODE_CLASS (mode) == MODE_INT
9471 && ! can_compare_p (mode, ccp_jump))
b93a436e
JL
9472 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9473 else
b30f05db 9474 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
b93a436e 9475 break;
bbf6f052 9476
b93a436e 9477 case GT_EXPR:
1c0290ea
BS
9478 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9479 if (GET_MODE_CLASS (mode) == MODE_INT
9480 && ! can_compare_p (mode, ccp_jump))
b93a436e
JL
9481 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9482 else
b30f05db 9483 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
b93a436e 9484 break;
bbf6f052 9485
b93a436e 9486 case GE_EXPR:
1c0290ea
BS
9487 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9488 if (GET_MODE_CLASS (mode) == MODE_INT
9489 && ! can_compare_p (mode, ccp_jump))
b93a436e
JL
9490 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9491 else
b30f05db 9492 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
b93a436e 9493 break;
bbf6f052 9494

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
	 comparisons and tests from non-SI objects to have different code
	 sequences.  */
      /* Copy to register to avoid generating bad insns by cse
	 from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
	temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
	{
	  rtx target = temp == const0_rtx ? if_false_label : if_true_label;
	  if (target)
	    emit_jump (target);
	}
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
	       && ! can_compare_p (GET_MODE (temp), ccp_jump))
	/* Note swapping the labels gives us not-equal.  */
	do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
	do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
				 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
				 GET_MODE (temp), NULL_RTX, 0,
				 if_false_label, if_true_label);
      else
	abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
	 do any stack adjusts from that code, before the place
	 where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
\f
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */
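/* For example, do_jump expands LT_EXPR on a wide integer by calling this
   with SWAP == 1: the operands are fetched in swapped order, so the GT
   test below actually computes op1 > op0, i.e. the desired less-than.  */
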
static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1,
				if_false_label, if_true_label);
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
			      if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
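  /* E.g. for a DImode comparison on a 32-bit target, each iteration
     emits "branch to IF_TRUE_LABEL if this word of OP0 is greater" and
     "branch to IF_FALSE_LABEL if the two words differ"; only when a
     word pair is equal does control fall through to the next,
     lower-order pair.  */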
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
	{
	  op0_word = operand_subword_force (op0, i, mode);
	  op1_word = operand_subword_force (op1, i, mode);
	}
      else
	{
	  op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
	  op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
	}

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
			       (unsignedp || i > 0), word_mode, NULL_RTX, 0,
			       NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
			       NULL_RTX, 0, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
			     operand_subword_force (op1, i, mode),
			     EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
			     word_mode, NULL_RTX, 0, if_false_label,
			     NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

f5963e61 9648void
b93a436e
JL
9649do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
9650 rtx op0;
9651 rtx if_false_label, if_true_label;
ca695ac9 9652{
b93a436e
JL
9653 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
9654 rtx part;
9655 int i;
9656 rtx drop_through_label = 0;
bbf6f052 9657
b93a436e
JL
9658 /* The fastest way of doing this comparison on almost any machine is to
9659 "or" all the words and compare the result. If all have to be loaded
9660 from memory and this is a very wide item, it's possible this may
9661 be slower, but that's highly unlikely. */
bbf6f052 9662
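  /* E.g. for a DImode OP0 on a 32-bit target this computes
     part = low_word | high_word and then emits a single
     compare-and-branch of PART against zero, instead of two
     separate word comparisons.  */
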
  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
			 operand_subword_force (op0, i, GET_MODE (op0)),
			 part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
			       NULL_RTX, 0, if_false_label, if_true_label);
      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
			     const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
			     if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE,
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }
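
  /* E.g. "5 < x" arrives with OP0 == (const_int 5) and CODE == LT;
     after the swap OP1 is the constant and CODE is GT, which tests
     the same relation in canonical constant-second form.  */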

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
			 if_false_label, if_true_label)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }
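
  /* E.g. a request to "jump to IF_FALSE_LABEL unless op0 EQ op1" becomes
     "jump to (what is now) IF_TRUE_LABEL if op0 NE op1".  Floating modes
     are excluded because reversing a comparison is not safe when the
     operands may be unordered (NaNs).  */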

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
	{
	  if (if_true_label)
	    emit_jump (if_true_label);
	}
      else
	{
	  if (if_false_label)
	    emit_jump (if_false_label);
	}
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
	  && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
	op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
			   if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
		     if_true_label)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  int align0, align1;
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
	  == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
			   ((mode == BLKmode)
			    ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
			   MIN (align0, align1) / BITS_PER_UNIT,
			   if_false_label, if_true_label);
}
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
	   && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	       == FUNCTION_TYPE))
	  || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
	      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */
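
  /* E.g. a signed "x < 1" is rewritten as "x <= 0" and a signed
     "x > -1" as "x >= 0", so the zero-comparison special cases
     below can recognize them.  */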

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
	code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = LT;
      else
	code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
	arg1 = integer_zero_node, code = GE;
      else
	code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
	arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
	code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
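  /* E.g. "(x & 8) != 0" becomes "(x >> 3) & 1", and "(x & 8) == 0"
     becomes "((x >> 3) ^ 1) & 1".  When the tested bit is the
     high-order bit of its type, the trailing AND is omitted.  */
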
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
	 not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
	  && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
	  && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
	      < TYPE_PRECISION (type)))
	{
	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
	  inner = TREE_OPERAND (inner, 0);
	}

      /* If we are going to be able to omit the AND below, we must do our
	 operations as unsigned.  If we must use the AND, we have a choice.
	 Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
		       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
		       : 1
#endif
		       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
	  || GET_MODE (subtarget) != operand_mode
	  || ! safe_from_p (subtarget, inner, 1))
	subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
	op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
			    size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
	op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
	op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
			    ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
	op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode, ccp_store_flag))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
	 can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
	  || (! only_cheap && code == GE && integer_zerop (arg1)))
	;
      else if (BRANCH_COST >= 0
	       && ! only_cheap && (code == NE || code == EQ)
	       && TREE_CODE (type) != REAL_TYPE
	       && ((abs_optab->handlers[(int) operand_mode].insn_code
		    != CODE_FOR_nothing)
		   || (ffs_optab->handlers[(int) operand_mode].insn_code
		       != CODE_FOR_nothing)))
	;
      else
	return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
			    queued_subexp_p (op0) ? copy_rtx (op0) : op0,
			    queued_subexp_p (op1) ? copy_rtx (op1) : op1,
			    operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
	result = expand_binop (mode, xor_optab, result, const1_rtx,
			       result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

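  /* When emit_store_flag fails, the sequence emitted below is, in effect:
       target = 1; if (op0 <code> op1) goto label; target = 0; label:
     with the two constants interchanged when INVERT is set.  */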
  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
			     operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
	     || (result != const0_rtx && invert))
	    ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
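  /* E.g. if the case values start at 10, an original index of 7 reaches
     this point as (unsigned) 7 - 10, a value near the top of the unsigned
     range, so the single GTU branch below sends it to DEFAULT_LABEL just
     as it does an index beyond the last case.  */
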
  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
			   0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
			gen_rtx_MULT (Pmode, index,
				      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
			gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */