/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-98, 1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
#include "toplev.h"
#include "ggc.h"
#include "tm_p.h"

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Don't check memory usage, since the code being emitted is itself
   checking memory usage.  Used when current_function_check_memory_usage
   is true, to avoid infinite recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int to_readonly;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int from_readonly;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;

static rtx get_push_address PROTO ((int));

static rtx enqueue_insn PROTO((rtx, rtx));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...),
				     enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					    tree, tree, int, int));
static void store_constructor PROTO((tree, rtx, int, int, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int,
			      int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree, int));
static int fixed_type_p PROTO((tree));
static rtx var_rtx PROTO((tree));
static int readonly_fields_p PROTO((tree));
static rtx expand_expr_unaligned PROTO((tree, int *));
static rtx expand_increment PROTO((tree, int, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_compare_and_jump PROTO((tree, enum rtx_code, enum rtx_code, rtx, rtx));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
                                       (SIZE, ALIGN) < MOVE_RATIO)
#endif

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif
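
/* Illustrative sketch, not part of the original sources: how the
   MOVE_BY_PIECES_P heuristic plays out.  On a hypothetical 32-bit target
   with MOVE_MAX == 4 and MOVE_RATIO == 15 (optimizing for speed), a
   16-byte aligned copy needs move_by_pieces_ninsns (16, 4) == 4 SImode
   moves, so in emit_block_move below:

     if (GET_CODE (size) == CONST_INT
	 && MOVE_BY_PIECES_P (INTVAL (size), align))    4 < 15: true
       move_by_pieces (x, y, INTVAL (size), align);     inline the copy

   With -Os the ratio drops to 3, the same test fails, and emit_block_move
   falls back to a movstr pattern or a library call instead.  */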
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
  obfree (free_point);
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

void
free_expr_status (f)
     struct function *f;
{
  free (f->expr);
  f->expr = NULL;
}

/* Small sanity check that the queue is empty at the end of a function.  */
void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}
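
/* Illustrative sketch, not part of the original sources: the per-function
   lifecycle of this state.  The call sites named here are assumptions
   about the rest of the compiler, but the intended order is roughly:

     init_expr ();                  allocate cfun->expr, clear the queue
     ... expand the function body via expand_expr and friends ...
     finish_expr_for_function ();   abort () if increments are still queued
     free_expr_status (cfun);       release the expr_status block

   with mark_expr_status called from the GC mark phase in between.  */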
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
				  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_COPY_ATTRIBUTES (new, x);
	  MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
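
/* Illustrative sketch, not part of the original sources: how the queue is
   meant to be used when expanding something like V++ as a
   POSTINCREMENT_EXPR.  The variable names are hypothetical:

     rtx var = ...;                          rtx holding V
     rtx body = gen_add2_insn (var, const1_rtx);
     rtx q = enqueue_insn (var, body);       increment deferred; Q is QUEUED
     rtx val = protect_from_queue (q, 0);    pre-increment value, insn-safe
     ... build insns that use VAL ...
     emit_queue ();                          now the add is actually emitted

   Holding on to VAL across emit_queue () would be wrong, per the warning
   above protect_from_queue.  */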

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */			/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
					       GET_MODE_BITSIZE (intermediate))))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
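
/* Illustrative sketch, not part of the original sources: a typical call.
   To sign-extend a SImode pseudo into a DImode pseudo:

     rtx src = gen_reg_rtx (SImode);
     rtx dst = gen_reg_rtx (DImode);
     convert_move (dst, src, 0);    UNSIGNEDP == 0 selects SIGN_EXTEND

   On a 32-bit target this lands in the "expanding beyond a word" case
   above: the low word is copied and the remaining word is filled with
   the shifted-in sign, unless an extendsidi2 pattern handles it
   directly.  */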

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
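
/* Illustrative sketch, not part of the original sources: the constant
   handling above in action.  Converting (const_int -1) from QImode to
   SImode as an unsigned value:

     rtx x = convert_modes (SImode, QImode, GEN_INT (-1), 1);

   takes the CONST_INT branch with OLDMODE narrower than MODE, so the
   value is zero-extended and X ends up as (const_int 255) rather than
   being left as -1.  */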
\f

/* This macro determines the largest unit size that move_by_pieces
   can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);
  data.to_readonly = RTX_UNCHANGING_P (to);
  data.from_readonly = RTX_UNCHANGING_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
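
/* Illustrative sketch, not part of the original sources: what
   move_by_pieces generates for a 10-byte copy with 4-byte alignment on a
   hypothetical 32-bit target (MOVE_MAX == 4, no autoincrement addressing):

     (set (mem:SI d+0) (mem:SI s+0))
     (set (mem:SI d+4) (mem:SI s+4))
     (set (mem:HI d+8) (mem:HI s+8))

   i.e. two SImode moves, then one HImode move, leaving data.len == 0.  */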

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}
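
/* Illustrative sketch, not part of the original sources: for the same
   hypothetical target (MOVE_MAX == 4), move_by_pieces_ninsns (10, 4)
   counts 10/4 = 2 SImode moves (remainder 2), then 2/2 = 1 HImode move,
   then nothing in QImode: 3 insns total.  emit_block_move compares this
   count against MOVE_RATIO via MOVE_BY_PIECES_P.  */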

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
	     ? gen_rtx_MEM (mode, data->to_addr)
	     : copy_rtx (change_address (data->to, mode,
					 plus_constant (data->to_addr,
							data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;
      RTX_UNCHANGING_P (to1) = data->to_readonly;

      from1
	= (data->autinc_from
	   ? gen_rtx_MEM (mode, data->from_addr)
	   : copy_rtx (change_address (data->from, mode,
				       plus_constant (data->from_addr,
						      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;
      RTX_UNCHANGING_P (from1) = data->from_readonly;

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, from1));
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
	emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
	emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
	 including more than one in the machine description unless
	 the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	{
	  enum insn_code code = movstr_optab[(int) mode];
	  insn_operand_predicate_fn pred;

	  if (code != CODE_FOR_nothing
	      /* We don't need MODE to be narrower than HOST_BITS_PER_WIDE_INT
		 here because if SIZE is less than the mode mask, as it is
		 returned by the macro, it will definitely be less than the
		 actual mode mask.  */
	      && ((GET_CODE (size) == CONST_INT
		   && ((unsigned HOST_WIDE_INT) INTVAL (size)
		       <= (GET_MODE_MASK (mode) >> 1)))
		  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
	      && ((pred = insn_data[(int) code].operand[0].predicate) == 0
		  || (*pred) (x, BLKmode))
	      && ((pred = insn_data[(int) code].operand[1].predicate) == 0
		  || (*pred) (y, BLKmode))
	      && ((pred = insn_data[(int) code].operand[3].predicate) == 0
		  || (*pred) (opalign, VOIDmode)))
	    {
	      rtx op2;
	      rtx last = get_last_insn ();
	      rtx pat;

	      op2 = convert_to_mode (mode, size, 1);
	      pred = insn_data[(int) code].operand[2].predicate;
	      if (pred != 0 && ! (*pred) (op2, mode))
		op2 = copy_to_mode_reg (mode, op2);

	      pat = GEN_FCN ((int) code) (x, y, op2, opalign);
	      if (pat)
		{
		  emit_insn (pat);
		  return 0;
		}
	      else
		delete_insns_since (last);
	    }
	}

      /* X, Y, or SIZE may have been passed through protect_from_queue.

	 It is unsafe to save the value generated by protect_from_queue
	 and reuse it later.  Consider what happens if emit_queue is
	 called before the return value from protect_from_queue is used.

	 Expansion of the CALL_EXPR below will call emit_queue before
	 we are finished emitting RTL for argument setup.  So if we are
	 not careful we could get the wrong value for an argument.

	 To avoid this problem we go ahead and emit code to copy X, Y &
	 SIZE into new pseudos.  We can then place those new pseudos
	 into an RTL_EXPR and use them later, even after a call to
	 emit_queue.

	 Note this is not strictly needed for library calls since they
	 do not call emit_queue before loading their arguments.  However,
	 we may need to have library calls call emit_queue in the future
	 since failing to do so could cause problems for targets which
	 define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
			      TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
	 memcpy in this context.

	 This could be a user call to memcpy and the user may wish to
	 examine the return value from memcpy.

	 For targets where libcalls and normal calls have different
	 conventions for returning pointers, we could end up generating
	 incorrect code.

	 So instead of using a libcall sequence we build up a suitable
	 CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
	{
	  tree fntype;

	  /* This was copied from except.c, I don't know if all this is
	     necessary in this context or not.  */
	  fn = get_identifier ("memcpy");
	  push_obstacks_nochange ();
	  end_temporary_allocation ();
	  fntype = build_pointer_type (void_type_node);
	  fntype = build_function_type (fntype, NULL_TREE);
	  fn = build_decl (FUNCTION_DECL, fn, fntype);
	  ggc_add_tree_root (&fn, 1);
	  DECL_EXTERNAL (fn) = 1;
	  TREE_PUBLIC (fn) = 1;
	  DECL_ARTIFICIAL (fn) = 1;
	  make_decl_rtl (fn, NULL_PTR, 1);
	  assemble_external (fn);
	  pop_obstacks ();
	}

      /* We need to make an argument list for the function call.

	 memcpy has three arguments, the first two are void * addresses and
	 the last is a size_t byte count for the copy.  */
      arg_list
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
	= build_tree_list (NULL_TREE,
			   make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
	= build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
			 call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, 0,
			 VOIDmode, 3, y, Pmode, x, Pmode,
			 convert_to_mode (TYPE_MODE (integer_type_node), size,
					  TREE_UNSIGNED (integer_type_node)),
			 TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
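
/* Illustrative sketch, not part of the original sources: a typical call
   from structure-assignment expansion.  X and Y are BLKmode MEMs; the
   address rtxs here are hypothetical:

     rtx x = gen_rtx_MEM (BLKmode, dst_addr);
     rtx y = gen_rtx_MEM (BLKmode, src_addr);
     emit_block_move (x, y, GEN_INT (32), 4);    32 bytes, 4-byte aligned

   A small constant SIZE goes through move_by_pieces; otherwise a movstrM
   pattern is tried, and failing that memcpy (or bcopy) is called.  */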
\f
1752/* Copy all or part of a value X into registers starting at REGNO.
1753 The number of registers to be filled is NREGS. */
1754
1755void
1756move_block_to_reg (regno, x, nregs, mode)
1757 int regno;
1758 rtx x;
1759 int nregs;
1760 enum machine_mode mode;
1761{
1762 int i;
381127e8
RL
1763#ifdef HAVE_load_multiple
1764 rtx pat;
1765 rtx last;
1766#endif
bbf6f052 1767
72bb9717
RK
1768 if (nregs == 0)
1769 return;
1770
bbf6f052
RK
1771 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1772 x = validize_mem (force_const_mem (mode, x));
1773
1774 /* See if the machine can do this with a load multiple insn. */
1775#ifdef HAVE_load_multiple
c3a02afe 1776 if (HAVE_load_multiple)
bbf6f052 1777 {
c3a02afe 1778 last = get_last_insn ();
38a448ca 1779 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1780 GEN_INT (nregs));
1781 if (pat)
1782 {
1783 emit_insn (pat);
1784 return;
1785 }
1786 else
1787 delete_insns_since (last);
bbf6f052 1788 }
1789#endif
1790
1791 for (i = 0; i < nregs; i++)
38a448ca 1792 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1793 operand_subword_force (x, i, mode));
1794}
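/* Usage sketch (illustrative; DREG is a hypothetical DImode value):
   on a 32-bit target this fills hard registers 4 and 5 with the two
   words of the value, using load_multiple when the target has it.  */
#if 0
  move_block_to_reg (4, dreg, 2, DImode);
#endif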
1795
1796/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1797 The number of registers to be filled is NREGS. SIZE indicates the number
1798 of bytes in the object X. */
1799
1800
1801void
0040593d 1802move_block_from_reg (regno, x, nregs, size)
1803 int regno;
1804 rtx x;
1805 int nregs;
0040593d 1806 int size;
1807{
1808 int i;
1809#ifdef HAVE_store_multiple
1810 rtx pat;
1811 rtx last;
1812#endif
58a32c5c 1813 enum machine_mode mode;
bbf6f052 1814
1815 /* If SIZE is that of a mode no bigger than a word, just use that
1816 mode's store operation. */
1817 if (size <= UNITS_PER_WORD
1818 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1819 {
1820 emit_move_insn (change_address (x, mode, NULL),
38a448ca 1821 gen_rtx_REG (mode, regno));
1822 return;
1823 }
1824
0040593d 1825 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1826 to the left before storing to memory. Note that the previous test
1827 doesn't handle all cases (e.g. SIZE == 3). */
1828 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1829 {
1830 rtx tem = operand_subword (x, 0, 1, BLKmode);
1831 rtx shift;
1832
1833 if (tem == 0)
1834 abort ();
1835
1836 shift = expand_shift (LSHIFT_EXPR, word_mode,
38a448ca 1837 gen_rtx_REG (word_mode, regno),
1838 build_int_2 ((UNITS_PER_WORD - size)
1839 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1840 emit_move_insn (tem, shift);
1841 return;
1842 }
1843
1844 /* See if the machine can do this with a store multiple insn. */
1845#ifdef HAVE_store_multiple
c3a02afe 1846 if (HAVE_store_multiple)
bbf6f052 1847 {
c3a02afe 1848 last = get_last_insn ();
38a448ca 1849 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1850 GEN_INT (nregs));
1851 if (pat)
1852 {
1853 emit_insn (pat);
1854 return;
1855 }
1856 else
1857 delete_insns_since (last);
bbf6f052 1858 }
1859#endif
1860
1861 for (i = 0; i < nregs; i++)
1862 {
1863 rtx tem = operand_subword (x, i, 1, BLKmode);
1864
1865 if (tem == 0)
1866 abort ();
1867
38a448ca 1868 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1869 }
1870}
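/* Worked example (illustrative): SIZE == 3 with UNITS_PER_WORD == 4 on
   a BYTES_BIG_ENDIAN target takes the shift path above: the three
   significant bytes sit at the low end of the register, so they are
   shifted left by (4 - 3) * BITS_PER_UNIT == 8 bits before the single
   word store, which places them at the low-order memory addresses.  */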
1871
1872/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1873 registers represented by a PARALLEL. SSIZE represents the total size of
1874 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1875 SRC in bits. */
1876/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1877 the balance will be in what would be the low-order memory addresses, i.e.
1878 left justified for big endian, right justified for little endian. This
1879 happens to be true for the targets currently using this support. If this
1880 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1881 would be needed. */
1882
1883void
1884emit_group_load (dst, orig_src, ssize, align)
1885 rtx dst, orig_src;
1886 int align, ssize;
fffa9c1d 1887{
1888 rtx *tmps, src;
1889 int start, i;
fffa9c1d 1890
aac5cc16 1891 if (GET_CODE (dst) != PARALLEL)
1892 abort ();
1893
1894 /* Check for a NULL entry, used to indicate that the parameter goes
1895 both on the stack and in registers. */
1896 if (XEXP (XVECEXP (dst, 0, 0), 0))
1897 start = 0;
fffa9c1d 1898 else
1899 start = 1;
1900
1901 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1902
1903 /* If we won't be loading directly from memory, protect the real source
1904 from strange tricks we might play. */
1905 src = orig_src;
1906 if (GET_CODE (src) != MEM)
1907 {
1908 if (GET_MODE (src) == VOIDmode)
1909 src = gen_reg_rtx (GET_MODE (dst));
1910 else
1911 src = gen_reg_rtx (GET_MODE (orig_src));
1912 emit_move_insn (src, orig_src);
1913 }
1914
1915 /* Process the pieces. */
1916 for (i = start; i < XVECLEN (dst, 0); i++)
1917 {
1918 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1919 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1920 int bytelen = GET_MODE_SIZE (mode);
1921 int shift = 0;
1922
1923 /* Handle trailing fragments that run over the size of the struct. */
1924 if (ssize >= 0 && bytepos + bytelen > ssize)
1925 {
1926 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1927 bytelen = ssize - bytepos;
1928 if (bytelen <= 0)
1929 abort();
1930 }
1931
1932 /* Optimize the access just a bit. */
1933 if (GET_CODE (src) == MEM
1934 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1935 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1936 && bytelen == GET_MODE_SIZE (mode))
1937 {
1938 tmps[i] = gen_reg_rtx (mode);
1939 emit_move_insn (tmps[i],
1940 change_address (src, mode,
1941 plus_constant (XEXP (src, 0),
1942 bytepos)));
fffa9c1d 1943 }
1944 else if (GET_CODE (src) == CONCAT)
1945 {
1946 if (bytepos == 0
1947 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1948 tmps[i] = XEXP (src, 0);
1949 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1950 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1951 tmps[i] = XEXP (src, 1);
1952 else
1953 abort ();
1954 }
fffa9c1d 1955 else
1956 {
1957 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1958 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1959 mode, mode, align, ssize);
1960 }
fffa9c1d 1961
1962 if (BYTES_BIG_ENDIAN && shift)
1963 {
1964 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1965 tmps[i], 0, OPTAB_WIDEN);
1966 }
fffa9c1d 1967 }
1968 emit_queue();
1969
1970 /* Copy the extracted pieces into the proper (probable) hard regs. */
1971 for (i = start; i < XVECLEN (dst, 0); i++)
1972 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1973}
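/* Illustrative sketch (register numbers are invented): a DST PARALLEL
   describing a 16-byte block whose first eight bytes are passed in a
   DImode register and whose second eight bytes are passed in a DFmode
   register, in the style of the Irix 6 ABI:

       (parallel [(expr_list (reg:DI 4) (const_int 0))
                  (expr_list (reg:DF 32) (const_int 8))])

   Each element pairs a register with the byte offset of the piece it
   carries; a null first register means that part of the value lives
   on the stack.  */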
1974
1975/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1976 registers represented by a PARALLEL. SSIZE represents the total size of
1977 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1978
1979void
1980emit_group_store (orig_dst, src, ssize, align)
1981 rtx orig_dst, src;
1982 int ssize, align;
fffa9c1d 1983{
1984 rtx *tmps, dst;
1985 int start, i;
fffa9c1d 1986
aac5cc16 1987 if (GET_CODE (src) != PARALLEL)
1988 abort ();
1989
1990 /* Check for a NULL entry, used to indicate that the parameter goes
1991 both on the stack and in registers. */
1992 if (XEXP (XVECEXP (src, 0, 0), 0))
1993 start = 0;
fffa9c1d 1994 else
1995 start = 1;
1996
1997 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
fffa9c1d 1998
1999 /* Copy the (probable) hard regs into pseudos. */
2000 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 2001 {
2002 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2003 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2004 emit_move_insn (tmps[i], reg);
2005 }
2006 emit_queue();
fffa9c1d 2007
2008 /* If we won't be storing directly into memory, protect the real destination
2009 from strange tricks we might play. */
2010 dst = orig_dst;
2011 if (GET_CODE (dst) == PARALLEL)
2012 {
2013 rtx temp;
2014
2015 /* We can get a PARALLEL dst if there is a conditional expression in
2016 a return statement. In that case, the dst and src are the same,
2017 so no action is necessary. */
2018 if (rtx_equal_p (dst, src))
2019 return;
2020
2021 /* It is unclear if we can ever reach here, but we may as well handle
2022 it. Allocate a temporary, and split this into a store/load to/from
2023 the temporary. */
2024
2025 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2026 emit_group_store (temp, src, ssize, align);
2027 emit_group_load (dst, temp, ssize, align);
2028 return;
2029 }
2030 else if (GET_CODE (dst) != MEM)
2031 {
2032 dst = gen_reg_rtx (GET_MODE (orig_dst));
2033 /* Make life a bit easier for combine. */
2034 emit_move_insn (dst, const0_rtx);
2035 }
2036 else if (! MEM_IN_STRUCT_P (dst))
2037 {
2038 /* store_bit_field requires that memory operations have
2039 mem_in_struct_p set; we might not. */
fffa9c1d 2040
aac5cc16 2041 dst = copy_rtx (orig_dst);
c6df88cb 2042 MEM_SET_IN_STRUCT_P (dst, 1);
2043 }
2044
2045 /* Process the pieces. */
2046 for (i = start; i < XVECLEN (src, 0); i++)
2047 {
2048 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2049 enum machine_mode mode = GET_MODE (tmps[i]);
2050 int bytelen = GET_MODE_SIZE (mode);
2051
2052 /* Handle trailing fragments that run over the size of the struct. */
2053 if (ssize >= 0 && bytepos + bytelen > ssize)
71bc0330 2054 {
2055 if (BYTES_BIG_ENDIAN)
2056 {
2057 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2058 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2059 tmps[i], 0, OPTAB_WIDEN);
2060 }
2061 bytelen = ssize - bytepos;
71bc0330 2062 }
fffa9c1d 2063
2064 /* Optimize the access just a bit. */
2065 if (GET_CODE (dst) == MEM
2066 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2067 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2068 && bytelen == GET_MODE_SIZE (mode))
2069 {
2070 emit_move_insn (change_address (dst, mode,
2071 plus_constant (XEXP (dst, 0),
2072 bytepos)),
2073 tmps[i]);
2074 }
2075 else
2076 {
2077 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2078 mode, tmps[i], align, ssize);
2079 }
fffa9c1d 2080 }
2081 emit_queue();
2082
2083 /* Copy from the pseudo into the (probable) hard reg. */
2084 if (GET_CODE (dst) == REG)
2085 emit_move_insn (orig_dst, dst);
2086}
2087
2088/* Generate code to copy a BLKmode object of TYPE out of a
2089 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2090 is null, a stack temporary is created. TGTBLK is returned.
2091
2092 The primary purpose of this routine is to handle functions
2093 that return BLKmode structures in registers. Some machines
2094 (the PA for example) want to return all small structures
2095 in registers regardless of the structure's alignment.
2096 */
2097
2098rtx
2099copy_blkmode_from_reg (tgtblk, srcreg, type)
2100 rtx tgtblk;
2101 rtx srcreg;
2102 tree type;
2103{
2104 int bytes = int_size_in_bytes (type);
2105 rtx src = NULL, dst = NULL;
c84e2712 2106 int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
2107 int bitpos, xbitpos, big_endian_correction = 0;
2108
2109 if (tgtblk == 0)
2110 {
2111 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
c6df88cb 2112 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2113 preserve_temp_slots (tgtblk);
2114 }
2115
2116 /* This code assumes srcreg is at least a full word. If it isn't,
2117 copy it into a new pseudo which is a full word. */
2118 if (GET_MODE (srcreg) != BLKmode
2119 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2120 srcreg = convert_to_mode (word_mode, srcreg,
2121 TREE_UNSIGNED (type));
2122
2123 /* Structures whose size is not a multiple of a word are aligned
2124 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2125 machine, this means we must skip the empty high order bytes when
2126 calculating the bit offset. */
2127 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2128 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2129 * BITS_PER_UNIT));
2130
2131 /* Copy the structure BITSIZE bits at a time.
2132
2133 We could probably emit more efficient code for machines
2134 which do not use strict alignment, but it doesn't seem
2135 worth the effort at the current time. */
2136 for (bitpos = 0, xbitpos = big_endian_correction;
2137 bitpos < bytes * BITS_PER_UNIT;
2138 bitpos += bitsize, xbitpos += bitsize)
2139 {
2140
2141 /* We need a new source operand each time xbitpos is on a
2142 word boundary and when xbitpos == big_endian_correction
2143 (the first time through). */
2144 if (xbitpos % BITS_PER_WORD == 0
2145 || xbitpos == big_endian_correction)
2146 src = operand_subword_force (srcreg,
2147 xbitpos / BITS_PER_WORD,
2148 BLKmode);
2149
2150 /* We need a new destination operand each time bitpos is on
2151 a word boundary. */
2152 if (bitpos % BITS_PER_WORD == 0)
2153 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2154
2155 /* Use xbitpos for the source extraction (right justified) and
2156 bitpos for the destination store (left justified). */
2157 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2158 extract_bit_field (src, bitsize,
2159 xbitpos % BITS_PER_WORD, 1,
2160 NULL_RTX, word_mode,
2161 word_mode,
2162 bitsize / BITS_PER_UNIT,
2163 BITS_PER_WORD),
2164 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2165 }
2166 return tgtblk;
2167}
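/* Worked example (illustrative): a 6-byte structure returned in two
   32-bit registers on a BYTES_BIG_ENDIAN target.  bytes % UNITS_PER_WORD
   is 2, so big_endian_correction is 32 - 2 * 8 == 16: extraction starts
   at bit 16 of the first source word, skipping the two unused high-order
   bytes, while the stores into TGTBLK start at bit 0.  */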
2168
2169
2170/* Add a USE expression for REG to the (possibly empty) list pointed
2171 to by CALL_FUSAGE. REG must denote a hard register. */
2172
2173void
2174use_reg (call_fusage, reg)
2175 rtx *call_fusage, reg;
2176{
2177 if (GET_CODE (reg) != REG
2178 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2179 abort();
2180
2181 *call_fusage
2182 = gen_rtx_EXPR_LIST (VOIDmode,
2183 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2184}
2185
2186/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2187 starting at REGNO. All of these registers must be hard registers. */
2188
2189void
2190use_regs (call_fusage, regno, nregs)
2191 rtx *call_fusage;
2192 int regno;
2193 int nregs;
2194{
0304dfbb 2195 int i;
bbf6f052 2196
2197 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2198 abort ();
2199
2200 for (i = 0; i < nregs; i++)
38a448ca 2201 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
bbf6f052 2202}
2203
2204/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2205 PARALLEL REGS. This is for calls that pass values in multiple
2206 non-contiguous locations. The Irix 6 ABI has examples of this. */
2207
2208void
2209use_group_regs (call_fusage, regs)
2210 rtx *call_fusage;
2211 rtx regs;
2212{
2213 int i;
2214
2215 for (i = 0; i < XVECLEN (regs, 0); i++)
2216 {
2217 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2218
2219 /* A NULL entry means the parameter goes both on the stack and in
2220 registers. This can also be a MEM for targets that pass values
2221 partially on the stack and partially in registers. */
e9a25f70 2222 if (reg != 0 && GET_CODE (reg) == REG)
2223 use_reg (call_fusage, reg);
2224 }
fffa9c1d 2225}
bbf6f052 2226\f
2227/* Generate several move instructions to clear LEN bytes of block TO.
2228 (A MEM rtx with BLKmode). The caller must pass TO through
2229 protect_from_queue before calling. ALIGN (in bytes) is the maximum
2230 alignment we can assume. */
2231
2232static void
2233clear_by_pieces (to, len, align)
2234 rtx to;
2235 int len, align;
2236{
2237 struct clear_by_pieces data;
2238 rtx to_addr = XEXP (to, 0);
2239 int max_size = MOVE_MAX_PIECES + 1;
2240 enum machine_mode mode = VOIDmode, tmode;
2241 enum insn_code icode;
2242
2243 data.offset = 0;
2244 data.to_addr = to_addr;
2245 data.to = to;
2246 data.autinc_to
2247 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2248 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2249
2250 data.explicit_inc_to = 0;
2251 data.reverse
2252 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2253 if (data.reverse) data.offset = len;
2254 data.len = len;
2255
2256 data.to_struct = MEM_IN_STRUCT_P (to);
2257
2258 /* If clearing requires more than two move insns,
2259 copy addresses to registers (to make displacements shorter)
2260 and use post-increment if available. */
2261 if (!data.autinc_to
2262 && move_by_pieces_ninsns (len, align) > 2)
2263 {
2264 /* Determine the main mode we'll be using */
2265 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2266 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2267 if (GET_MODE_SIZE (tmode) < max_size)
2268 mode = tmode;
2269
2270 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
2271 {
2272 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2273 data.autinc_to = 1;
2274 data.explicit_inc_to = -1;
2275 }
fbe1758d 2276 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2277 {
2278 data.to_addr = copy_addr_to_reg (to_addr);
2279 data.autinc_to = 1;
2280 data.explicit_inc_to = 1;
2281 }
2282 if (!data.autinc_to && CONSTANT_P (to_addr))
2283 data.to_addr = copy_addr_to_reg (to_addr);
2284 }
2285
e1565e65 2286 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
2287 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2288 align = MOVE_MAX;
2289
2290 /* First move what we can in the largest integer mode, then go to
2291 successively smaller modes. */
2292
2293 while (max_size > 1)
2294 {
2295 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2296 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2297 if (GET_MODE_SIZE (tmode) < max_size)
2298 mode = tmode;
2299
2300 if (mode == VOIDmode)
2301 break;
2302
2303 icode = mov_optab->handlers[(int) mode].insn_code;
2304 if (icode != CODE_FOR_nothing
2305 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2306 GET_MODE_SIZE (mode)))
2307 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2308
2309 max_size = GET_MODE_SIZE (mode);
2310 }
2311
2312 /* The code above should have handled everything. */
2313 if (data.len != 0)
2314 abort ();
2315}
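/* Worked example (illustrative): clearing LEN == 11 bytes on a 32-bit
   target with adequate alignment.  The loop above first selects SImode
   and emits two 4-byte stores of zero, then shrinks max_size and emits
   one HImode store and one QImode store, leaving data.len == 0.  */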
2316
2317/* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2318 with move instructions for mode MODE. GENFUN is the gen_... function
2319 to make a move insn for that mode. DATA has all the other info. */
2320
2321static void
2322clear_by_pieces_1 (genfun, mode, data)
eae4b970 2323 rtx (*genfun) PROTO ((rtx, ...));
2324 enum machine_mode mode;
2325 struct clear_by_pieces *data;
2326{
2327 register int size = GET_MODE_SIZE (mode);
2328 register rtx to1;
2329
2330 while (data->len >= size)
2331 {
2332 if (data->reverse) data->offset -= size;
2333
2334 to1 = (data->autinc_to
38a448ca 2335 ? gen_rtx_MEM (mode, data->to_addr)
2336 : copy_rtx (change_address (data->to, mode,
2337 plus_constant (data->to_addr,
2338 data->offset))));
2339 MEM_IN_STRUCT_P (to1) = data->to_struct;
2340
940da324 2341 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
9de08200 2342 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2343
2344 emit_insn ((*genfun) (to1, const0_rtx));
940da324 2345 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2346 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2347
2348 if (! data->reverse) data->offset += size;
2349
2350 data->len -= size;
2351 }
2352}
2353\f
bbf6f052 2354/* Write zeros through the storage of OBJECT.
9de08200 2355 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
e9a25f70 2356 the maximum alignment we can assume, measured in bytes.
bbf6f052 2357
2358 If we call a function that returns the length of the block, return it. */
2359
2360rtx
9de08200 2361clear_storage (object, size, align)
bbf6f052 2362 rtx object;
4c08eef0 2363 rtx size;
9de08200 2364 int align;
bbf6f052 2365{
2366#ifdef TARGET_MEM_FUNCTIONS
2367 static tree fn;
2368 tree call_expr, arg_list;
2369#endif
2370 rtx retval = 0;
2371
2372 if (GET_MODE (object) == BLKmode)
2373 {
2374 object = protect_from_queue (object, 1);
2375 size = protect_from_queue (size, 0);
2376
2377 if (GET_CODE (size) == CONST_INT
fbe1758d 2378 && MOVE_BY_PIECES_P (INTVAL (size), align))
2379 clear_by_pieces (object, INTVAL (size), align);
2380
2381 else
2382 {
2383 /* Try the most limited insn first, because there's no point
2384 including more than one in the machine description unless
2385 the more limited one has some advantage. */
2386
2387 rtx opalign = GEN_INT (align);
2388 enum machine_mode mode;
2389
2390 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2391 mode = GET_MODE_WIDER_MODE (mode))
2392 {
2393 enum insn_code code = clrstr_optab[(int) mode];
a995e389 2394 insn_operand_predicate_fn pred;
2395
2396 if (code != CODE_FOR_nothing
2397 /* We don't need MODE to be narrower than
2398 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2399 the mode mask, as it is returned by the macro, it will
2400 definitely be less than the actual mode mask. */
2401 && ((GET_CODE (size) == CONST_INT
2402 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 2403 <= (GET_MODE_MASK (mode) >> 1)))
9de08200 2404 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2405 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2406 || (*pred) (object, BLKmode))
2407 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2408 || (*pred) (opalign, VOIDmode)))
2409 {
2410 rtx op1;
2411 rtx last = get_last_insn ();
2412 rtx pat;
2413
2414 op1 = convert_to_mode (mode, size, 1);
2415 pred = insn_data[(int) code].operand[1].predicate;
2416 if (pred != 0 && ! (*pred) (op1, mode))
2417 op1 = copy_to_mode_reg (mode, op1);
2418
2419 pat = GEN_FCN ((int) code) (object, op1, opalign);
2420 if (pat)
2421 {
2422 emit_insn (pat);
e9a25f70 2423 return 0;
2424 }
2425 else
2426 delete_insns_since (last);
2427 }
2428 }
2429
4bc973ae 2430 /* OBJECT or SIZE may have been passed through protect_from_queue.
9de08200 2431
2432 It is unsafe to save the value generated by protect_from_queue
2433 and reuse it later. Consider what happens if emit_queue is
2434 called before the return value from protect_from_queue is used.
52cf7115 2435
2436 Expansion of the CALL_EXPR below will call emit_queue before
2437 we are finished emitting RTL for argument setup. So if we are
2438 not careful we could get the wrong value for an argument.
52cf7115 2439
2440 To avoid this problem we go ahead and emit code to copy OBJECT
2441 and SIZE into new pseudos. We can then place those new pseudos
2442 into an RTL_EXPR and use them later, even after a call to
2443 emit_queue.
52cf7115 2444
2445 Note this is not strictly needed for library calls since they
2446 do not call emit_queue before loading their arguments. However,
2447 we may need to have library calls call emit_queue in the future
2448 since failing to do so could cause problems for targets which
2449 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2450 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 2451
2452#ifdef TARGET_MEM_FUNCTIONS
2453 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2454#else
2455 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2456 TREE_UNSIGNED (integer_type_node));
f3dc586a 2457 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
4bc973ae 2458#endif
52cf7115 2459
52cf7115 2460
2461#ifdef TARGET_MEM_FUNCTIONS
2462 /* It is incorrect to use the libcall calling conventions to call
2463 memset in this context.
52cf7115 2464
2465 This could be a user call to memset and the user may wish to
2466 examine the return value from memset.
52cf7115 2467
2468 For targets where libcalls and normal calls have different
2469 conventions for returning pointers, we could end up generating
2470 incorrect code.
2471
2472 So instead of using a libcall sequence we build up a suitable
2473 CALL_EXPR and expand the call in the normal fashion. */
2474 if (fn == NULL_TREE)
2475 {
2476 tree fntype;
2477
2478 /* This was copied from except.c; I don't know whether all of it is
2479 necessary in this context. */
2480 fn = get_identifier ("memset");
2481 push_obstacks_nochange ();
2482 end_temporary_allocation ();
2483 fntype = build_pointer_type (void_type_node);
2484 fntype = build_function_type (fntype, NULL_TREE);
2485 fn = build_decl (FUNCTION_DECL, fn, fntype);
d7db6646 2486 ggc_add_tree_root (&fn, 1);
2487 DECL_EXTERNAL (fn) = 1;
2488 TREE_PUBLIC (fn) = 1;
2489 DECL_ARTIFICIAL (fn) = 1;
2490 make_decl_rtl (fn, NULL_PTR, 1);
2491 assemble_external (fn);
2492 pop_obstacks ();
2493 }
2494
2495 /* We need to make an argument list for the function call.
2496
2497 memset has three arguments: the first is a void * address, the
2498 second an integer with the initialization value, and the last a
2499 size_t byte count for the copy. */
2500 arg_list
2501 = build_tree_list (NULL_TREE,
2502 make_tree (build_pointer_type (void_type_node),
2503 object));
2504 TREE_CHAIN (arg_list)
2505 = build_tree_list (NULL_TREE,
2506 make_tree (integer_type_node, const0_rtx));
2507 TREE_CHAIN (TREE_CHAIN (arg_list))
2508 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2509 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2510
2511 /* Now we have to build up the CALL_EXPR itself. */
2512 call_expr = build1 (ADDR_EXPR,
2513 build_pointer_type (TREE_TYPE (fn)), fn);
2514 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2515 call_expr, arg_list, NULL_TREE);
2516 TREE_SIDE_EFFECTS (call_expr) = 1;
2517
2518 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
bbf6f052 2519#else
9de08200 2520 emit_library_call (bzero_libfunc, 0,
fe7bbd2a 2521 VOIDmode, 2, object, Pmode, size,
9de08200 2522 TYPE_MODE (integer_type_node));
bbf6f052 2523#endif
9de08200 2524 }
2525 }
2526 else
66ed0683 2527 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2528
2529 return retval;
2530}
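/* Usage sketch (illustrative; OBJ is a hypothetical BLKmode MEM):
   zero a 64-byte word-aligned block.  A small constant SIZE is handled
   by clear_by_pieces; otherwise a clrstr pattern or the memset/bzero
   call emitted above is used.  */
#if 0
  clear_storage (obj, GEN_INT (64), UNITS_PER_WORD);
#endif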
2531
2532/* Generate code to copy Y into X.
2533 Both Y and X must have the same mode, except that
2534 Y can be a constant with VOIDmode.
2535 This mode cannot be BLKmode; use emit_block_move for that.
2536
2537 Return the last instruction emitted. */
2538
2539rtx
2540emit_move_insn (x, y)
2541 rtx x, y;
2542{
2543 enum machine_mode mode = GET_MODE (x);
2544
2545 x = protect_from_queue (x, 1);
2546 y = protect_from_queue (y, 0);
2547
2548 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2549 abort ();
2550
2551 /* Never force constant_p_rtx to memory. */
2552 if (GET_CODE (y) == CONSTANT_P_RTX)
2553 ;
2554 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2555 y = force_const_mem (mode, y);
2556
2557 /* If X or Y are memory references, verify that their addresses are valid
2558 for the machine. */
2559 if (GET_CODE (x) == MEM
2560 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2561 && ! push_operand (x, GET_MODE (x)))
2562 || (flag_force_addr
2563 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2564 x = change_address (x, VOIDmode, XEXP (x, 0));
2565
2566 if (GET_CODE (y) == MEM
2567 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2568 || (flag_force_addr
2569 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2570 y = change_address (y, VOIDmode, XEXP (y, 0));
2571
2572 if (mode == BLKmode)
2573 abort ();
2574
2575 return emit_move_insn_1 (x, y);
2576}
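/* Usage sketch (illustrative): load the constant 42 into a fresh SImode
   pseudo.  The constant would be forced into memory first only if it
   were not LEGITIMATE_CONSTANT_P for the target.  */
#if 0
  rtx reg = gen_reg_rtx (SImode);
  emit_move_insn (reg, GEN_INT (42));
#endif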
2577
2578/* Low level part of emit_move_insn.
2579 Called just like emit_move_insn, but assumes X and Y
2580 are basically valid. */
2581
2582rtx
2583emit_move_insn_1 (x, y)
2584 rtx x, y;
2585{
2586 enum machine_mode mode = GET_MODE (x);
2587 enum machine_mode submode;
2588 enum mode_class class = GET_MODE_CLASS (mode);
2589 int i;
2590
2591 if (mode >= MAX_MACHINE_MODE)
2592 abort ();
2593
2594 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2595 return
2596 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2597
89742723 2598 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2599 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2600 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2601 * BITS_PER_UNIT),
2602 (class == MODE_COMPLEX_INT
2603 ? MODE_INT : MODE_FLOAT),
2604 0))
2605 && (mov_optab->handlers[(int) submode].insn_code
2606 != CODE_FOR_nothing))
2607 {
2608 /* Don't split destination if it is a stack push. */
2609 int stack = push_operand (x, GET_MODE (x));
7308a047 2610
2611 /* If this is a stack push, push the highpart first, so it
2612 will be in the argument order.
2613
2614 In that case, change_address is used only to convert
2615 the mode, not to change the address. */
2616 if (stack)
2617 {
2618 /* Note that the real part always precedes the imag part in memory
2619 regardless of machine's endianness. */
2620#ifdef STACK_GROWS_DOWNWARD
2621 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2622 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2623 gen_imagpart (submode, y)));
c937357e 2624 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2625 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2626 gen_realpart (submode, y)));
2627#else
2628 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2629 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2630 gen_realpart (submode, y)));
c937357e 2631 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2632 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2633 gen_imagpart (submode, y)));
2634#endif
2635 }
2636 else
2637 {
2638 rtx realpart_x, realpart_y;
2639 rtx imagpart_x, imagpart_y;
2640
2641 /* If this is a complex value with each part being smaller than a
2642 word, the usual calling sequence will likely pack the pieces into
2643 a single register. Unfortunately, SUBREG of hard registers only
2644 deals in terms of words, so we have a problem converting input
2645 arguments to the CONCAT of two registers that is used elsewhere
2646 for complex values. If this is before reload, we can copy it into
2647 memory and reload. FIXME, we should see about using extract and
2648 insert on integer registers, but complex short and complex char
2649 variables should be rarely used. */
2650 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2651 && (reload_in_progress | reload_completed) == 0)
2652 {
2653 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2654 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2655
2656 if (packed_dest_p || packed_src_p)
2657 {
2658 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2659 ? MODE_FLOAT : MODE_INT);
2660
2661 enum machine_mode reg_mode =
2662 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2663
2664 if (reg_mode != BLKmode)
2665 {
2666 rtx mem = assign_stack_temp (reg_mode,
2667 GET_MODE_SIZE (mode), 0);
2668
2669 rtx cmem = change_address (mem, mode, NULL_RTX);
2670
01d939e8 2671 cfun->cannot_inline = "function uses short complex types";
2672
2673 if (packed_dest_p)
2674 {
2675 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2676 emit_move_insn_1 (cmem, y);
2677 return emit_move_insn_1 (sreg, mem);
2678 }
2679 else
2680 {
2681 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2682 emit_move_insn_1 (mem, sreg);
2683 return emit_move_insn_1 (x, cmem);
2684 }
2685 }
2686 }
2687 }
2688
2689 realpart_x = gen_realpart (submode, x);
2690 realpart_y = gen_realpart (submode, y);
2691 imagpart_x = gen_imagpart (submode, x);
2692 imagpart_y = gen_imagpart (submode, y);
2693
2694 /* Show the output dies here. This is necessary for SUBREGs
2695 of pseudos since we cannot track their lifetimes correctly;
2696 hard regs shouldn't appear here except as return values.
2697 We never want to emit such a clobber after reload. */
2698 if (x != y
2699 && ! (reload_in_progress || reload_completed)
2700 && (GET_CODE (realpart_x) == SUBREG
2701 || GET_CODE (imagpart_x) == SUBREG))
b2e7e6fb 2702 {
c14c6529 2703 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
b2e7e6fb 2704 }
2638126a 2705
c937357e 2706 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
235ae7be 2707 (realpart_x, realpart_y));
c937357e 2708 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
235ae7be 2709 (imagpart_x, imagpart_y));
c937357e 2710 }
7308a047 2711
7a1ab50a 2712 return get_last_insn ();
2713 }
2714
2715 /* This will handle any multi-word mode that lacks a move_insn pattern.
2716 However, you will get better code if you define such patterns,
2717 even if they must turn into multiple assembler instructions. */
a4320483 2718 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2719 {
2720 rtx last_insn = 0;
2721 rtx seq;
2722 int need_clobber;
6551fa4d 2723
2724#ifdef PUSH_ROUNDING
2725
2726 /* If X is a push on the stack, do the push now and replace
2727 X with a reference to the stack pointer. */
2728 if (push_operand (x, GET_MODE (x)))
2729 {
2730 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2731 x = change_address (x, VOIDmode, stack_pointer_rtx);
2732 }
2733#endif
2734
235ae7be 2735 start_sequence ();
15a7a8ec 2736
235ae7be 2737 need_clobber = 0;
2738 for (i = 0;
2739 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2740 i++)
2741 {
2742 rtx xpart = operand_subword (x, i, 1, mode);
2743 rtx ypart = operand_subword (y, i, 1, mode);
2744
2745 /* If we can't get a part of Y, put Y into memory if it is a
2746 constant. Otherwise, force it into a register. If we still
2747 can't get a part of Y, abort. */
2748 if (ypart == 0 && CONSTANT_P (y))
2749 {
2750 y = force_const_mem (mode, y);
2751 ypart = operand_subword (y, i, 1, mode);
2752 }
2753 else if (ypart == 0)
2754 ypart = operand_subword_force (y, i, mode);
2755
2756 if (xpart == 0 || ypart == 0)
2757 abort ();
2758
2759 need_clobber |= (GET_CODE (xpart) == SUBREG);
2760
2761 last_insn = emit_move_insn (xpart, ypart);
2762 }
6551fa4d 2763
2764 seq = gen_sequence ();
2765 end_sequence ();
2766
2767 /* Show the output dies here. This is necessary for SUBREGs
2768 of pseudos since we cannot track their lifetimes correctly;
2769 hard regs shouldn't appear here except as return values.
2770 We never want to emit such a clobber after reload. */
2771 if (x != y
2772 && ! (reload_in_progress || reload_completed)
2773 && need_clobber != 0)
2774 {
2775 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2776 }
2777
2778 emit_insn (seq);
2779
2780 return last_insn;
2781 }
2782 else
2783 abort ();
2784}
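/* Worked example (illustrative): a DImode move on a 32-bit target with
   no movdi pattern reaches the multi-word case above and is emitted as
   two word_mode moves, one per operand_subword; the CLOBBER of the
   whole destination is emitted first only if some destination subword
   turned out to be a SUBREG.  */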
2785\f
2786/* Pushing data onto the stack. */
2787
2788/* Push a block of length SIZE (perhaps variable)
2789 and return an rtx to address the beginning of the block.
2790 Note that it is not possible for the value returned to be a QUEUED.
2791 The value may be virtual_outgoing_args_rtx.
2792
2793 EXTRA is the number of bytes of padding to push in addition to SIZE.
2794 BELOW nonzero means this padding comes at low addresses;
2795 otherwise, the padding comes at high addresses. */
2796
2797rtx
2798push_block (size, extra, below)
2799 rtx size;
2800 int extra, below;
2801{
2802 register rtx temp;
2803
2804 size = convert_modes (Pmode, ptr_mode, size, 1);
2805 if (CONSTANT_P (size))
2806 anti_adjust_stack (plus_constant (size, extra));
2807 else if (GET_CODE (size) == REG && extra == 0)
2808 anti_adjust_stack (size);
2809 else
2810 {
2811 rtx temp = copy_to_mode_reg (Pmode, size);
2812 if (extra != 0)
906c4e36 2813 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
2814 temp, 0, OPTAB_LIB_WIDEN);
2815 anti_adjust_stack (temp);
2816 }
2817
2818#if defined (STACK_GROWS_DOWNWARD) \
2819 || (defined (ARGS_GROW_DOWNWARD) \
2820 && !defined (ACCUMULATE_OUTGOING_ARGS))
2821
2822 /* Return the lowest stack address when STACK or ARGS grow downward and
2823 we are not accumulating outgoing arguments (the c4x port uses such
2824 conventions). */
2825 temp = virtual_outgoing_args_rtx;
2826 if (extra != 0 && below)
2827 temp = plus_constant (temp, extra);
2828#else
2829 if (GET_CODE (size) == CONST_INT)
2830 temp = plus_constant (virtual_outgoing_args_rtx,
2831 - INTVAL (size) - (below ? 0 : extra));
2832 else if (extra != 0 && !below)
38a448ca 2833 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2834 negate_rtx (Pmode, plus_constant (size, extra)));
2835 else
38a448ca 2836 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
c5c76735 2837 negate_rtx (Pmode, size));
2838#endif
2839
2840 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2841}
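/* Worked example (illustrative): push_block (GEN_INT (32), 0, 0) calls
   anti_adjust_stack with 32, growing the stack by 32 bytes; on a
   STACK_GROWS_DOWNWARD target the block then begins at
   virtual_outgoing_args_rtx, which is the address returned.  */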
2842
87e38d84 2843rtx
2844gen_push_operand ()
2845{
38a448ca 2846 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2847}
2848
2849/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2850 block of SIZE bytes. */
2851
2852static rtx
2853get_push_address (size)
2854 int size;
2855{
2856 register rtx temp;
2857
2858 if (STACK_PUSH_CODE == POST_DEC)
38a448ca 2859 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427 2860 else if (STACK_PUSH_CODE == POST_INC)
38a448ca 2861 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2862 else
2863 temp = stack_pointer_rtx;
2864
c85f7c16 2865 return copy_to_reg (temp);
2866}
2867
2868/* Generate code to push X onto the stack, assuming it has mode MODE and
2869 type TYPE.
2870 MODE is redundant except when X is a CONST_INT (since they don't
2871 carry mode info).
2872 SIZE is an rtx for the size of data to be copied (in bytes),
2873 needed only if X is BLKmode.
2874
2875 ALIGN (in bytes) is the maximum alignment we can assume.
2876
2877 If PARTIAL and REG are both nonzero, then copy that many of the first
2878 words of X into registers starting with REG, and push the rest of X.
2879 The amount of space pushed is decreased by PARTIAL words,
2880 rounded *down* to a multiple of PARM_BOUNDARY.
2881 REG must be a hard register in this case.
2882 If REG is zero but PARTIAL is not, take all other actions for an
2883 argument partially in registers, but do not actually load any
2884 registers.
2885
2886 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 2887 This is ignored if an argument block has already been allocated.
2888
2889 On a machine that lacks real push insns, ARGS_ADDR is the address of
2890 the bottom of the argument block for this call. We use indexing off there
2891 to store the arg. On machines with push insns, ARGS_ADDR is 0 when an
2892 argument block has not been preallocated.
2893
2894 ARGS_SO_FAR is the size of args previously pushed for this call.
2895
2896 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2897 for arguments passed in registers. If nonzero, it will be the number
2898 of bytes required. */
2899
2900void
2901emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2902 args_addr, args_so_far, reg_parm_stack_space,
2903 alignment_pad)
2904 register rtx x;
2905 enum machine_mode mode;
2906 tree type;
2907 rtx size;
2908 int align;
2909 int partial;
2910 rtx reg;
2911 int extra;
2912 rtx args_addr;
2913 rtx args_so_far;
e5e809f4 2914 int reg_parm_stack_space;
4fc026cd 2915 rtx alignment_pad;
2916{
2917 rtx xinner;
2918 enum direction stack_direction
2919#ifdef STACK_GROWS_DOWNWARD
2920 = downward;
2921#else
2922 = upward;
2923#endif
2924
2925 /* Decide where to pad the argument: `downward' for below,
2926 `upward' for above, or `none' for don't pad it.
2927 Default is below for small data on big-endian machines; else above. */
2928 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2929
2930 /* Invert direction if stack is post-update. */
2931 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2932 if (where_pad != none)
2933 where_pad = (where_pad == downward ? upward : downward);
2934
2935 xinner = x = protect_from_queue (x, 0);
2936
2937 if (mode == BLKmode)
2938 {
2939 /* Copy a block into the stack, entirely or partially. */
2940
2941 register rtx temp;
2942 int used = partial * UNITS_PER_WORD;
2943 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2944 int skip;
2945
2946 if (size == 0)
2947 abort ();
2948
2949 used -= offset;
2950
2951 /* USED is now the # of bytes we need not copy to the stack
2952 because registers will take care of them. */
2953
2954 if (partial != 0)
2955 xinner = change_address (xinner, BLKmode,
2956 plus_constant (XEXP (xinner, 0), used));
2957
2958 /* If the partial register-part of the arg counts in its stack size,
2959 skip the part of stack space corresponding to the registers.
2960 Otherwise, start copying to the beginning of the stack space,
2961 by setting SKIP to 0. */
e5e809f4 2962 skip = (reg_parm_stack_space == 0) ? 0 : used;
2963
2964#ifdef PUSH_ROUNDING
2965 /* Do it with several push insns if that doesn't take lots of insns
2966 and if there is no difficulty with push insns that skip bytes
2967 on the stack for alignment purposes. */
2968 if (args_addr == 0
2969 && GET_CODE (size) == CONST_INT
2970 && skip == 0
15914757 2971 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2972 /* Here we avoid the case of a structure whose weak alignment
2973 forces many pushes of a small amount of data,
2974 and such small pushes do rounding that causes trouble. */
e1565e65 2975 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
e87b4f3f 2976 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 2977 || PUSH_ROUNDING (align) == align)
2978 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2979 {
2980 /* Push padding now if padding above and stack grows down,
2981 or if padding below and stack grows up.
2982 But if space already allocated, this has already been done. */
2983 if (extra && args_addr == 0
2984 && where_pad != none && where_pad != stack_direction)
906c4e36 2985 anti_adjust_stack (GEN_INT (extra));
bbf6f052 2986
38a448ca 2987 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
bbf6f052 2988 INTVAL (size) - used, align);
921b3427 2989
7d384cc0 2990 if (current_function_check_memory_usage && ! in_check_memory_usage)
2991 {
2992 rtx temp;
2993
956d6950 2994 in_check_memory_usage = 1;
921b3427 2995 temp = get_push_address (INTVAL(size) - used);
c85f7c16 2996 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427 2997 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2998 temp, Pmode,
2999 XEXP (xinner, 0), Pmode,
3000 GEN_INT (INTVAL(size) - used),
3001 TYPE_MODE (sizetype));
3002 else
3003 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 3004 temp, Pmode,
3005 GEN_INT (INTVAL(size) - used),
3006 TYPE_MODE (sizetype),
3007 GEN_INT (MEMORY_USE_RW),
3008 TYPE_MODE (integer_type_node));
3009 in_check_memory_usage = 0;
921b3427 3010 }
3011 }
3012 else
3013#endif /* PUSH_ROUNDING */
3014 {
3015 /* Otherwise make space on the stack and copy the data
3016 to the address of that space. */
3017
3018 /* Deduct words put into registers from the size we must copy. */
3019 if (partial != 0)
3020 {
3021 if (GET_CODE (size) == CONST_INT)
906c4e36 3022 size = GEN_INT (INTVAL (size) - used);
3023 else
3024 size = expand_binop (GET_MODE (size), sub_optab, size,
3025 GEN_INT (used), NULL_RTX, 0,
3026 OPTAB_LIB_WIDEN);
3027 }
3028
3029 /* Get the address of the stack space.
3030 In this case, we do not deal with EXTRA separately.
3031 A single stack adjust will do. */
3032 if (! args_addr)
3033 {
3034 temp = push_block (size, extra, where_pad == downward);
3035 extra = 0;
3036 }
3037 else if (GET_CODE (args_so_far) == CONST_INT)
3038 temp = memory_address (BLKmode,
3039 plus_constant (args_addr,
3040 skip + INTVAL (args_so_far)));
3041 else
3042 temp = memory_address (BLKmode,
3043 plus_constant (gen_rtx_PLUS (Pmode,
3044 args_addr,
3045 args_so_far),
bbf6f052 3046 skip));
7d384cc0 3047 if (current_function_check_memory_usage && ! in_check_memory_usage)
3048 {
3049 rtx target;
3050
956d6950 3051 in_check_memory_usage = 1;
921b3427 3052 target = copy_to_reg (temp);
c85f7c16 3053 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427 3054 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3055 target, Pmode,
3056 XEXP (xinner, 0), Pmode,
3057 size, TYPE_MODE (sizetype));
3058 else
3059 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 3060 target, Pmode,
921b3427 3061 size, TYPE_MODE (sizetype),
3062 GEN_INT (MEMORY_USE_RW),
3063 TYPE_MODE (integer_type_node));
3064 in_check_memory_usage = 0;
921b3427 3065 }
3066
3067 /* TEMP is the address of the block. Copy the data there. */
3068 if (GET_CODE (size) == CONST_INT
fbe1758d 3069 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
bbf6f052 3070 {
38a448ca 3071 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
3072 INTVAL (size), align);
3073 goto ret;
3074 }
e5e809f4 3075 else
bbf6f052 3076 {
3077 rtx opalign = GEN_INT (align);
3078 enum machine_mode mode;
9e6a5703 3079 rtx target = gen_rtx_MEM (BLKmode, temp);
3080
3081 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3082 mode != VOIDmode;
3083 mode = GET_MODE_WIDER_MODE (mode))
c841050e 3084 {
e5e809f4 3085 enum insn_code code = movstr_optab[(int) mode];
a995e389 3086 insn_operand_predicate_fn pred;
3087
3088 if (code != CODE_FOR_nothing
3089 && ((GET_CODE (size) == CONST_INT
3090 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3091 <= (GET_MODE_MASK (mode) >> 1)))
3092 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3093 && (!(pred = insn_data[(int) code].operand[0].predicate)
3094 || ((*pred) (target, BLKmode)))
3095 && (!(pred = insn_data[(int) code].operand[1].predicate)
3096 || ((*pred) (xinner, BLKmode)))
3097 && (!(pred = insn_data[(int) code].operand[3].predicate)
3098 || ((*pred) (opalign, VOIDmode))))
e5e809f4
JL
3099 {
3100 rtx op2 = convert_to_mode (mode, size, 1);
3101 rtx last = get_last_insn ();
3102 rtx pat;
3103
3104 pred = insn_data[(int) code].operand[2].predicate;
3105 if (pred != 0 && ! (*pred) (op2, mode))
3106 op2 = copy_to_mode_reg (mode, op2);
3107
3108 pat = GEN_FCN ((int) code) (target, xinner,
3109 op2, opalign);
3110 if (pat)
3111 {
3112 emit_insn (pat);
3113 goto ret;
3114 }
3115 else
3116 delete_insns_since (last);
3117 }
c841050e 3118 }
bbf6f052 3119 }
3120
3121#ifndef ACCUMULATE_OUTGOING_ARGS
3122 /* If the source is referenced relative to the stack pointer,
3123 copy it to another register to stabilize it. We do not need
3124 to do this if we know that we won't be changing sp. */
3125
3126 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3127 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3128 temp = copy_to_reg (temp);
3129#endif
3130
3131 /* Make inhibit_defer_pop nonzero around the library call
3132 to force it to pop the bcopy-arguments right away. */
3133 NO_DEFER_POP;
3134#ifdef TARGET_MEM_FUNCTIONS
d562e42e 3135 emit_library_call (memcpy_libfunc, 0,
bbf6f052 3136 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
0fa83258
RK
3137 convert_to_mode (TYPE_MODE (sizetype),
3138 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3139 TYPE_MODE (sizetype));
bbf6f052 3140#else
d562e42e 3141 emit_library_call (bcopy_libfunc, 0,
bbf6f052 3142 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3143 convert_to_mode (TYPE_MODE (integer_type_node),
3144 size,
3145 TREE_UNSIGNED (integer_type_node)),
3146 TYPE_MODE (integer_type_node));
3147#endif
3148 OK_DEFER_POP;
3149 }
3150 }
3151 else if (partial > 0)
3152 {
3153 /* Scalar partly in registers. */
3154
3155 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3156 int i;
3157 int not_stack;
3158 /* # words of start of argument
3159 that we must make space for but need not store. */
3160 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3161 int args_offset = INTVAL (args_so_far);
3162 int skip;
3163
3164 /* Push padding now if padding above and stack grows down,
3165 or if padding below and stack grows up.
3166 But if space already allocated, this has already been done. */
3167 if (extra && args_addr == 0
3168 && where_pad != none && where_pad != stack_direction)
906c4e36 3169 anti_adjust_stack (GEN_INT (extra));
3170
3171 /* If we make space by pushing it, we might as well push
3172 the real data. Otherwise, we can leave OFFSET nonzero
3173 and leave the space uninitialized. */
3174 if (args_addr == 0)
3175 offset = 0;
3176
3177 /* Now NOT_STACK gets the number of words that we don't need to
3178 allocate on the stack. */
3179 not_stack = partial - offset;
3180
3181 /* If the partial register-part of the arg counts in its stack size,
3182 skip the part of stack space corresponding to the registers.
3183 Otherwise, start copying to the beginning of the stack space,
3184 by setting SKIP to 0. */
e5e809f4 3185 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3186
3187 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3188 x = validize_mem (force_const_mem (mode, x));
3189
3190 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3191 SUBREGs of such registers are not allowed. */
3192 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3193 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3194 x = copy_to_reg (x);
3195
3196 /* Loop over all the words allocated on the stack for this arg. */
3197 /* We can do it by words, because any scalar bigger than a word
3198 has a size a multiple of a word. */
3199#ifndef PUSH_ARGS_REVERSED
3200 for (i = not_stack; i < size; i++)
3201#else
3202 for (i = size - 1; i >= not_stack; i--)
3203#endif
3204 if (i >= not_stack + offset)
3205 emit_push_insn (operand_subword_force (x, i, mode),
3206 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3207 0, args_addr,
3208 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3209 * UNITS_PER_WORD)),
4fc026cd 3210 reg_parm_stack_space, alignment_pad);
3211 }
3212 else
3213 {
3214 rtx addr;
921b3427 3215 rtx target = NULL_RTX;
3216
3217 /* Push padding now if padding above and stack grows down,
3218 or if padding below and stack grows up.
3219 But if space already allocated, this has already been done. */
3220 if (extra && args_addr == 0
3221 && where_pad != none && where_pad != stack_direction)
906c4e36 3222 anti_adjust_stack (GEN_INT (extra));
3223
3224#ifdef PUSH_ROUNDING
3225 if (args_addr == 0)
3226 addr = gen_push_operand ();
3227 else
3228#endif
3229 {
3230 if (GET_CODE (args_so_far) == CONST_INT)
3231 addr
3232 = memory_address (mode,
3233 plus_constant (args_addr,
3234 INTVAL (args_so_far)));
3235 else
3236 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3237 args_so_far));
3238 target = addr;
3239 }
bbf6f052 3240
38a448ca 3241 emit_move_insn (gen_rtx_MEM (mode, addr), x);
921b3427 3242
7d384cc0 3243 if (current_function_check_memory_usage && ! in_check_memory_usage)
921b3427 3244 {
956d6950 3245 in_check_memory_usage = 1;
3246 if (target == 0)
3247 target = get_push_address (GET_MODE_SIZE (mode));
3248
c85f7c16 3249 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427 3250 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3251 target, Pmode,
3252 XEXP (x, 0), Pmode,
3253 GEN_INT (GET_MODE_SIZE (mode)),
3254 TYPE_MODE (sizetype));
3255 else
3256 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 3257 target, Pmode,
3258 GEN_INT (GET_MODE_SIZE (mode)),
3259 TYPE_MODE (sizetype),
3260 GEN_INT (MEMORY_USE_RW),
3261 TYPE_MODE (integer_type_node));
3262 in_check_memory_usage = 0;
921b3427 3263 }
3264 }
3265
3266 ret:
3267 /* If part should go in registers, copy that part
3268 into the appropriate registers. Do this now, at the end,
3269 since mem-to-mem copies above may do function calls. */
cd048831 3270 if (partial > 0 && reg != 0)
3271 {
3272 /* Handle calls that pass values in multiple non-contiguous locations.
3273 The Irix 6 ABI has examples of this. */
3274 if (GET_CODE (reg) == PARALLEL)
aac5cc16 3275 emit_group_load (reg, x, -1, align); /* ??? size? */
3276 else
3277 move_block_to_reg (REGNO (reg), x, partial, mode);
3278 }
3279
3280 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3281 anti_adjust_stack (GEN_INT (extra));
3282
3283 if (alignment_pad)
3284 anti_adjust_stack (alignment_pad);
3285}
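/* Worked example (illustrative): pushing a 4-word scalar with
   PARTIAL == 2 and ARGS_ADDR == 0 on a 32-bit target.  NOT_STACK
   becomes 2, so the loop above pushes only words 2 and 3; the code at
   "ret:" then calls move_block_to_reg to load words 0 and 1 of X into
   REG and the following hard register.  */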
3286\f
3287/* Expand an assignment that stores the value of FROM into TO.
3288 If WANT_VALUE is nonzero, return an rtx for the value of TO.
3289 (This may contain a QUEUED rtx;
3290 if the value is constant, this rtx is a constant.)
3291 Otherwise, the returned value is NULL_RTX.
3292
3293 SUGGEST_REG is no longer actually used.
3294 It used to mean, copy the value through a register
3295 and return that register, if that is possible.
709f5be1 3296 We now use WANT_VALUE to decide whether to do this. */
3297
3298rtx
3299expand_assignment (to, from, want_value, suggest_reg)
3300 tree to, from;
3301 int want_value;
c5c76735 3302 int suggest_reg ATTRIBUTE_UNUSED;
3303{
3304 register rtx to_rtx = 0;
3305 rtx result;
3306
3307 /* Don't crash if the lhs of the assignment was erroneous. */
3308
3309 if (TREE_CODE (to) == ERROR_MARK)
3310 {
3311 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3312 return want_value ? result : NULL_RTX;
3313 }
3314
3315 /* Assignment of a structure component needs special treatment
3316 if the structure component's rtx is not simply a MEM.
3317 Assignment of an array element at a constant index, and assignment of
3318 an array element in an unaligned packed structure field, have the same
3319 problem. */
bbf6f052 3320
3321 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3322 || TREE_CODE (to) == ARRAY_REF)
3323 {
3324 enum machine_mode mode1;
3325 int bitsize;
3326 int bitpos;
7bb0943f 3327 tree offset;
3328 int unsignedp;
3329 int volatilep = 0;
0088fcb1 3330 tree tem;
d78d243c 3331 int alignment;
3332
3333 push_temp_slots ();
3334 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3335 &unsignedp, &volatilep, &alignment);
3336
3337 /* If we are going to use store_bit_field and extract_bit_field,
3338 make sure to_rtx will be safe for multiple use. */
3339
3340 if (mode1 == VOIDmode && want_value)
3341 tem = stabilize_reference (tem);
3342
921b3427 3343 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
3344 if (offset != 0)
3345 {
906c4e36 3346 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3347
3348 if (GET_CODE (to_rtx) != MEM)
3349 abort ();
3350
3351 if (GET_MODE (offset_rtx) != ptr_mode)
3352 {
3353#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 3354 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
3355#else
3356 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3357#endif
3358 }
3359
3360 /* A constant address in TO_RTX can have VOIDmode, we must not try
3361 to call force_reg for that case. Avoid that case. */
3362 if (GET_CODE (to_rtx) == MEM
3363 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 3364 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
3365 && bitsize
3366 && (bitpos % bitsize) == 0
3367 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3368 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3369 {
3370 rtx temp = change_address (to_rtx, mode1,
3371 plus_constant (XEXP (to_rtx, 0),
3372 (bitpos /
3373 BITS_PER_UNIT)));
3374 if (GET_CODE (XEXP (temp, 0)) == REG)
3375 to_rtx = temp;
3376 else
3377 to_rtx = change_address (to_rtx, mode1,
3378 force_reg (GET_MODE (XEXP (temp, 0)),
3379 XEXP (temp, 0)));
3380 bitpos = 0;
3381 }
3382
7bb0943f 3383 to_rtx = change_address (to_rtx, VOIDmode,
38a448ca 3384 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
c5c76735
JL
3385 force_reg (ptr_mode,
3386 offset_rtx)));
7bb0943f 3387 }
c5c76735 3388
bbf6f052
RK
3389 if (volatilep)
3390 {
3391 if (GET_CODE (to_rtx) == MEM)
01188446
JW
3392 {
3393 /* When the offset is zero, to_rtx is the address of the
3394 structure we are storing into, and hence may be shared.
3395 We must make a new MEM before setting the volatile bit. */
3396 if (offset == 0)
effbcc6a
RK
3397 to_rtx = copy_rtx (to_rtx);
3398
01188446
JW
3399 MEM_VOLATILE_P (to_rtx) = 1;
3400 }
bbf6f052
RK
3401#if 0 /* This was turned off because, when a field is volatile
3402 in an object which is not volatile, the object may be in a register,
3403 and then we would abort over here. */
3404 else
3405 abort ();
3406#endif
3407 }
3408
956d6950
JL
3409 if (TREE_CODE (to) == COMPONENT_REF
3410 && TREE_READONLY (TREE_OPERAND (to, 1)))
3411 {
8bd6ecc2 3412 if (offset == 0)
956d6950
JL
3413 to_rtx = copy_rtx (to_rtx);
3414
3415 RTX_UNCHANGING_P (to_rtx) = 1;
3416 }
3417
921b3427 3418 /* Check the access. */
7d384cc0 3419 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
921b3427
RK
3420 {
3421 rtx to_addr;
3422 int size;
3423 int best_mode_size;
3424 enum machine_mode best_mode;
3425
3426 best_mode = get_best_mode (bitsize, bitpos,
3427 TYPE_ALIGN (TREE_TYPE (tem)),
3428 mode1, volatilep);
3429 if (best_mode == VOIDmode)
3430 best_mode = QImode;
3431
3432 best_mode_size = GET_MODE_BITSIZE (best_mode);
3433 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3434 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3435 size *= GET_MODE_SIZE (best_mode);
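/* A worked example of the size computation above, assuming a
   hypothetical 10-bit field at BITPOS 12 with BEST_MODE == QImode
   (BEST_MODE_SIZE == 8): CEIL ((12 % 8) + 10, 8) == CEIL (14, 8) == 2
   units of GET_MODE_SIZE (QImode) == 1 byte each, so the checker is
   asked about the 2 bytes that contain the field. */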
3436
3437 /* Check the access right of the pointer. */
e9a25f70
JL
3438 if (size)
3439 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 3440 to_addr, Pmode,
e9a25f70 3441 GEN_INT (size), TYPE_MODE (sizetype),
956d6950
JL
3442 GEN_INT (MEMORY_USE_WO),
3443 TYPE_MODE (integer_type_node));
921b3427
RK
3444 }
3445
bbf6f052
RK
3446 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3447 (want_value
3448 /* Spurious cast makes HPUX compiler happy. */
3449 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3450 : VOIDmode),
3451 unsignedp,
3452 /* Required alignment of containing datum. */
d78d243c 3453 alignment,
ece32014
MM
3454 int_size_in_bytes (TREE_TYPE (tem)),
3455 get_alias_set (to));
bbf6f052
RK
3456 preserve_temp_slots (result);
3457 free_temp_slots ();
0088fcb1 3458 pop_temp_slots ();
bbf6f052 3459
709f5be1
RS
3460 /* If the value is meaningful, convert RESULT to the proper mode.
3461 Otherwise, return nothing. */
5ffe63ed
RS
3462 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3463 TYPE_MODE (TREE_TYPE (from)),
3464 result,
3465 TREE_UNSIGNED (TREE_TYPE (to)))
709f5be1 3466 : NULL_RTX);
bbf6f052
RK
3467 }
3468
cd1db108
RS
3469 /* If the rhs is a function call and its value is not an aggregate,
3470 call the function before we start to compute the lhs.
3471 This is needed for correct code for cases such as
3472 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
3473 requires loading up part of an address in a separate insn.
3474
3475 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3476 a promoted variable where the zero- or sign- extension needs to be done.
3477 Handling this in the normal way is safe because no computation is done
3478 before the call. */
3479 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 3480 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1ad87b63 3481 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3482 {
0088fcb1
RK
3483 rtx value;
3484
3485 push_temp_slots ();
3486 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3487 if (to_rtx == 0)
921b3427 3488 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
aaf87c45 3489
fffa9c1d
JW
3490 /* Handle calls that return values in multiple non-contiguous locations.
3491 The Irix 6 ABI has examples of this. */
3492 if (GET_CODE (to_rtx) == PARALLEL)
aac5cc16
RH
3493 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3494 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
fffa9c1d 3495 else if (GET_MODE (to_rtx) == BLKmode)
db3ec607 3496 emit_block_move (to_rtx, value, expr_size (from),
ff9b5bd8 3497 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
aaf87c45 3498 else
6419e5b0
DT
3499 {
3500#ifdef POINTERS_EXTEND_UNSIGNED
ab40f612
DT
3501 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3502 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
6419e5b0
DT
3503 value = convert_memory_address (GET_MODE (to_rtx), value);
3504#endif
3505 emit_move_insn (to_rtx, value);
3506 }
cd1db108
RS
3507 preserve_temp_slots (to_rtx);
3508 free_temp_slots ();
0088fcb1 3509 pop_temp_slots ();
709f5be1 3510 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3511 }
3512
bbf6f052
RK
3513 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3514 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3515
3516 if (to_rtx == 0)
41472af8
MM
3517 {
3518 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3519 if (GET_CODE (to_rtx) == MEM)
3520 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3521 }
bbf6f052 3522
86d38d25 3523 /* Don't move directly into a return register. */
14a774a9
RK
3524 if (TREE_CODE (to) == RESULT_DECL
3525 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
86d38d25 3526 {
0088fcb1
RK
3527 rtx temp;
3528
3529 push_temp_slots ();
3530 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
3531
3532 if (GET_CODE (to_rtx) == PARALLEL)
3533 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
3534 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
3535 else
3536 emit_move_insn (to_rtx, temp);
3537
86d38d25
RS
3538 preserve_temp_slots (to_rtx);
3539 free_temp_slots ();
0088fcb1 3540 pop_temp_slots ();
709f5be1 3541 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3542 }
3543
bbf6f052
RK
3544 /* In case we are returning the contents of an object which overlaps
3545 the place the value is being stored, use a safe function when copying
3546 a value through a pointer into a structure value return block. */
3547 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3548 && current_function_returns_struct
3549 && !current_function_returns_pcc_struct)
3550 {
0088fcb1
RK
3551 rtx from_rtx, size;
3552
3553 push_temp_slots ();
33a20d10 3554 size = expr_size (from);
921b3427
RK
3555 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3556 EXPAND_MEMORY_USE_DONT);
3557
3558 /* Copy the rights of the bitmap. */
7d384cc0 3559 if (current_function_check_memory_usage)
921b3427 3560 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
6a9c4aed
MK
3561 XEXP (to_rtx, 0), Pmode,
3562 XEXP (from_rtx, 0), Pmode,
921b3427
RK
3563 convert_to_mode (TYPE_MODE (sizetype),
3564 size, TREE_UNSIGNED (sizetype)),
3565 TYPE_MODE (sizetype));
bbf6f052
RK
3566
3567#ifdef TARGET_MEM_FUNCTIONS
d562e42e 3568 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
3569 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3570 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
3571 convert_to_mode (TYPE_MODE (sizetype),
3572 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3573 TYPE_MODE (sizetype));
bbf6f052 3574#else
d562e42e 3575 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
3576 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3577 XEXP (to_rtx, 0), Pmode,
3b6f75e2
JW
3578 convert_to_mode (TYPE_MODE (integer_type_node),
3579 size, TREE_UNSIGNED (integer_type_node)),
3580 TYPE_MODE (integer_type_node));
bbf6f052
RK
3581#endif
3582
3583 preserve_temp_slots (to_rtx);
3584 free_temp_slots ();
0088fcb1 3585 pop_temp_slots ();
709f5be1 3586 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3587 }
3588
3589 /* Compute FROM and store the value in the rtx we got. */
3590
0088fcb1 3591 push_temp_slots ();
bbf6f052
RK
3592 result = store_expr (from, to_rtx, want_value);
3593 preserve_temp_slots (result);
3594 free_temp_slots ();
0088fcb1 3595 pop_temp_slots ();
709f5be1 3596 return want_value ? result : NULL_RTX;
bbf6f052
RK
3597}
3598
3599/* Generate code for computing expression EXP,
3600 and storing the value into TARGET.
bbf6f052
RK
3601 TARGET may contain a QUEUED rtx.
3602
709f5be1
RS
3603 If WANT_VALUE is nonzero, return a copy of the value
3604 not in TARGET, so that we can be sure to use the proper
3605 value in a containing expression even if TARGET has something
3606 else stored in it. If possible, we copy the value through a pseudo
3607 and return that pseudo. Or, if the value is constant, we try to
3608 return the constant. In some cases, we return a pseudo
3609 copied *from* TARGET.
3610
3611 If the mode is BLKmode then we may return TARGET itself.
3612 It turns out that in BLKmode it doesn't cause a problem,
3613 because C has no operators that could combine two different
3614 assignments into the same BLKmode object with different values
3615 with no sequence point. Will other languages need this to
3616 be more thorough?
3617
3618 If WANT_VALUE is 0, we return NULL, to make sure
3619 to catch quickly any cases where the caller uses the value
3620 and fails to set WANT_VALUE. */
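/* For illustration: a caller that only needs the side effect uses
   store_expr (exp, target, 0) and ignores the NULL_RTX result, while
   expand_assignment passes its own WANT_VALUE through and then hands
   the returned rtx (a pseudo, a constant, or TARGET itself) to the
   containing expression. */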
bbf6f052
RK
3621
3622rtx
709f5be1 3623store_expr (exp, target, want_value)
bbf6f052
RK
3624 register tree exp;
3625 register rtx target;
709f5be1 3626 int want_value;
bbf6f052
RK
3627{
3628 register rtx temp;
3629 int dont_return_target = 0;
3630
3631 if (TREE_CODE (exp) == COMPOUND_EXPR)
3632 {
3633 /* Perform first part of compound expression, then assign from second
3634 part. */
3635 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3636 emit_queue ();
709f5be1 3637 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
3638 }
3639 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3640 {
3641 /* For a conditional expression, get a safe form of the target. Then
3642 test the condition, doing the appropriate assignment on either
3643 side. This avoids the creation of unnecessary temporaries.
3644 For non-BLKmode, it is more efficient not to do this. */
3645
3646 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3647
3648 emit_queue ();
3649 target = protect_from_queue (target, 1);
3650
dabf8373 3651 do_pending_stack_adjust ();
bbf6f052
RK
3652 NO_DEFER_POP;
3653 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 3654 start_cleanup_deferral ();
709f5be1 3655 store_expr (TREE_OPERAND (exp, 1), target, 0);
956d6950 3656 end_cleanup_deferral ();
bbf6f052
RK
3657 emit_queue ();
3658 emit_jump_insn (gen_jump (lab2));
3659 emit_barrier ();
3660 emit_label (lab1);
956d6950 3661 start_cleanup_deferral ();
709f5be1 3662 store_expr (TREE_OPERAND (exp, 2), target, 0);
956d6950 3663 end_cleanup_deferral ();
bbf6f052
RK
3664 emit_queue ();
3665 emit_label (lab2);
3666 OK_DEFER_POP;
a3a58acc 3667
709f5be1 3668 return want_value ? target : NULL_RTX;
bbf6f052 3669 }
bbf6f052 3670 else if (queued_subexp_p (target))
709f5be1
RS
3671 /* If target contains a postincrement, let's not risk
3672 using it as the place to generate the rhs. */
bbf6f052
RK
3673 {
3674 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3675 {
3676 /* Expand EXP into a new pseudo. */
3677 temp = gen_reg_rtx (GET_MODE (target));
3678 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3679 }
3680 else
906c4e36 3681 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
3682
3683 /* If target is volatile, ANSI requires accessing the value
3684 *from* the target, if it is accessed. So make that happen.
3685 In no case return the target itself. */
3686 if (! MEM_VOLATILE_P (target) && want_value)
3687 dont_return_target = 1;
bbf6f052 3688 }
12f06d17
CH
3689 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3690 && GET_MODE (target) != BLKmode)
3691 /* If target is in memory and caller wants value in a register instead,
3692 arrange that. Pass TARGET as target for expand_expr so that,
3693 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3694 We know expand_expr will not use the target in that case.
3695 Don't do this if TARGET is volatile because we are supposed
3696 to write it and then read it. */
3697 {
1da93fe0 3698 temp = expand_expr (exp, target, GET_MODE (target), 0);
12f06d17
CH
3699 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3700 temp = copy_to_reg (temp);
3701 dont_return_target = 1;
3702 }
1499e0a8
RK
3703 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3704 /* If this is a scalar in a register that is stored in a wider mode
3705 than the declared mode, compute the result into its declared mode
3706 and then convert to the wider mode. Our value is the computed
3707 expression. */
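/* For example, on a machine that promotes QImode variables to SImode
   registers, a 'char' variable is represented as
   (subreg:QI (reg:SI n) 0) with SUBREG_PROMOTED_VAR_P set; the rhs is
   computed in QImode and convert_move below widens it into the SImode
   register, sign- or zero-extending according to
   SUBREG_PROMOTED_UNSIGNED_P. */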
3708 {
5a32d038 3709 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
3710 which will often result in some optimizations. Do the conversion
3711 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
3712 the extend. But don't do this if the type of EXP is a subtype
3713 of something else since then the conversion might involve
3714 more than just converting modes. */
3715 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3716 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
3717 {
3718 if (TREE_UNSIGNED (TREE_TYPE (exp))
3719 != SUBREG_PROMOTED_UNSIGNED_P (target))
3720 exp
3721 = convert
3722 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3723 TREE_TYPE (exp)),
3724 exp);
3725
3726 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3727 SUBREG_PROMOTED_UNSIGNED_P (target)),
3728 exp);
3729 }
5a32d038 3730
1499e0a8 3731 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
b258707c 3732
766f36c7 3733 /* If TEMP is a volatile MEM and we want a result value, make
f29369b9
RK
3734 the access now so it gets done only once. Likewise if
3735 it contains TARGET. */
3736 if (GET_CODE (temp) == MEM && want_value
3737 && (MEM_VOLATILE_P (temp)
3738 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
766f36c7
RK
3739 temp = copy_to_reg (temp);
3740
b258707c
RS
3741 /* If TEMP is a VOIDmode constant, use convert_modes to make
3742 sure that we properly convert it. */
3743 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3744 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3745 TYPE_MODE (TREE_TYPE (exp)), temp,
3746 SUBREG_PROMOTED_UNSIGNED_P (target));
3747
1499e0a8
RK
3748 convert_move (SUBREG_REG (target), temp,
3749 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
3750
3751 /* If we promoted a constant, change the mode back down to match
3752 target. Otherwise, the caller might get confused by a result whose
3753 mode is larger than expected. */
3754
3755 if (want_value && GET_MODE (temp) != GET_MODE (target)
3756 && GET_MODE (temp) != VOIDmode)
3757 {
3758 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3759 SUBREG_PROMOTED_VAR_P (temp) = 1;
3760 SUBREG_PROMOTED_UNSIGNED_P (temp)
3761 = SUBREG_PROMOTED_UNSIGNED_P (target);
3762 }
3763
709f5be1 3764 return want_value ? temp : NULL_RTX;
1499e0a8 3765 }
bbf6f052
RK
3766 else
3767 {
3768 temp = expand_expr (exp, target, GET_MODE (target), 0);
766f36c7 3769 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
3770 If TARGET is a volatile mem ref, either return TARGET
3771 or return a reg copied *from* TARGET; ANSI requires this.
3772
3773 Otherwise, if TEMP is not TARGET, return TEMP
3774 if it is constant (for efficiency),
3775 or if we really want the correct value. */
bbf6f052
RK
3776 if (!(target && GET_CODE (target) == REG
3777 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 3778 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 3779 && ! rtx_equal_p (temp, target)
709f5be1 3780 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
3781 dont_return_target = 1;
3782 }
3783
b258707c
RS
3784 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3785 the same as that of TARGET, adjust the constant. This is needed, for
3786 example, in case it is a CONST_DOUBLE and we want only a word-sized
3787 value. */
3788 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 3789 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
3790 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3791 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3792 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3793
7d384cc0 3794 if (current_function_check_memory_usage
921b3427
RK
3795 && GET_CODE (target) == MEM
3796 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3797 {
3798 if (GET_CODE (temp) == MEM)
3799 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
6a9c4aed
MK
3800 XEXP (target, 0), Pmode,
3801 XEXP (temp, 0), Pmode,
921b3427
RK
3802 expr_size (exp), TYPE_MODE (sizetype));
3803 else
3804 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 3805 XEXP (target, 0), Pmode,
921b3427 3806 expr_size (exp), TYPE_MODE (sizetype),
956d6950
JL
3807 GEN_INT (MEMORY_USE_WO),
3808 TYPE_MODE (integer_type_node));
921b3427
RK
3809 }
3810
bbf6f052
RK
3811 /* If value was not generated in the target, store it there.
3812 Convert the value to TARGET's type first if necessary. */
f3f2255a
R
3813 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3814 one or both of them are volatile memory refs, we have to distinguish
3815 two cases:
3816 - expand_expr has used TARGET. In this case, we must not generate
3817 another copy. This can be detected by TARGET being equal according
3818 to == .
3819 - expand_expr has not used TARGET - that means that the source just
3820 happens to have the same RTX form. Since temp will have been created
3821 by expand_expr, it will compare unequal according to == .
3822 We must generate a copy in this case, to reach the correct number
3823 of volatile memory references. */
bbf6f052 3824
6036acbb 3825 if ((! rtx_equal_p (temp, target)
f3f2255a
R
3826 || (temp != target && (side_effects_p (temp)
3827 || side_effects_p (target))))
6036acbb 3828 && TREE_CODE (exp) != ERROR_MARK)
bbf6f052
RK
3829 {
3830 target = protect_from_queue (target, 1);
3831 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 3832 && GET_MODE (temp) != VOIDmode)
bbf6f052
RK
3833 {
3834 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3835 if (dont_return_target)
3836 {
3837 /* In this case, we will return TEMP,
3838 so make sure it has the proper mode.
3839 But don't forget to store the value into TARGET. */
3840 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3841 emit_move_insn (target, temp);
3842 }
3843 else
3844 convert_move (target, temp, unsignedp);
3845 }
3846
3847 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3848 {
3849 /* Handle copying a string constant into an array.
3850 The string constant may be shorter than the array.
3851 So copy just the string's actual length, and clear the rest. */
3852 rtx size;
22619c3f 3853 rtx addr;
bbf6f052 3854
e87b4f3f
RS
3855 /* Get the size of the data type of the string,
3856 which is actually the size of the target. */
3857 size = expr_size (exp);
3858 if (GET_CODE (size) == CONST_INT
3859 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3860 emit_block_move (target, temp, size,
3861 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3862 else
bbf6f052 3863 {
e87b4f3f
RS
3864 /* Compute the size of the data to copy from the string. */
3865 tree copy_size
c03b7665 3866 = size_binop (MIN_EXPR,
b50d17a1 3867 make_tree (sizetype, size),
c03b7665
RK
3868 convert (sizetype,
3869 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
906c4e36
RK
3870 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3871 VOIDmode, 0);
e87b4f3f
RS
3872 rtx label = 0;
3873
3874 /* Copy that much. */
3875 emit_block_move (target, temp, copy_size_rtx,
3876 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3877
88f63c77
RK
3878 /* Figure out how much is left in TARGET that we have to clear.
3879 Do all calculations in ptr_mode. */
3880
3881 addr = XEXP (target, 0);
3882 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3883
e87b4f3f
RS
3884 if (GET_CODE (copy_size_rtx) == CONST_INT)
3885 {
88f63c77 3886 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
22619c3f 3887 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
e87b4f3f
RS
3888 }
3889 else
3890 {
88f63c77
RK
3891 addr = force_reg (ptr_mode, addr);
3892 addr = expand_binop (ptr_mode, add_optab, addr,
906c4e36
RK
3893 copy_size_rtx, NULL_RTX, 0,
3894 OPTAB_LIB_WIDEN);
e87b4f3f 3895
88f63c77 3896 size = expand_binop (ptr_mode, sub_optab, size,
906c4e36
RK
3897 copy_size_rtx, NULL_RTX, 0,
3898 OPTAB_LIB_WIDEN);
e87b4f3f 3899
e87b4f3f 3900 label = gen_label_rtx ();
c5d5d461
JL
3901 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3902 GET_MODE (size), 0, 0, label);
e87b4f3f
RS
3903 }
3904
3905 if (size != const0_rtx)
3906 {
921b3427 3907 /* Be sure we can write on ADDR. */
7d384cc0 3908 if (current_function_check_memory_usage)
921b3427 3909 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 3910 addr, Pmode,
921b3427 3911 size, TYPE_MODE (sizetype),
956d6950
JL
3912 GEN_INT (MEMORY_USE_WO),
3913 TYPE_MODE (integer_type_node));
bbf6f052 3914#ifdef TARGET_MEM_FUNCTIONS
3b6f75e2 3915 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
86242483 3916 addr, ptr_mode,
3b6f75e2
JW
3917 const0_rtx, TYPE_MODE (integer_type_node),
3918 convert_to_mode (TYPE_MODE (sizetype),
3919 size,
3920 TREE_UNSIGNED (sizetype)),
3921 TYPE_MODE (sizetype));
bbf6f052 3922#else
d562e42e 3923 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
86242483 3924 addr, ptr_mode,
3b6f75e2
JW
3925 convert_to_mode (TYPE_MODE (integer_type_node),
3926 size,
3927 TREE_UNSIGNED (integer_type_node)),
3928 TYPE_MODE (integer_type_node));
bbf6f052 3929#endif
e87b4f3f 3930 }
22619c3f 3931
e87b4f3f
RS
3932 if (label)
3933 emit_label (label);
bbf6f052
RK
3934 }
3935 }
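/* As a concrete case of the STRING_CST handling above: for
   char buf[8] = "abc"; the string data, including the terminating
   null so that TREE_STRING_LENGTH is 4, is block-copied, and the
   remaining 4 bytes of buf are then cleared by the memset/bzero
   call. */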
fffa9c1d
JW
3936 /* Handle calls that return values in multiple non-contiguous locations.
3937 The Irix 6 ABI has examples of this. */
3938 else if (GET_CODE (target) == PARALLEL)
aac5cc16
RH
3939 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3940 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
bbf6f052
RK
3941 else if (GET_MODE (temp) == BLKmode)
3942 emit_block_move (target, temp, expr_size (exp),
3943 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3944 else
3945 emit_move_insn (target, temp);
3946 }
709f5be1 3947
766f36c7
RK
3948 /* If we don't want a value, return NULL_RTX. */
3949 if (! want_value)
3950 return NULL_RTX;
3951
3952 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3953 ??? The latter test doesn't seem to make sense. */
3954 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 3955 return temp;
766f36c7
RK
3956
3957 /* Return TARGET itself if it is a hard register. */
3958 else if (want_value && GET_MODE (target) != BLKmode
3959 && ! (GET_CODE (target) == REG
3960 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 3961 return copy_to_reg (target);
766f36c7
RK
3962
3963 else
709f5be1 3964 return target;
bbf6f052
RK
3965}
3966\f
9de08200
RK
3967/* Return 1 if EXP just contains zeros. */
3968
3969static int
3970is_zeros_p (exp)
3971 tree exp;
3972{
3973 tree elt;
3974
3975 switch (TREE_CODE (exp))
3976 {
3977 case CONVERT_EXPR:
3978 case NOP_EXPR:
3979 case NON_LVALUE_EXPR:
3980 return is_zeros_p (TREE_OPERAND (exp, 0));
3981
3982 case INTEGER_CST:
3983 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3984
3985 case COMPLEX_CST:
3986 return
3987 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3988
3989 case REAL_CST:
41c9120b 3990 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200
RK
3991
3992 case CONSTRUCTOR:
e1a43f73
PB
3993 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3994 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
3995 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3996 if (! is_zeros_p (TREE_VALUE (elt)))
3997 return 0;
3998
3999 return 1;
e9a25f70
JL
4000
4001 default:
4002 return 0;
9de08200 4003 }
9de08200
RK
4004}
4005
4006/* Return 1 if EXP contains mostly (at least 3/4) zeros. */
4007
4008static int
4009mostly_zeros_p (exp)
4010 tree exp;
4011{
9de08200
RK
4012 if (TREE_CODE (exp) == CONSTRUCTOR)
4013 {
e1a43f73
PB
4014 int elts = 0, zeros = 0;
4015 tree elt = CONSTRUCTOR_ELTS (exp);
4016 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4017 {
4018 /* If there are no ranges of true bits, it is all zero. */
4019 return elt == NULL_TREE;
4020 }
4021 for (; elt; elt = TREE_CHAIN (elt))
4022 {
4023 /* We do not handle the case where the index is a RANGE_EXPR,
4024 so the statistic will be somewhat inaccurate.
4025 We do make a more accurate count in store_constructor itself,
4026 and since this function is only used for nested array elements,
0f41302f 4027 this should be close enough. */
e1a43f73
PB
4028 if (mostly_zeros_p (TREE_VALUE (elt)))
4029 zeros++;
4030 elts++;
4031 }
9de08200
RK
4032
4033 return 4 * zeros >= 3 * elts;
4034 }
4035
4036 return is_zeros_p (exp);
4037}
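/* For instance, the constructor { 0, 0, 0, 5 } gives ELTS == 4 and
   ZEROS == 3, and 4 * 3 >= 3 * 4 holds, so it counts as mostly zero;
   with two nonzero elements, 4 * 2 >= 3 * 4 fails and it does not. */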
4038\f
e1a43f73
PB
4039/* Helper function for store_constructor.
4040 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4041 TYPE is the type of the CONSTRUCTOR, not the element type.
c5c76735 4042 ALIGN and CLEARED are as for store_constructor.
23ccec44
JW
4043
4044 This provides a recursive shortcut back to store_constructor when it isn't
4045 necessary to go through store_field. This is so that we can pass through
4046 the cleared field to let store_constructor know that we may not have to
4047 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4048
4049static void
4050store_constructor_field (target, bitsize, bitpos,
c5c76735 4051 mode, exp, type, align, cleared)
e1a43f73
PB
4052 rtx target;
4053 int bitsize, bitpos;
4054 enum machine_mode mode;
4055 tree exp, type;
c5c76735 4056 int align;
e1a43f73
PB
4057 int cleared;
4058{
4059 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44
JW
4060 && bitpos % BITS_PER_UNIT == 0
4061 /* If we have a non-zero bitpos for a register target, then we just
4062 let store_field do the bitfield handling. This is unlikely to
4063 generate unnecessary clear instructions anyway. */
4064 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 4065 {
126e5b0d 4066 if (bitpos != 0)
ce64861e
RK
4067 target
4068 = change_address (target,
4069 GET_MODE (target) == BLKmode
4070 || 0 != (bitpos
4071 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4072 ? BLKmode : VOIDmode,
4073 plus_constant (XEXP (target, 0),
4074 bitpos / BITS_PER_UNIT));
b7010412 4075 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4076 }
4077 else
c5c76735
JL
4078 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0,
4079 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT,
0db5adc6 4080 int_size_in_bytes (type), 0);
e1a43f73
PB
4081}
4082
bbf6f052 4083/* Store the value of constructor EXP into the rtx TARGET.
e1a43f73 4084 TARGET is either a REG or a MEM.
c5c76735 4085 ALIGN is the maximum known alignment for TARGET, in bits.
b7010412
RK
4086 CLEARED is true if TARGET is known to have been zeroed.
4087 SIZE is the number of bytes of TARGET we are allowed to modify: this
4088 may not be the same as the size of EXP if we are assigning to a field
4089 which has been packed to exclude padding bits. */
bbf6f052
RK
4090
4091static void
b7010412 4092store_constructor (exp, target, align, cleared, size)
bbf6f052
RK
4093 tree exp;
4094 rtx target;
c5c76735 4095 int align;
e1a43f73 4096 int cleared;
b7010412 4097 int size;
bbf6f052 4098{
4af3895e 4099 tree type = TREE_TYPE (exp);
a5efcd63 4100#ifdef WORD_REGISTER_OPERATIONS
34c73909 4101 rtx exp_size = expr_size (exp);
a5efcd63 4102#endif
4af3895e 4103
bbf6f052
RK
4104 /* We know our target cannot conflict, since safe_from_p has been called. */
4105#if 0
4106 /* Don't try copying piece by piece into a hard register
4107 since that is vulnerable to being clobbered by EXP.
4108 Instead, construct in a pseudo register and then copy it all. */
4109 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4110 {
4111 rtx temp = gen_reg_rtx (GET_MODE (target));
7205485e 4112 store_constructor (exp, temp, align, cleared, size);
bbf6f052
RK
4113 emit_move_insn (target, temp);
4114 return;
4115 }
4116#endif
4117
e44842fe
RK
4118 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4119 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
4120 {
4121 register tree elt;
4122
4af3895e 4123 /* Inform later passes that the whole union value is dead. */
dd1db5ec
RK
4124 if ((TREE_CODE (type) == UNION_TYPE
4125 || TREE_CODE (type) == QUAL_UNION_TYPE)
4126 && ! cleared)
a59f8640
R
4127 {
4128 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4129
4130 /* If the constructor is empty, clear the union. */
4131 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
4132 clear_storage (target, expr_size (exp),
4133 TYPE_ALIGN (type) / BITS_PER_UNIT);
4134 }
4af3895e
JVA
4135
4136 /* If we are building a static constructor into a register,
4137 set the initial value as zero so we can fold the value into
67225c15
RK
4138 a constant. But if more than one register is involved,
4139 this probably loses. */
4140 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4141 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200
RK
4142 {
4143 if (! cleared)
e9a25f70 4144 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4af3895e 4145
9de08200
RK
4146 cleared = 1;
4147 }
4148
4149 /* If the constructor has fewer fields than the structure
4150 or if we are initializing the structure to mostly zeros,
bbf6f052 4151 clear the whole structure first. */
9376fcd6
RK
4152 else if (size > 0
4153 && ((list_length (CONSTRUCTOR_ELTS (exp))
4154 != list_length (TYPE_FIELDS (type)))
4155 || mostly_zeros_p (exp)))
9de08200
RK
4156 {
4157 if (! cleared)
b7010412 4158 clear_storage (target, GEN_INT (size),
c5c76735 4159 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
9de08200
RK
4160
4161 cleared = 1;
4162 }
dd1db5ec 4163 else if (! cleared)
bbf6f052 4164 /* Inform later passes that the old value is dead. */
38a448ca 4165 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4166
4167 /* Store each element of the constructor into
4168 the corresponding field of TARGET. */
4169
4170 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4171 {
4172 register tree field = TREE_PURPOSE (elt);
c5c76735 4173#ifdef WORD_REGISTER_OPERATIONS
34c73909 4174 tree value = TREE_VALUE (elt);
c5c76735 4175#endif
bbf6f052
RK
4176 register enum machine_mode mode;
4177 int bitsize;
b50d17a1 4178 int bitpos = 0;
bbf6f052 4179 int unsignedp;
b50d17a1
RK
4180 tree pos, constant = 0, offset = 0;
4181 rtx to_rtx = target;
bbf6f052 4182
f32fd778
RS
4183 /* Just ignore missing fields.
4184 We cleared the whole structure, above,
4185 if any fields are missing. */
4186 if (field == 0)
4187 continue;
4188
e1a43f73
PB
4189 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4190 continue;
9de08200 4191
14a774a9
RK
4192 if (TREE_CODE (DECL_SIZE (field)) == INTEGER_CST)
4193 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
4194 else
4195 bitsize = -1;
4196
bbf6f052
RK
4197 unsignedp = TREE_UNSIGNED (field);
4198 mode = DECL_MODE (field);
4199 if (DECL_BIT_FIELD (field))
4200 mode = VOIDmode;
4201
b50d17a1
RK
4202 pos = DECL_FIELD_BITPOS (field);
4203 if (TREE_CODE (pos) == INTEGER_CST)
4204 constant = pos;
4205 else if (TREE_CODE (pos) == PLUS_EXPR
4206 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4207 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
4208 else
4209 offset = pos;
4210
4211 if (constant)
cd11b87e 4212 bitpos = TREE_INT_CST_LOW (constant);
b50d17a1
RK
4213
4214 if (offset)
4215 {
4216 rtx offset_rtx;
4217
4218 if (contains_placeholder_p (offset))
4219 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 4220 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 4221
9f887d05 4222 offset = size_binop (EXACT_DIV_EXPR, offset,
b50d17a1 4223 size_int (BITS_PER_UNIT));
bbf6f052 4224
b50d17a1
RK
4225 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4226 if (GET_CODE (to_rtx) != MEM)
4227 abort ();
4228
bd070e1a
RH
4229 if (GET_MODE (offset_rtx) != ptr_mode)
4230 {
4231#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 4232 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
4233#else
4234 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4235#endif
4236 }
4237
b50d17a1
RK
4238 to_rtx
4239 = change_address (to_rtx, VOIDmode,
38a448ca 4240 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
c5c76735
JL
4241 force_reg (ptr_mode,
4242 offset_rtx)));
b50d17a1 4243 }
c5c76735 4244
cf04eb80
RK
4245 if (TREE_READONLY (field))
4246 {
9151b3bf 4247 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
4248 to_rtx = copy_rtx (to_rtx);
4249
cf04eb80
RK
4250 RTX_UNCHANGING_P (to_rtx) = 1;
4251 }
4252
34c73909
R
4253#ifdef WORD_REGISTER_OPERATIONS
4254 /* If this initializes a field that is smaller than a word, at the
4255 start of a word, try to widen it to a full word.
4256 This special case allows us to output C++ member function
4257 initializations in a form that the optimizers can understand. */
4258 if (constant
4259 && GET_CODE (target) == REG
4260 && bitsize < BITS_PER_WORD
4261 && bitpos % BITS_PER_WORD == 0
4262 && GET_MODE_CLASS (mode) == MODE_INT
4263 && TREE_CODE (value) == INTEGER_CST
4264 && GET_CODE (exp_size) == CONST_INT
4265 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4266 {
4267 tree type = TREE_TYPE (value);
4268 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4269 {
4270 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4271 value = convert (type, value);
4272 }
4273 if (BYTES_BIG_ENDIAN)
4274 value
4275 = fold (build (LSHIFT_EXPR, type, value,
4276 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4277 bitsize = BITS_PER_WORD;
4278 mode = word_mode;
4279 }
4280#endif
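/* E.g. with 32-bit words, a constant 0x12 destined for a leading
   8-bit field becomes a full-word store: 0x12 as-is on a
   little-endian target, or 0x12000000 after the LSHIFT_EXPR above on
   a big-endian one, which word-register targets handle more cheaply
   than a bit-field insertion. */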
c5c76735
JL
4281 store_constructor_field (to_rtx, bitsize, bitpos, mode,
4282 TREE_VALUE (elt), type,
4283 MIN (align,
4284 DECL_ALIGN (TREE_PURPOSE (elt))),
4285 cleared);
bbf6f052
RK
4286 }
4287 }
4af3895e 4288 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
4289 {
4290 register tree elt;
4291 register int i;
e1a43f73 4292 int need_to_clear;
4af3895e 4293 tree domain = TYPE_DOMAIN (type);
906c4e36
RK
4294 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4295 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 4296 tree elttype = TREE_TYPE (type);
bbf6f052 4297
e1a43f73 4298 /* If the constructor has fewer elements than the array,
38e01259 4299 clear the whole array first. Similarly if this is a
e1a43f73
PB
4300 static constructor of a non-BLKmode object. */
4301 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4302 need_to_clear = 1;
4303 else
4304 {
4305 HOST_WIDE_INT count = 0, zero_count = 0;
4306 need_to_clear = 0;
4307 /* This loop is a more accurate version of the loop in
4308 mostly_zeros_p (it handles RANGE_EXPR in an index).
4309 It is also needed to check for missing elements. */
4310 for (elt = CONSTRUCTOR_ELTS (exp);
4311 elt != NULL_TREE;
df0faff1 4312 elt = TREE_CHAIN (elt))
e1a43f73
PB
4313 {
4314 tree index = TREE_PURPOSE (elt);
4315 HOST_WIDE_INT this_node_count;
4316 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4317 {
4318 tree lo_index = TREE_OPERAND (index, 0);
4319 tree hi_index = TREE_OPERAND (index, 1);
4320 if (TREE_CODE (lo_index) != INTEGER_CST
4321 || TREE_CODE (hi_index) != INTEGER_CST)
4322 {
4323 need_to_clear = 1;
4324 break;
4325 }
4326 this_node_count = TREE_INT_CST_LOW (hi_index)
4327 - TREE_INT_CST_LOW (lo_index) + 1;
4328 }
4329 else
4330 this_node_count = 1;
4331 count += this_node_count;
4332 if (mostly_zeros_p (TREE_VALUE (elt)))
4333 zero_count += this_node_count;
4334 }
8e958f70 4335 /* Clear the entire array first if there are any missing elements,
0f41302f 4336 or if the incidence of zero elements is >= 75%. */
8e958f70
PB
4337 if (count < maxelt - minelt + 1
4338 || 4 * zero_count >= 3 * count)
e1a43f73
PB
4339 need_to_clear = 1;
4340 }
9376fcd6 4341 if (need_to_clear && size > 0)
9de08200
RK
4342 {
4343 if (! cleared)
b7010412 4344 clear_storage (target, GEN_INT (size),
c5c76735 4345 (align + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
9de08200
RK
4346 cleared = 1;
4347 }
bbf6f052
RK
4348 else
4349 /* Inform later passes that the old value is dead. */
38a448ca 4350 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4351
4352 /* Store each element of the constructor into
4353 the corresponding element of TARGET, determined
4354 by counting the elements. */
4355 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4356 elt;
4357 elt = TREE_CHAIN (elt), i++)
4358 {
4359 register enum machine_mode mode;
4360 int bitsize;
4361 int bitpos;
4362 int unsignedp;
e1a43f73 4363 tree value = TREE_VALUE (elt);
c5c76735 4364 int align = TYPE_ALIGN (TREE_TYPE (value));
03dc44a6
RS
4365 tree index = TREE_PURPOSE (elt);
4366 rtx xtarget = target;
bbf6f052 4367
e1a43f73
PB
4368 if (cleared && is_zeros_p (value))
4369 continue;
9de08200 4370
bbf6f052 4371 unsignedp = TREE_UNSIGNED (elttype);
14a774a9
RK
4372 mode = TYPE_MODE (elttype);
4373 if (mode == BLKmode)
4374 {
4375 if (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4376 && TREE_INT_CST_HIGH (TYPE_SIZE (elttype)) == 0)
4377 bitsize = TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4378 else
4379 bitsize = -1;
4380 }
4381 else
4382 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 4383
e1a43f73
PB
4384 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4385 {
4386 tree lo_index = TREE_OPERAND (index, 0);
4387 tree hi_index = TREE_OPERAND (index, 1);
4388 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4389 struct nesting *loop;
05c0b405
PB
4390 HOST_WIDE_INT lo, hi, count;
4391 tree position;
e1a43f73 4392
0f41302f 4393 /* If the range is constant and "small", unroll the loop. */
e1a43f73 4394 if (TREE_CODE (lo_index) == INTEGER_CST
05c0b405
PB
4395 && TREE_CODE (hi_index) == INTEGER_CST
4396 && (lo = TREE_INT_CST_LOW (lo_index),
4397 hi = TREE_INT_CST_LOW (hi_index),
4398 count = hi - lo + 1,
4399 (GET_CODE (target) != MEM
4400 || count <= 2
4401 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4402 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4403 <= 40 * 8))))
e1a43f73 4404 {
05c0b405
PB
4405 lo -= minelt; hi -= minelt;
4406 for (; lo <= hi; lo++)
e1a43f73 4407 {
05c0b405 4408 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
c5c76735
JL
4409 store_constructor_field (target, bitsize, bitpos, mode,
4410 value, type, align, cleared);
e1a43f73
PB
4411 }
4412 }
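/* Under the bound above, e.g. a range of ten 32-bit elements needs
   32 * 10 == 320 <= 40 * 8 bits and is unrolled, while eleven such
   elements (352 bits) fall through to the runtime loop below. */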
4413 else
4414 {
4415 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4416 loop_top = gen_label_rtx ();
4417 loop_end = gen_label_rtx ();
4418
4419 unsignedp = TREE_UNSIGNED (domain);
4420
4421 index = build_decl (VAR_DECL, NULL_TREE, domain);
4422
4423 DECL_RTL (index) = index_r
4424 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4425 &unsignedp, 0));
4426
4427 if (TREE_CODE (value) == SAVE_EXPR
4428 && SAVE_EXPR_RTL (value) == 0)
4429 {
0f41302f
MS
4430 /* Make sure value gets expanded once before the
4431 loop. */
e1a43f73
PB
4432 expand_expr (value, const0_rtx, VOIDmode, 0);
4433 emit_queue ();
4434 }
4435 store_expr (lo_index, index_r, 0);
4436 loop = expand_start_loop (0);
4437
0f41302f 4438 /* Assign VALUE to the element at position INDEX. */
e1a43f73
PB
4439 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4440 size_int (BITS_PER_UNIT));
4441 position = size_binop (MULT_EXPR,
4442 size_binop (MINUS_EXPR, index,
4443 TYPE_MIN_VALUE (domain)),
4444 position);
4445 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4446 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
e1a43f73
PB
4447 xtarget = change_address (target, mode, addr);
4448 if (TREE_CODE (value) == CONSTRUCTOR)
b7010412
RK
4449 store_constructor (value, xtarget, align, cleared,
4450 bitsize / BITS_PER_UNIT);
e1a43f73
PB
4451 else
4452 store_expr (value, xtarget, 0);
4453
4454 expand_exit_loop_if_false (loop,
4455 build (LT_EXPR, integer_type_node,
4456 index, hi_index));
4457
4458 expand_increment (build (PREINCREMENT_EXPR,
4459 TREE_TYPE (index),
7b8b9722 4460 index, integer_one_node), 0, 0);
e1a43f73
PB
4461 expand_end_loop ();
4462 emit_label (loop_end);
4463
4464 /* Needed by stupid register allocation, to extend the
4465 lifetime of pseudo-regs used by target past the end
4466 of the loop. */
38a448ca 4467 emit_insn (gen_rtx_USE (GET_MODE (target), target));
e1a43f73
PB
4468 }
4469 }
4470 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
5b6c44ff 4471 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
03dc44a6 4472 {
e1a43f73 4473 rtx pos_rtx, addr;
03dc44a6
RS
4474 tree position;
4475
5b6c44ff
RK
4476 if (index == 0)
4477 index = size_int (i);
4478
e1a43f73
PB
4479 if (minelt)
4480 index = size_binop (MINUS_EXPR, index,
4481 TYPE_MIN_VALUE (domain));
5b6c44ff
RK
4482 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4483 size_int (BITS_PER_UNIT));
4484 position = size_binop (MULT_EXPR, index, position);
03dc44a6 4485 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4486 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
03dc44a6 4487 xtarget = change_address (target, mode, addr);
e1a43f73 4488 store_expr (value, xtarget, 0);
03dc44a6
RS
4489 }
4490 else
4491 {
4492 if (index != 0)
7c314719 4493 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
03dc44a6
RS
4494 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4495 else
4496 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
c5c76735
JL
4497 store_constructor_field (target, bitsize, bitpos, mode, value,
4498 type, align, cleared);
03dc44a6 4499 }
bbf6f052
RK
4500 }
4501 }
071a6595
PB
4502 /* Set constructor assignments. */
4503 else if (TREE_CODE (type) == SET_TYPE)
4504 {
e1a43f73 4505 tree elt = CONSTRUCTOR_ELTS (exp);
e1a43f73 4506 int nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
4507 tree domain = TYPE_DOMAIN (type);
4508 tree domain_min, domain_max, bitlength;
4509
9faa82d8 4510 /* The default implementation strategy is to extract the constant
071a6595
PB
4511 parts of the constructor, use that to initialize the target,
4512 and then "or" in whatever non-constant ranges we need in addition.
4513
4514 If a large set is all zero or all ones, it is
4515 probably better to set it using memset (if available) or bzero.
4516 Also, if a large set has just a single range, it may also be
4517 better to first clear the whole set (using
0f41302f 4518 bzero/memset) and then set the bits we want. */
071a6595 4519
0f41302f 4520 /* Check for all zeros. */
9376fcd6 4521 if (elt == NULL_TREE && size > 0)
071a6595 4522 {
e1a43f73 4523 if (!cleared)
b7010412 4524 clear_storage (target, GEN_INT (size),
e1a43f73 4525 TYPE_ALIGN (type) / BITS_PER_UNIT);
071a6595
PB
4526 return;
4527 }
4528
071a6595
PB
4529 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4530 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4531 bitlength = size_binop (PLUS_EXPR,
4532 size_binop (MINUS_EXPR, domain_max, domain_min),
4533 size_one_node);
4534
e1a43f73
PB
4535 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4536 abort ();
4537 nbits = TREE_INT_CST_LOW (bitlength);
4538
4539 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4540 are "complicated" (more than one range), initialize (the
4541 constant parts) by copying from a constant. */
4542 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4543 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 4544 {
b4ee5a72
PB
4545 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4546 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 4547 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72
PB
4548 HOST_WIDE_INT word = 0;
4549 int bit_pos = 0;
4550 int ibit = 0;
0f41302f 4551 int offset = 0; /* In bytes from beginning of set. */
e1a43f73 4552 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 4553 for (;;)
071a6595 4554 {
b4ee5a72
PB
4555 if (bit_buffer[ibit])
4556 {
b09f3348 4557 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
4558 word |= (1 << (set_word_size - 1 - bit_pos));
4559 else
4560 word |= 1 << bit_pos;
4561 }
4562 bit_pos++; ibit++;
4563 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 4564 {
e1a43f73
PB
4565 if (word != 0 || ! cleared)
4566 {
4567 rtx datum = GEN_INT (word);
4568 rtx to_rtx;
0f41302f
MS
4569 /* The assumption here is that it is safe to use
4570 XEXP if the set is multi-word, but not if
4571 it's single-word. */
e1a43f73
PB
4572 if (GET_CODE (target) == MEM)
4573 {
4574 to_rtx = plus_constant (XEXP (target, 0), offset);
4575 to_rtx = change_address (target, mode, to_rtx);
4576 }
4577 else if (offset == 0)
4578 to_rtx = target;
4579 else
4580 abort ();
4581 emit_move_insn (to_rtx, datum);
4582 }
b4ee5a72
PB
4583 if (ibit == nbits)
4584 break;
4585 word = 0;
4586 bit_pos = 0;
4587 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
4588 }
4589 }
071a6595 4590 }
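/* To illustrate the packing above with SET_WORD_SIZE == 8: a set bit
   at BIT_POS 2 becomes word |= 1 << 2 (0x04) on a little-endian
   target, but word |= 1 << (8 - 1 - 2) (0x20) on a big-endian one,
   keeping the first bit of the set in the most significant
   position. */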
e1a43f73
PB
4591 else if (!cleared)
4592 {
0f41302f 4593 /* Don't bother clearing storage if the set is all ones. */
e1a43f73
PB
4594 if (TREE_CHAIN (elt) != NULL_TREE
4595 || (TREE_PURPOSE (elt) == NULL_TREE
4596 ? nbits != 1
4597 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4598 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4599 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4600 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4601 != nbits))))
4602 clear_storage (target, expr_size (exp),
4603 TYPE_ALIGN (type) / BITS_PER_UNIT);
4604 }
4605
4606 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595
PB
4607 {
4608 /* Start of range of element, or NULL. */
4609 tree startbit = TREE_PURPOSE (elt);
4610 /* End of range of element, or element value. */
4611 tree endbit = TREE_VALUE (elt);
381127e8 4612#ifdef TARGET_MEM_FUNCTIONS
071a6595 4613 HOST_WIDE_INT startb, endb;
381127e8 4614#endif
071a6595
PB
4615 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4616
4617 bitlength_rtx = expand_expr (bitlength,
4618 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4619
4620 /* Handle a non-range tuple element like [ expr ]. */
4621 if (startbit == NULL_TREE)
4622 {
4623 startbit = save_expr (endbit);
4624 endbit = startbit;
4625 }
4626 startbit = convert (sizetype, startbit);
4627 endbit = convert (sizetype, endbit);
4628 if (! integer_zerop (domain_min))
4629 {
4630 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4631 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4632 }
4633 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4634 EXPAND_CONST_ADDRESS);
4635 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4636 EXPAND_CONST_ADDRESS);
4637
4638 if (REG_P (target))
4639 {
4640 targetx = assign_stack_temp (GET_MODE (target),
4641 GET_MODE_SIZE (GET_MODE (target)),
4642 0);
4643 emit_move_insn (targetx, target);
4644 }
4645 else if (GET_CODE (target) == MEM)
4646 targetx = target;
4647 else
4648 abort ();
4649
4650#ifdef TARGET_MEM_FUNCTIONS
4651 /* Optimization: If startbit and endbit are
9faa82d8 4652 constants divisible by BITS_PER_UNIT,
0f41302f 4653 call memset instead. */
071a6595
PB
4654 if (TREE_CODE (startbit) == INTEGER_CST
4655 && TREE_CODE (endbit) == INTEGER_CST
4656 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 4657 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 4658 {
071a6595
PB
4659 emit_library_call (memset_libfunc, 0,
4660 VOIDmode, 3,
e1a43f73
PB
4661 plus_constant (XEXP (targetx, 0),
4662 startb / BITS_PER_UNIT),
071a6595 4663 Pmode,
3b6f75e2 4664 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 4665 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 4666 TYPE_MODE (sizetype));
071a6595
PB
4667 }
4668 else
4669#endif
4670 {
38a448ca 4671 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
071a6595
PB
4672 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4673 bitlength_rtx, TYPE_MODE (sizetype),
4674 startbit_rtx, TYPE_MODE (sizetype),
4675 endbit_rtx, TYPE_MODE (sizetype));
4676 }
4677 if (REG_P (target))
4678 emit_move_insn (target, targetx);
4679 }
4680 }
bbf6f052
RK
4681
4682 else
4683 abort ();
4684}
4685
4686/* Store the value of EXP (an expression tree)
4687 into a subfield of TARGET which has mode MODE and occupies
4688 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4689 If MODE is VOIDmode, it means that we are storing into a bit-field.
4690
4691 If VALUE_MODE is VOIDmode, return nothing in particular.
4692 UNSIGNEDP is not used in this case.
4693
4694 Otherwise, return an rtx for the value stored. This rtx
4695 has mode VALUE_MODE if that is convenient to do.
4696 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4697
4698 ALIGN is the alignment that TARGET is known to have, measured in bytes.
ece32014
MM
4699 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4700
4701 ALIAS_SET is the alias set for the destination. This value will
4702 (in general) be different from that for TARGET, since TARGET is a
4703 reference to the containing structure. */
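/* A typical call, for a hypothetical unsigned 9-bit bit-field at the
   start of its containing word, might look roughly like
   store_field (target, 9, 0, VOIDmode, exp, word_mode, 1,
                align, total_size, alias_set);
   VOIDmode for MODE marks the bit-field case, and the nonzero
   VALUE_MODE asks for the stored value back as an rtx. */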
bbf6f052
RK
4704
4705static rtx
4706store_field (target, bitsize, bitpos, mode, exp, value_mode,
ece32014 4707 unsignedp, align, total_size, alias_set)
bbf6f052
RK
4708 rtx target;
4709 int bitsize, bitpos;
4710 enum machine_mode mode;
4711 tree exp;
4712 enum machine_mode value_mode;
4713 int unsignedp;
4714 int align;
4715 int total_size;
ece32014 4716 int alias_set;
bbf6f052 4717{
906c4e36 4718 HOST_WIDE_INT width_mask = 0;
bbf6f052 4719
e9a25f70
JL
4720 if (TREE_CODE (exp) == ERROR_MARK)
4721 return const0_rtx;
4722
906c4e36
RK
4723 if (bitsize < HOST_BITS_PER_WIDE_INT)
4724 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
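/* E.g. for a 5-bit field, WIDTH_MASK becomes
   ((HOST_WIDE_INT) 1 << 5) - 1 == 0x1f; it is used below to mask TEMP
   down to BITSIZE bits so the just-stored value can be returned
   without refetching the bit-field. */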
bbf6f052
RK
4725
4726 /* If we are storing into an unaligned field of an aligned union that is
4727 in a register, we may have the mode of TARGET being an integer mode but
4728 MODE == BLKmode. In that case, get an aligned object whose size and
4729 alignment are the same as TARGET and store TARGET into it (we can avoid
4730 the store if the field being stored is the entire width of TARGET). Then
4731 call ourselves recursively to store the field into a BLKmode version of
4732 that object. Finally, load from the object into TARGET. This is not
4733 very efficient in general, but should only be slightly more expensive
4734 than the otherwise-required unaligned accesses. Perhaps this can be
4735 cleaned up later. */
4736
4737 if (mode == BLKmode
4738 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4739 {
4740 rtx object = assign_stack_temp (GET_MODE (target),
4741 GET_MODE_SIZE (GET_MODE (target)), 0);
4742 rtx blk_object = copy_rtx (object);
4743
c6df88cb
MM
4744 MEM_SET_IN_STRUCT_P (object, 1);
4745 MEM_SET_IN_STRUCT_P (blk_object, 1);
bbf6f052
RK
4746 PUT_MODE (blk_object, BLKmode);
4747
4748 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4749 emit_move_insn (object, target);
4750
4751 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
ece32014 4752 align, total_size, alias_set);
bbf6f052 4753
46093b97
RS
4754 /* Even though we aren't returning target, we need to
4755 give it the updated value. */
bbf6f052
RK
4756 emit_move_insn (target, object);
4757
46093b97 4758 return blk_object;
bbf6f052
RK
4759 }
4760
4761 /* If the structure is in a register or if the component
4762 is a bit field, we cannot use addressing to access it.
4763 Use bit-field techniques or SUBREG to store in it. */
4764
4fa52007 4765 if (mode == VOIDmode
6ab06cbb
JW
4766 || (mode != BLKmode && ! direct_store[(int) mode]
4767 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4768 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4fa52007 4769 || GET_CODE (target) == REG
c980ac49 4770 || GET_CODE (target) == SUBREG
ccc98036
RS
4771 /* If the field isn't aligned enough to store as an ordinary memref,
4772 store it as a bit field. */
e1565e65 4773 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
14a774a9
RK
4774 && (align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode)
4775 || bitpos % GET_MODE_ALIGNMENT (mode)))
e1565e65 4776 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
14a774a9
RK
4777 && (TYPE_ALIGN (TREE_TYPE (exp)) > align * BITS_PER_UNIT
4778 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4779 /* If the RHS and field are a constant size and the size of the
4780 RHS isn't the same size as the bitfield, we must use bitfield
4781 operations. */
4782 || ((bitsize >= 0
4783 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST)
4784 && (TREE_INT_CST_HIGH (TYPE_SIZE (TREE_TYPE (exp))) != 0
4785 || TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))) != bitsize)))
bbf6f052 4786 {
906c4e36 4787 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 4788
ef19912d
RK
4789 /* If BITSIZE is narrower than the size of the type of EXP
4790 we will be narrowing TEMP. Normally, what's wanted are the
4791 low-order bits. However, if EXP's type is a record and this is a
4792 big-endian machine, we want the upper BITSIZE bits. */
4793 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4794 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4795 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4796 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4797 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4798 - bitsize),
4799 temp, 1);
4800
bbd6cf73
RK
4801 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4802 MODE. */
4803 if (mode != VOIDmode && mode != BLKmode
4804 && mode != TYPE_MODE (TREE_TYPE (exp)))
4805 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4806
a281e72d
RK
4807 /* If the modes of TARGET and TEMP are both BLKmode, both
4808 must be in memory and BITPOS must be aligned on a byte
4809 boundary. If so, we simply do a block copy. */
4810 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4811 {
4812 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4813 || bitpos % BITS_PER_UNIT != 0)
4814 abort ();
4815
0086427c
RK
4816 target = change_address (target, VOIDmode,
4817 plus_constant (XEXP (target, 0),
a281e72d
RK
4818 bitpos / BITS_PER_UNIT));
4819
14a774a9
RK
4820 /* Find an alignment that is consistent with the bit position. */
4821 while ((bitpos % (align * BITS_PER_UNIT)) != 0)
4822 align >>= 1;
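/* For example, with BITPOS == 24 and ALIGN == 4 (32-bit alignment):
   24 % 32 != 0 halves ALIGN to 2, 24 % 16 != 0 halves it to 1, and
   24 % 8 == 0 stops the loop, so a field starting at byte 3 is
   accessed with byte alignment. */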
4823
a281e72d
RK
4824 emit_block_move (target, temp,
4825 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4826 / BITS_PER_UNIT),
14a774a9 4827 align);
a281e72d
RK
4828
4829 return value_mode == VOIDmode ? const0_rtx : target;
4830 }
4831
bbf6f052
RK
4832 /* Store the value in the bitfield. */
4833 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4834 if (value_mode != VOIDmode)
4835 {
4836 /* The caller wants an rtx for the value. */
4837 /* If possible, avoid refetching from the bitfield itself. */
4838 if (width_mask != 0
4839 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 4840 {
9074de27 4841 tree count;
5c4d7cfb 4842 enum machine_mode tmode;
86a2c12a 4843
5c4d7cfb
RS
4844 if (unsignedp)
4845 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4846 tmode = GET_MODE (temp);
86a2c12a
RS
4847 if (tmode == VOIDmode)
4848 tmode = value_mode;
5c4d7cfb
RS
4849 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4850 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4851 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4852 }
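/* The shift pair above sign-extends a signed bit-field value without
   refetching it: e.g. for BITSIZE == 5 in a 32-bit TMODE, COUNT is
   32 - 5 == 27, so the field is pushed to the top of the word and the
   arithmetic right shift brings it back with the sign bit
   replicated. */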
bbf6f052 4853 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
4854 NULL_RTX, value_mode, 0, align,
4855 total_size);
bbf6f052
RK
4856 }
4857 return const0_rtx;
4858 }
4859 else
4860 {
4861 rtx addr = XEXP (target, 0);
4862 rtx to_rtx;
4863
4864 /* If a value is wanted, it must be the lhs;
4865 so make the address stable for multiple use. */
4866
4867 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4868 && ! CONSTANT_ADDRESS_P (addr)
4869 /* A frame-pointer reference is already stable. */
4870 && ! (GET_CODE (addr) == PLUS
4871 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4872 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4873 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4874 addr = copy_to_reg (addr);
4875
4876 /* Now build a reference to just the desired component. */
4877
effbcc6a
RK
4878 to_rtx = copy_rtx (change_address (target, mode,
4879 plus_constant (addr,
4880 (bitpos
4881 / BITS_PER_UNIT))));
c6df88cb 4882 MEM_SET_IN_STRUCT_P (to_rtx, 1);
ece32014 4883 MEM_ALIAS_SET (to_rtx) = alias_set;
bbf6f052
RK
4884
4885 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4886 }
4887}
\f
/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
   or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
   ARRAY_REFs and find the ultimate containing object, which we return.

   We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
   bit position, and *PUNSIGNEDP to the signedness of the field.
   If the position of the field is variable, we store a tree
   giving the variable offset (in units) in *POFFSET.
   This offset is in addition to the bit position.
   If the position is not variable, we store 0 in *POFFSET.
   We set *PALIGNMENT to the alignment in bytes of the address that will be
   computed.  This is the alignment of the thing we return if *POFFSET
   is zero, but can be less strictly aligned if *POFFSET is nonzero.

   If any of the extraction expressions is volatile,
   we store 1 in *PVOLATILEP.  Otherwise we don't change that.

   If the field is a bit-field, *PMODE is set to VOIDmode.  Otherwise, it
   is a mode that can be used to access the field.  In that case, *PBITSIZE
   is redundant.

   If the field describes a variable-sized object, *PMODE is set to
   VOIDmode and *PBITSIZE is set to -1.  An access cannot be made in
   this case, but the address of the object can be found.  */

tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
		     punsignedp, pvolatilep, palignment)
     tree exp;
     int *pbitsize;
     int *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
     int *palignment;
{
  tree orig_exp = exp;
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = integer_zero_node;
  unsigned int alignment = BIGGEST_ALIGNMENT;

  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
	mode = DECL_MODE (TREE_OPERAND (exp, 1));
      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (mode == BLKmode)
	size_tree = TYPE_SIZE (TREE_TYPE (exp));

      *pbitsize = GET_MODE_BITSIZE (mode);
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
    }

  if (size_tree)
    {
      if (TREE_CODE (size_tree) != INTEGER_CST)
	mode = BLKmode, *pbitsize = -1;
      else
	*pbitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */

  *pbitpos = 0;

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
	{
	  tree pos = (TREE_CODE (exp) == COMPONENT_REF
		      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
		      : TREE_OPERAND (exp, 2));
	  tree constant = integer_zero_node, var = pos;

	  /* If this field hasn't been filled in yet, don't go
	     past it.  This should only happen when folding expressions
	     made during type construction.  */
	  if (pos == 0)
	    break;

	  /* Assume here that the offset is a multiple of a unit.
	     If not, there should be an explicitly added constant.  */
	  if (TREE_CODE (pos) == PLUS_EXPR
	      && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
	    constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
	  else if (TREE_CODE (pos) == INTEGER_CST)
	    constant = pos, var = integer_zero_node;

	  *pbitpos += TREE_INT_CST_LOW (constant);
	  offset = size_binop (PLUS_EXPR, offset,
			       size_binop (EXACT_DIV_EXPR, var,
					   size_int (BITS_PER_UNIT)));
	}

      else if (TREE_CODE (exp) == ARRAY_REF)
	{
	  /* This code is based on the code in case ARRAY_REF in expand_expr
	     below.  We assume here that the size of an array element is
	     always an integral multiple of BITS_PER_UNIT.  */

	  tree index = TREE_OPERAND (exp, 1);
	  tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
	  tree low_bound
	    = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	  tree index_type = TREE_TYPE (index);
	  tree xindex;

	  if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
	    {
	      index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
			       index);
	      index_type = TREE_TYPE (index);
	    }

	  /* Optimize the special-case of a zero lower bound.

	     We convert the low_bound to sizetype to avoid some problems
	     with constant folding.  (E.g. suppose the lower bound is 1,
	     and its mode is QI.  Without the conversion, (ARRAY
	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

	     But sizetype isn't quite right either (especially if
	     the lowbound is negative).  FIXME */

	  if (! integer_zerop (low_bound))
	    index = fold (build (MINUS_EXPR, index_type, index,
				 convert (sizetype, low_bound)));

	  if (TREE_CODE (index) == INTEGER_CST)
	    {
	      index = convert (sbitsizetype, index);
	      index_type = TREE_TYPE (index);
	    }

	  xindex = fold (build (MULT_EXPR, sbitsizetype, index,
				convert (sbitsizetype,
					 TYPE_SIZE (TREE_TYPE (exp)))));

	  if (TREE_CODE (xindex) == INTEGER_CST
	      && TREE_INT_CST_HIGH (xindex) == 0)
	    *pbitpos += TREE_INT_CST_LOW (xindex);
	  else
	    {
	      /* Either the bit offset calculated above is not constant, or
		 it overflowed.  In either case, redo the multiplication
		 against the size in units.  This is especially important
		 in the non-constant case to avoid a division at runtime.  */
	      xindex = fold (build (MULT_EXPR, ssizetype, index,
				    convert (ssizetype,
					     TYPE_SIZE_UNIT (TREE_TYPE (exp)))));

	      if (contains_placeholder_p (xindex))
		xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);

	      offset = size_binop (PLUS_EXPR, offset, xindex);
	    }
	}
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
	       && ! ((TREE_CODE (exp) == NOP_EXPR
		      || TREE_CODE (exp) == CONVERT_EXPR)
		     && (TYPE_MODE (TREE_TYPE (exp))
			 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
	break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
	*pvolatilep = 1;

      /* If the offset is non-constant already, then we can't assume any
	 alignment more than the alignment here.  */
      if (! integer_zerop (offset))
	alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

      exp = TREE_OPERAND (exp, 0);
    }

  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
    alignment = MIN (alignment, DECL_ALIGN (exp));
  else if (TREE_TYPE (exp) != 0)
    alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

  if (integer_zerop (offset))
    offset = 0;

  if (offset != 0 && contains_placeholder_p (offset))
    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);

  *pmode = mode;
  *poffset = offset;
  *palignment = alignment / BITS_PER_UNIT;
  return exp;
}
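
/* Illustrative sketch, not part of the compiler: a hypothetical caller
   decomposing a reference such as s.f, where F is a bit-field, would do

	int bitsize, bitpos, unsignedp, volatilep = 0, alignment;
	tree offset;
	enum machine_mode mode;
	tree inner
	  = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
				 &unsignedp, &volatilep, &alignment);

   and get back the containing object (the VAR_DECL for S) as INNER, the
   field's extent in BITSIZE/BITPOS, VOIDmode in MODE (since F is a
   bit-field), and a zero OFFSET (since the position is constant).  */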

/* Subroutine of expand_expr: compute memory_usage from modifier.  */

static enum memory_use_mode
get_memory_usage_from_modifier (modifier)
     enum expand_modifier modifier;
{
  switch (modifier)
    {
    case EXPAND_NORMAL:
    case EXPAND_SUM:
      return MEMORY_USE_RO;
      break;
    case EXPAND_MEMORY_USE_WO:
      return MEMORY_USE_WO;
      break;
    case EXPAND_MEMORY_USE_RW:
      return MEMORY_USE_RW;
      break;
    case EXPAND_MEMORY_USE_DONT:
      /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
	 MEMORY_USE_DONT, because they are modifiers to a call of
	 expand_expr in the ADDR_EXPR case of expand_expr.  */
    case EXPAND_CONST_ADDRESS:
    case EXPAND_INITIALIZER:
      return MEMORY_USE_DONT;
    case EXPAND_MEMORY_USE_BAD:
    default:
      abort ();
    }
}
\f
/* Given an rtx VALUE that may contain additions and multiplications,
   return an equivalent value that just refers to a register or memory.
   This is done by generating instructions to perform the arithmetic
   and returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);

  /* Check for a PIC address load.  */
  if (flag_pic
      && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
	  || GET_CODE (XEXP (value, 1)) == LABEL_REF
	  || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
	subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
			  force_operand (op2, NULL_RTX),
			  target, 0);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
	  && !(GET_CODE (op2) == REG && op2 != subtarget))
	subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
	{
	  binoptab = add_optab;
	  op2 = negate_rtx (GET_MODE (value), op2);
	}

      /* Check for an addition with OP2 a constant integer and our first
	 operand a PLUS of a virtual register and something else.  In that
	 case, we want to emit the sum of the virtual register and the
	 constant first and then add the other value.  This allows virtual
	 register instantiation to simply modify the constant rather than
	 creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
	  && GET_CODE (XEXP (value, 0)) == PLUS
	  && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
	  && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
	{
	  rtx temp = expand_binop (GET_MODE (value), binoptab,
				   XEXP (XEXP (value, 0), 0), op2,
				   subtarget, 0, OPTAB_LIB_WIDEN);
	  return expand_binop (GET_MODE (value), binoptab, temp,
			       force_operand (XEXP (XEXP (value, 0), 1), 0),
			       target, 0, OPTAB_LIB_WIDEN);
	}

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
			   force_operand (op2, NULL_RTX),
			   target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
	 because the only operations we are expanding here are signed ones.  */
    }
  return value;
}
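
/* Illustrative sketch, not part of the compiler: for a VALUE such as
   (plus (reg R) (const_int 4)), a hypothetical caller doing

	rtx op = force_operand (value, NULL_RTX);

   gets back a pseudo register holding R + 4, with the addition emitted
   via expand_binop above; a VALUE that is already a REG, MEM or constant
   falls through to the final return and comes back unchanged.  */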
\f
/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
	rtx target = assign_temp (part_type, 0, 1, 1);
	if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
	  target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
	parts = tree_cons (to_be_saved,
			   build (RTL_EXPR, part_type, NULL_TREE,
				  (tree) target),
			   parts);
	store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}

/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */

static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else if (TREE_PURPOSE (tail))
      {
	tree part = TREE_VALUE (tail);
	tree part_type = TREE_TYPE (part);
	tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
	parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}

/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

static int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static int save_expr_count;
  static int save_expr_size = 0;
  static tree *save_expr_rewritten;
  static tree save_expr_trees[256];

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
	 have no way of allocating temporaries of variable size
	 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
	 So we assume here that something at a higher level has prevented a
	 clash.  This is somewhat bogus, but the best we can do.  Only
	 do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
	  && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
	      || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
	      || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
		 != INTEGER_CST)
	  && GET_MODE (x) == BLKmode))
    return 1;

  if (top_p && save_expr_size == 0)
    {
      int rtn;

      save_expr_count = 0;
      save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
      save_expr_rewritten = &save_expr_trees[0];

      rtn = safe_from_p (x, exp, 1);

      for (i = 0; i < save_expr_count; ++i)
	{
	  if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
	    abort ();
	  TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
	}

      save_expr_size = 0;

      return rtn;
    }

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
	return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
	  || (GET_CODE (XEXP (x, 0)) == PLUS
	      && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
	return ((TREE_VALUE (exp) == 0
		 || safe_from_p (x, TREE_VALUE (exp), 0))
		&& (TREE_CHAIN (exp) == 0
		    || safe_from_p (x, TREE_CHAIN (exp), 0)));
      else if (TREE_CODE (exp) == ERROR_MARK)
	return 1;	/* An already-visited SAVE_EXPR?  */
      else
	return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
	      && safe_from_p (x, TREE_OPERAND (exp, 1), 0));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
	 the expression.  If it is set, we conflict iff we are that rtx or
	 both are in memory.  Otherwise, we check all operands of the
	 expression recursively.  */

      switch (TREE_CODE (exp))
	{
	case ADDR_EXPR:
	  return (staticp (TREE_OPERAND (exp, 0))
		  || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
		  || TREE_STATIC (exp));

	case INDIRECT_REF:
	  if (GET_CODE (x) == MEM)
	    return 0;
	  break;

	case CALL_EXPR:
	  exp_rtl = CALL_EXPR_RTL (exp);
	  if (exp_rtl == 0)
	    {
	      /* Assume that the call will clobber all hard registers and
		 all of memory.  */
	      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
		  || GET_CODE (x) == MEM)
		return 0;
	    }

	  break;

	case RTL_EXPR:
	  /* If a sequence exists, we would have to scan every instruction
	     in the sequence to see if it was safe.  This is probably not
	     worthwhile.  */
	  if (RTL_EXPR_SEQUENCE (exp))
	    return 0;

	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case WITH_CLEANUP_EXPR:
	  exp_rtl = RTL_EXPR_RTL (exp);
	  break;

	case CLEANUP_POINT_EXPR:
	  return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

	case SAVE_EXPR:
	  exp_rtl = SAVE_EXPR_RTL (exp);
	  if (exp_rtl)
	    break;

	  /* This SAVE_EXPR might appear many times in the top-level
	     safe_from_p() expression, and if it has a complex
	     subexpression, examining it multiple times could result
	     in a combinatorial explosion.  E.g. on an Alpha
	     running at least 200MHz, a Fortran test case compiled with
	     optimization took about 28 minutes to compile -- even though
	     it was only a few lines long, and the complicated line causing
	     so much time to be spent in the earlier version of safe_from_p()
	     had only 293 or so unique nodes.

	     So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
	     where it is so we can turn it back in the top-level safe_from_p()
	     when we're done.  */

	  /* For now, don't bother re-sizing the array.  */
	  if (save_expr_count >= save_expr_size)
	    return 0;
	  save_expr_rewritten[save_expr_count++] = exp;

	  nops = tree_code_length[(int) SAVE_EXPR];
	  for (i = 0; i < nops; i++)
	    {
	      tree operand = TREE_OPERAND (exp, i);
	      if (operand == NULL_TREE)
		continue;
	      TREE_SET_CODE (exp, ERROR_MARK);
	      if (!safe_from_p (x, operand, 0))
		return 0;
	      TREE_SET_CODE (exp, SAVE_EXPR);
	    }
	  TREE_SET_CODE (exp, ERROR_MARK);
	  return 1;

	case BIND_EXPR:
	  /* The only operand we look at is operand 1.  The rest aren't
	     part of the expression.  */
	  return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

	case METHOD_CALL_EXPR:
	  /* This takes an rtx argument, but shouldn't appear here.  */
	  abort ();

	default:
	  break;
	}

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
	break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
	if (TREE_OPERAND (exp, i) != 0
	    && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
	  return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
	{
	  exp_rtl = SUBREG_REG (exp_rtl);
	  if (GET_CODE (exp_rtl) == REG
	      && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
	    return 0;
	}

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
	 are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
		|| (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
		    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
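
/* Illustrative sketch, not part of the compiler: before expanding the rhs
   of an assignment directly into the lhs's rtx, a hypothetical caller asks

	if (safe_from_p (lhs_rtx, rhs_exp, 1))
	  ... expand RHS_EXP straight into LHS_RTX ...

   (LHS_RTX and RHS_EXP are illustrative names).  A nonzero answer means
   the rhs cannot reference the lhs location, so no temporary is needed;
   a zero answer is always safe and merely costs a temporary.  */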

/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}

/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}

#ifdef MAX_INTEGER_COMPUTATION_MODE
void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code;
  enum machine_mode mode;

  /* Strip any NOPs that don't change the mode.  */
  STRIP_NOPS (exp);
  code = TREE_CODE (exp);

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

  /* First check the type of the overall operation.  We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }
}
#endif
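
/* Illustrative note, not part of the compiler: on a hypothetical target
   defining MAX_INTEGER_COMPUTATION_MODE as SImode, an expression whose
   type, or whose unary/binary/comparison operands, has an integer mode
   wider than SImode (DImode, say) makes the checks above report
   "unsupported wide integer operation" via fatal.  */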

\f
/* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
   has any readonly fields.  If any of the fields have types that
   contain readonly fields, return true as well.  */

static int
readonly_fields_p (type)
     tree type;
{
  tree field;

  for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL
	&& (TREE_READONLY (field)
	    || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
		&& readonly_fields_p (TREE_TYPE (field)))))
      return 1;

  return 0;
}
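
/* Illustrative note, not part of the compiler: given hypothetical C types

	struct inner { const char c; };
	struct outer { int i; struct inner in; };

   readonly_fields_p returns 1 for struct outer: field IN is not readonly
   itself, but the recursive call sees the readonly field C inside its
   RECORD_TYPE.  */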
\f
/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */
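
/* Illustrative sketch, not part of the compiler: the common way to use the
   function below is

	rtx r = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);

   letting expand_expr choose both the location and the mode, while

	expand_expr (exp, const0_rtx, VOIDmode, EXPAND_NORMAL);

   says the value is to be ignored and only side effects need code.  */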

rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode;
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;
  /* Used by check-memory-usage to make modifier read only.  */
  enum expand_modifier ro_modifier;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
	return op0;
      return const0_rtx;
    }

  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  original_target = target;
  ignore = (target == const0_rtx
	    || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
		 || code == CONVERT_EXPR || code == REFERENCE_EXPR
		 || code == COND_EXPR)
		&& TREE_CODE (type) == VOID_TYPE));

  /* Make a read-only version of the modifier.  */
  if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
      || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
    ro_modifier = modifier;
  else
    ro_modifier = EXPAND_NORMAL;

  /* Don't use hard regs as subtargets, because the combiner
     can only handle pseudo regs.  */
  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
    subtarget = 0;
  /* Avoid subtargets inside loops,
     since they hide some invariant expressions.  */
  if (preserve_subexpressions_p ())
    subtarget = 0;

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
	return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored, but
	 don't do this if all we are doing is taking its address.  */
      if (TREE_THIS_VOLATILE (exp)
	  && TREE_CODE (exp) != FUNCTION_DECL
	  && mode != VOIDmode && mode != BLKmode
	  && modifier != EXPAND_CONST_ADDRESS)
	{
	  temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
	  if (GET_CODE (temp) == MEM)
	    temp = copy_to_reg (temp);
	  return const0_rtx;
	}

      if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
	  || code == INDIRECT_REF || code == BUFFER_REF)
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, ro_modifier);
      else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
	       || code == ARRAY_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
	  return const0_rtx;
	}
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
	       && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
	/* If the second operand has no side effects, just evaluate
	   the first.  */
	return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
			    VOIDmode, ro_modifier);
      else if (code == BIT_FIELD_REF)
	{
	  expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
	  expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
	  expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
	  return const0_rtx;
	}

      target = 0;
    }

#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

  if (target
      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
	  && mode > MAX_INTEGER_COMPUTATION_MODE)
	fatal ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    fatal ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif

  /* If will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
	tree function = decl_function_context (exp);
	/* Handle using a label in a containing function.  */
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  {
	    struct function *p = find_function_data (function);
	    /* Allocate in the memory associated with the function
	       that the label is in.  */
	    push_obstacks (p->function_obstack,
			   p->function_maybepermanent_obstack);

	    p->expr->x_forced_labels
	      = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
				   p->expr->x_forced_labels);
	    pop_obstacks ();
	  }
	else
	  {
	    if (modifier == EXPAND_INITIALIZER)
	      forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
						 label_rtx (exp),
						 forced_labels);
	  }

	temp = gen_rtx_MEM (FUNCTION_MODE,
			    gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
	if (function != current_function_decl
	    && function != inline_function_decl && function != 0)
	  LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
	return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
	{
	  error_with_decl (exp, "prior parameter's size depends on `%s'");
	  return CONST0_RTX (mode);
	}

      /* ... fall through ...  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
	 but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
	  && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
	{
	  push_obstacks_nochange ();
	  end_temporary_allocation ();
	  layout_decl (exp, 0);
	  PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
	  pop_obstacks ();
	}

      /* Although static-storage variables start off initialized, according to
	 ANSI C, a memcpy could overwrite them with uninitialized values.  So
	 we check them too.  This also lets us check for read-only variables
	 accessed via a non-const declaration, in case it won't be detected
	 any other way (e.g., in an embedded system or OS kernel without
	 memory protection).

	 Aggregates are not checked here; they're handled elsewhere.  */
      if (cfun && current_function_check_memory_usage
	  && code == VAR_DECL
	  && GET_CODE (DECL_RTL (exp)) == MEM
	  && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	{
	  enum memory_use_mode memory_usage;
	  memory_usage = get_memory_usage_from_modifier (modifier);

	  if (memory_usage != MEMORY_USE_DONT)
	    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
			       XEXP (DECL_RTL (exp), 0), Pmode,
			       GEN_INT (int_size_in_bytes (type)),
			       TYPE_MODE (sizetype),
			       GEN_INT (memory_usage),
			       TYPE_MODE (integer_type_node));
	}

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
	abort ();

      /* Ensure variable marked as used even if it doesn't go through
	 a parser.  If it hasn't been used yet, write out an external
	 definition.  */
      if (! TREE_USED (exp))
	{
	  assemble_external (exp);
	  TREE_USED (exp) = 1;
	}

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
	  && context != inline_function_decl
	  /* If var is static, we don't need a static chain to access it.  */
	  && ! (GET_CODE (DECL_RTL (exp)) == MEM
		&& CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
	{
	  rtx addr;

	  /* Mark as non-local and addressable.  */
	  DECL_NONLOCAL (exp) = 1;
	  if (DECL_NO_STATIC_CHAIN (current_function_decl))
	    abort ();
	  mark_addressable (exp);
	  if (GET_CODE (DECL_RTL (exp)) != MEM)
	    abort ();
	  addr = XEXP (DECL_RTL (exp), 0);
	  if (GET_CODE (addr) == MEM)
	    addr = gen_rtx_MEM (Pmode,
				fix_lexical_addr (XEXP (addr, 0), exp));
	  else
	    addr = fix_lexical_addr (addr, exp);
	  temp = change_address (DECL_RTL (exp), mode, addr);
	}

      /* This is the case of an array whose size is to be determined
	 from its initializer, while the initializer is still being parsed.
	 See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
	temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
			       XEXP (DECL_RTL (exp), 0));

      /* If DECL_RTL is memory, we are in the normal case and either
	 the address is not valid or it is not a register and -fforce-addr
	 is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
	       && modifier != EXPAND_CONST_ADDRESS
	       && modifier != EXPAND_SUM
	       && modifier != EXPAND_INITIALIZER
	       && (! memory_address_p (DECL_MODE (exp),
				       XEXP (DECL_RTL (exp), 0))
		   || (flag_force_addr
		       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
	temp = change_address (DECL_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
	 if the address is a register.  */
      if (temp != 0)
	{
	  if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
	    mark_reg_pointer (XEXP (temp, 0),
			      DECL_ALIGN (exp) / BITS_PER_UNIT);

	  return temp;
	}

      /* If the mode of DECL_RTL does not match that of the decl, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
	  && GET_MODE (DECL_RTL (exp)) != mode)
	{
	  /* Get the signedness used for this variable.  Ensure we get the
	     same mode we got when the variable was declared.  */
	  if (GET_MODE (DECL_RTL (exp))
	      != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
	    abort ();

	  temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return DECL_RTL (exp);

    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
				 TREE_INT_CST_HIGH (exp),
				 mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
			  EXPAND_MEMORY_USE_BAD);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
	 which will be turned into memory by reload if necessary.

	 We used to force a register so that loop.c could see it.  But
	 this does not allow gen_* patterns to perform optimizations with
	 the constants.  It also produces two insns in cases like "x = 1.0;".
	 On most machines, floating-point constants are not permitted in
	 many insns, so we'd end up copying it to a register in any case.

	 Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
	output_constant_def (exp);

      /* TREE_CST_RTL probably contains a constant address.
	 On RISC machines where a constant address isn't valid,
	 make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
	  && modifier != EXPAND_CONST_ADDRESS
	  && modifier != EXPAND_INITIALIZER
	  && modifier != EXPAND_SUM
	  && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
	      || (flag_force_addr
		  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
	return change_address (TREE_CST_RTL (exp), VOIDmode,
			       copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);

    case EXPR_WITH_FILE_LOCATION:
      {
	rtx to_return;
	char *saved_input_filename = input_filename;
	int saved_lineno = lineno;
	input_filename = EXPR_WFL_FILENAME (exp);
	lineno = EXPR_WFL_LINENO (exp);
	if (EXPR_WFL_EMIT_LINE_NOTE (exp))
	  emit_line_note (input_filename, lineno);
	/* Possibly avoid switching back and forth here.  */
	to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
	input_filename = saved_input_filename;
	lineno = saved_lineno;
	return to_return;
      }

    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
	 initialization function and move it into our context.  */
      if (context == 0)
	SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
	 because that is the inline function whose vars, types, etc.
	 are being merged into the current function.
	 See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
	context = 0;

      /* If this is non-local, handle it.  */
      if (context)
	{
	  /* The following call just exists to abort if the context is
	     not of a containing function.  */
	  find_function_data (context);

	  temp = SAVE_EXPR_RTL (exp);
	  if (temp && GET_CODE (temp) == REG)
	    {
	      put_var_into_stack (exp);
	      temp = SAVE_EXPR_RTL (exp);
	    }
	  if (temp == 0 || GET_CODE (temp) != MEM)
	    abort ();
	  return change_address (temp, mode,
				 fix_lexical_addr (XEXP (temp, 0), exp));
	}
      if (SAVE_EXPR_RTL (exp) == 0)
	{
	  if (mode == VOIDmode)
	    temp = const0_rtx;
	  else
	    temp = assign_temp (type, 3, 0, 0);

	  SAVE_EXPR_RTL (exp) = temp;
	  if (!optimize && GET_CODE (temp) == REG)
	    save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
						save_expr_regs);

	  /* If the mode of TEMP does not match that of the expression, it
	     must be a promoted value.  We pass store_expr a SUBREG of the
	     wanted mode but mark it so that we know that it was already
	     extended.  Note that `unsignedp' was modified above in
	     this case.  */

	  if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
	    {
	      temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
	      SUBREG_PROMOTED_VAR_P (temp) = 1;
	      SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	    }

	  if (temp == const0_rtx)
	    expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  else
	    store_expr (TREE_OPERAND (exp, 0), temp, 0);

	  TREE_USED (exp) = 1;
	}

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
	 must be a promoted value.  We return a SUBREG of the wanted mode,
	 but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
	  && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
	{
	  /* Compute the signedness and make the proper SUBREG.  */
	  promote_mode (type, mode, &unsignedp, 0);
	  temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
	  SUBREG_PROMOTED_VAR_P (temp) = 1;
	  SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
	  return temp;
	}

      return SAVE_EXPR_RTL (exp);

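      /* Illustrative note, not part of the compiler: a SAVE_EXPR guarantees
	 single evaluation.  If a front end shares one SAVE_EXPR node for
	 X + Y inside (x + y) * (x + y), the first expansion above computes
	 the sum into SAVE_EXPR_RTL, and every later expansion just returns
	 that rtx instead of re-evaluating the operands.  */
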
    case UNSAVE_EXPR:
      {
	rtx temp;
	temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
	TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
	return temp;
      }

    case PLACEHOLDER_EXPR:
      {
	tree placeholder_expr;

	/* If there is an object on the head of the placeholder list,
	   see if some object in it is of type TYPE or a pointer to it.  For
	   further information, see tree.def.  */
	for (placeholder_expr = placeholder_list;
	     placeholder_expr != 0;
	     placeholder_expr = TREE_CHAIN (placeholder_expr))
	  {
	    tree need_type = TYPE_MAIN_VARIANT (type);
	    tree object = 0;
	    tree old_list = placeholder_list;
	    tree elt;

	    /* Find the outermost reference that is of the type we want.
	       If none, see if any object has a type that is a pointer to
	       the type we want.  */
	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
		object = elt;

	    for (elt = TREE_PURPOSE (placeholder_expr);
		 elt != 0 && object == 0;
		 elt
		 = ((TREE_CODE (elt) == COMPOUND_EXPR
		     || TREE_CODE (elt) == COND_EXPR)
		    ? TREE_OPERAND (elt, 1)
		    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
		       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
		    ? TREE_OPERAND (elt, 0) : 0))
	      if (POINTER_TYPE_P (TREE_TYPE (elt))
		  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
		      == need_type))
		object = build1 (INDIRECT_REF, need_type, elt);

	    if (object != 0)
	      {
		/* Expand this object skipping the list entries before
		   it was found in case it is also a PLACEHOLDER_EXPR.
		   In that case, we want to translate it using subsequent
		   entries.  */
		placeholder_list = TREE_CHAIN (placeholder_expr);
		temp = expand_expr (object, original_target, tmode,
				    ro_modifier);
		placeholder_list = old_list;
		return temp;
	      }
	  }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
	 and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
				    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
			    tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;

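      /* Illustrative note, not part of the compiler: the two codes above
	 cooperate like a dynamic binding.  Expanding
	 (WITH_RECORD_EXPR expr record) pushes RECORD on placeholder_list,
	 so that a PLACEHOLDER_EXPR met while expanding EXPR -- e.g. inside
	 the TYPE_SIZE of a variable-sized field -- is resolved to a
	 reference into RECORD by the search loops above.  */
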
    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
	expand_goto (TREE_OPERAND (exp, 0));
      else
	expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL_PTR,
				 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
	expand_expr_stmt (LABELED_BLOCK_BODY (exp));
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
	sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;

    case BIND_EXPR:
      {
	tree vars = TREE_OPERAND (exp, 0);
	int vars_need_expansion = 0;

	/* Need to open a binding contour here because
	   if there are any cleanups they must be contained here.  */
	expand_start_bindings (2);

	/* Mark the corresponding BLOCK for output in its proper place.  */
	if (TREE_OPERAND (exp, 2) != 0
	    && ! TREE_USED (TREE_OPERAND (exp, 2)))
	  insert_block (TREE_OPERAND (exp, 2));

	/* If VARS have not yet been expanded, expand them now.  */
	while (vars)
	  {
	    if (DECL_RTL (vars) == 0)
	      {
		vars_need_expansion = 1;
		expand_decl (vars);
	      }
	    expand_decl_init (vars);
	    vars = TREE_CHAIN (vars);
	  }

	temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);

	expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

	return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
	{
	  if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
	    abort ();
	  emit_insns (RTL_EXPR_SEQUENCE (exp));
	  RTL_EXPR_SEQUENCE (exp) = const0_rtx;
	}
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
	 subexpressions.  */
      if (ignore)
	{
	  tree elt;
	  for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
	    expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
			 EXPAND_MEMORY_USE_BAD);
	  return const0_rtx;
	}

      /* All elts simple constants => refer to a constant in memory.  But
	 if this is a non-BLKmode mode, let it store a field at a time
	 since that should make a CONST_INT or CONST_DOUBLE when we
	 fold.  Likewise, if we have a target we can use, it is best to
	 store directly into the target unless the type is large enough
	 that memcpy will be used.  If we are making an initializer and
	 all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
		&& ((mode == BLKmode
		     && ! (target != 0 && safe_from_p (target, exp, 1)))
		    || TREE_ADDRESSABLE (exp)
		    || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
			&& (!MOVE_BY_PIECES_P
			    (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
			     TYPE_ALIGN (type) / BITS_PER_UNIT))
			&& ! mostly_zeros_p (exp))))
	       || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
	{
	  rtx constructor = output_constant_def (exp);
	  if (modifier != EXPAND_CONST_ADDRESS
	      && modifier != EXPAND_INITIALIZER
	      && modifier != EXPAND_SUM
	      && (! memory_address_p (GET_MODE (constructor),
				      XEXP (constructor, 0))
		  || (flag_force_addr
		      && GET_CODE (XEXP (constructor, 0)) != REG)))
	    constructor = change_address (constructor, VOIDmode,
					  XEXP (constructor, 0));
	  return constructor;
	}

      else
	{
	  /* Handle calls that pass values in multiple non-contiguous
	     locations.  The Irix 6 ABI has examples of this.  */
	  if (target == 0 || ! safe_from_p (target, exp, 1)
	      || GET_CODE (target) == PARALLEL)
	    {
	      if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (TREE_READONLY (exp))
	    {
	      if (GET_CODE (target) == MEM)
		target = copy_rtx (target);

	      RTX_UNCHANGING_P (target) = 1;
	    }

	  store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
			     int_size_in_bytes (TREE_TYPE (exp)));
	  return target;
	}

    case INDIRECT_REF:
      {
	tree exp1 = TREE_OPERAND (exp, 0);
	tree exp2;
	tree index;
	tree string = string_constant (exp1, &index);
	int i;

	/* Try to optimize reads from const strings.  */
	if (string
	    && TREE_CODE (string) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && !TREE_INT_CST_HIGH (index)
	    && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1
	    && modifier != EXPAND_MEMORY_USE_WO)
	  return GEN_INT (TREE_STRING_POINTER (string)[i]);

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);

	if (cfun && current_function_check_memory_usage
	    && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		in_check_memory_usage = 1;
		emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				   op0, Pmode,
				   GEN_INT (int_size_in_bytes (type)),
				   TYPE_MODE (sizetype),
				   GEN_INT (memory_usage),
				   TYPE_MODE (integer_type_node));
		in_check_memory_usage = 0;
	      }
	  }

	temp = gen_rtx_MEM (mode, op0);
	/* If address was computed by addition,
	   mark this as an element of an aggregate.  */
	if (TREE_CODE (exp1) == PLUS_EXPR
	    || (TREE_CODE (exp1) == SAVE_EXPR
		&& TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
	    || AGGREGATE_TYPE_P (TREE_TYPE (exp))
	    || (TREE_CODE (exp1) == ADDR_EXPR
		&& (exp2 = TREE_OPERAND (exp1, 0))
		&& AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
	  MEM_SET_IN_STRUCT_P (temp, 1);

	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
	MEM_ALIAS_SET (temp) = get_alias_set (exp);

	/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
	   here, because, in C and C++, the fact that a location is accessed
	   through a pointer to const does not mean that the value there can
	   never change.  Languages where it can never change should
	   also set TREE_STATIC.  */
	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);

	/* If we are writing to this object and its type is a record with
	   readonly fields, we must mark it as readonly so it will
	   conflict with readonly references to those fields.  */
	if (modifier == EXPAND_MEMORY_USE_WO
	    && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
	  RTX_UNCHANGING_P (temp) = 1;

	return temp;
      }
bbf6f052
RK
6447
6448 case ARRAY_REF:
742920c7
RK
6449 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6450 abort ();
bbf6f052 6451
bbf6f052 6452 {
742920c7
RK
6453 tree array = TREE_OPERAND (exp, 0);
6454 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6455 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6456 tree index = TREE_OPERAND (exp, 1);
6457 tree index_type = TREE_TYPE (index);
08293add 6458 HOST_WIDE_INT i;
b50d17a1 6459
d4c89139
PB
6460 /* Optimize the special-case of a zero lower bound.
6461
6462 We convert the low_bound to sizetype to avoid some problems
6463 with constant folding. (E.g. suppose the lower bound is 1,
6464 and its mode is QI. Without the conversion, (ARRAY
6465 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6466 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6467
6468 But sizetype isn't quite right either (especially if
6469 the lowbound is negative). FIXME */
6470
742920c7 6471 if (! integer_zerop (low_bound))
d4c89139
PB
6472 index = fold (build (MINUS_EXPR, index_type, index,
6473 convert (sizetype, low_bound)));
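/* A freestanding sketch (not GCC code) of the QImode pitfall described
   above: negating the low bound in an 8-bit unsigned type wraps to 255,
   so the subtraction must be folded in sizetype rather than in the
   index's narrow mode.  */
static int
low_bound_pitfall (int index)
{
  unsigned char low = 1;
  int ok    = index - low;                    /* index - 1, as intended */
  int wrong = index + (unsigned char) -low;   /* index + 255 after QImode folding */
  return ok - wrong;                          /* nonzero: they differ by 256 */
}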
742920c7 6474
742920c7 6475 /* Fold an expression like: "foo"[2].
ad2e7dd0
RK
6476 This is not done in fold so it won't happen inside &.
6477 Don't fold if this is for wide characters since it's too
6478 difficult to do correctly and this is a very rare case. */
742920c7
RK
6479
6480 if (TREE_CODE (array) == STRING_CST
6481 && TREE_CODE (index) == INTEGER_CST
6482 && !TREE_INT_CST_HIGH (index)
307b821c 6483 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
ad2e7dd0
RK
6484 && GET_MODE_CLASS (mode) == MODE_INT
6485 && GET_MODE_SIZE (mode) == 1)
307b821c 6486 return GEN_INT (TREE_STRING_POINTER (array)[i]);
bbf6f052 6487
742920c7
RK
6488 /* If this is a constant index into a constant array,
6489 just get the value from the array. Handle both the cases when
6490 we have an explicit constructor and when our operand is a variable
6491 that was declared const. */
4af3895e 6492
742920c7
RK
6493 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6494 {
6495 if (TREE_CODE (index) == INTEGER_CST
6496 && TREE_INT_CST_HIGH (index) == 0)
6497 {
6498 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6499
6500 i = TREE_INT_CST_LOW (index);
6501 while (elem && i--)
6502 elem = TREE_CHAIN (elem);
6503 if (elem)
6504 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 6505 tmode, ro_modifier);
742920c7
RK
6506 }
6507 }
4af3895e 6508
742920c7
RK
6509 else if (optimize >= 1
6510 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6511 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6512 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6513 {
08293add 6514 if (TREE_CODE (index) == INTEGER_CST)
742920c7
RK
6515 {
6516 tree init = DECL_INITIAL (array);
6517
6518 i = TREE_INT_CST_LOW (index);
6519 if (TREE_CODE (init) == CONSTRUCTOR)
6520 {
6521 tree elem = CONSTRUCTOR_ELTS (init);
6522
03dc44a6
RS
6523 while (elem
6524 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
742920c7
RK
6525 elem = TREE_CHAIN (elem);
6526 if (elem)
6527 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 6528 tmode, ro_modifier);
742920c7
RK
6529 }
6530 else if (TREE_CODE (init) == STRING_CST
08293add
RK
6531 && TREE_INT_CST_HIGH (index) == 0
6532 && (TREE_INT_CST_LOW (index)
6533 < TREE_STRING_LENGTH (init)))
6534 return (GEN_INT
6535 (TREE_STRING_POINTER
6536 (init)[TREE_INT_CST_LOW (index)]));
742920c7
RK
6537 }
6538 }
6539 }
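/* Roughly what the branch above catches (illustrative sketch, not part
   of expr.c): at -O1 and above, a constant index into a const array
   with a known initializer is folded straight to the element value.  */
static const int squares[4] = { 0, 1, 4, 9 };
static int
example_const_array (void)
{
  return squares[2];   /* expanded as the constant 4; no memory access */
}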
8c8a8e34 6540
08293add 6541 /* ... fall through ... */
bbf6f052
RK
6542
6543 case COMPONENT_REF:
6544 case BIT_FIELD_REF:
4af3895e 6545 /* If the operand is a CONSTRUCTOR, we can just extract the
7a0b7b9a
RK
6546 appropriate field if it is present. Don't do this if we have
6547 already written the data since we want to refer to that copy
6548 and varasm.c assumes that's what we'll do. */
4af3895e 6549 if (code != ARRAY_REF
7a0b7b9a
RK
6550 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6551 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4af3895e
JVA
6552 {
6553 tree elt;
6554
6555 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6556 elt = TREE_CHAIN (elt))
86b5812c
RK
6557 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6558 /* We can normally use the value of the field in the
6559 CONSTRUCTOR. However, if this is a bitfield in
6560 an integral mode that we can fit in a HOST_WIDE_INT,
6561 we must mask only the number of bits in the bitfield,
6562 since this is done implicitly by the constructor. If
6563 the bitfield does not meet either of those conditions,
6564 we can't do this optimization. */
6565 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6566 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6567 == MODE_INT)
6568 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6569 <= HOST_BITS_PER_WIDE_INT))))
6570 {
6571 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6572 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6573 {
6574 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
86b5812c
RK
6575
6576 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6577 {
6578 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6579 op0 = expand_and (op0, op1, target);
6580 }
6581 else
6582 {
e5e809f4
JL
6583 enum machine_mode imode
6584 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c 6585 tree count
e5e809f4
JL
6586 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6587 0);
86b5812c
RK
6588
6589 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6590 target, 0);
6591 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6592 target, 0);
6593 }
6594 }
6595
6596 return op0;
6597 }
4af3895e
JVA
6598 }
6599
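/* The two masking tricks used above, restated as a plain C sketch
   (illustrative only; assumes a 32-bit int, 1 <= bitsize <= 31, and
   GCC's arithmetic right shift of signed values, as the RTL does).  */
static unsigned int
mask_unsigned_field (unsigned int val, int bitsize)
{
  return val & (((unsigned int) 1 << bitsize) - 1);   /* zero-extend */
}
static int
mask_signed_field (int val, int bitsize)
{
  int shift = 32 - bitsize;
  return (val << shift) >> shift;   /* shift up, arithmetic shift down */
}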
bbf6f052
RK
6600 {
6601 enum machine_mode mode1;
6602 int bitsize;
6603 int bitpos;
7bb0943f 6604 tree offset;
bbf6f052 6605 int volatilep = 0;
034f9101 6606 int alignment;
839c4796
RK
6607 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6608 &mode1, &unsignedp, &volatilep,
6609 &alignment);
bbf6f052 6610
e7f3c83f
RK
6611 /* If we got back the original object, something is wrong. Perhaps
6612 we are evaluating an expression too early. In any event, don't
6613 infinitely recurse. */
6614 if (tem == exp)
6615 abort ();
6616
3d27140a 6617 /* If TEM's type is a union of variable size, pass TARGET to the inner
b74f5ff2
RK
6618 computation, since it will need a temporary and TARGET is known
6619 to have to do. This occurs in unchecked conversion in Ada. */
6620
6621 op0 = expand_expr (tem,
6622 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6623 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6624 != INTEGER_CST)
6625 ? target : NULL_RTX),
4ed67205 6626 VOIDmode,
14a774a9
RK
6627 (modifier == EXPAND_INITIALIZER
6628 || modifier == EXPAND_CONST_ADDRESS)
e5e809f4 6629 ? modifier : EXPAND_NORMAL);
bbf6f052 6630
8c8a8e34 6631 /* If this is a constant, put it into a register if it is a
14a774a9 6632 legitimate constant and OFFSET is 0; otherwise put it in memory. */
8c8a8e34
JW
6633 if (CONSTANT_P (op0))
6634 {
6635 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
14a774a9
RK
6636 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
6637 && offset == 0)
8c8a8e34
JW
6638 op0 = force_reg (mode, op0);
6639 else
6640 op0 = validize_mem (force_const_mem (mode, op0));
6641 }
6642
7bb0943f
RS
6643 if (offset != 0)
6644 {
906c4e36 6645 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f 6646
14a774a9
RK
6647 /* If this object is in memory, put it into a register.
6648 This case can't occur in C, but can in Ada if we have
6649 unchecked conversion of an expression from a scalar type to
6650 an array or record type. */
6651 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6652 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
6653 {
6654 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
6655
6656 mark_temp_addr_taken (memloc);
6657 emit_move_insn (memloc, op0);
6658 op0 = memloc;
6659 }
6660
7bb0943f
RS
6661 if (GET_CODE (op0) != MEM)
6662 abort ();
2d48c13d
JL
6663
6664 if (GET_MODE (offset_rtx) != ptr_mode)
bd070e1a 6665 {
2d48c13d 6666#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 6667 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
2d48c13d 6668#else
bd070e1a 6669 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d 6670#endif
bd070e1a 6671 }
2d48c13d 6672
14a774a9 6673 /* A constant address in OP0 can have VOIDmode; we must not try
efd07ca7 6674 to call force_reg in that case, so avoid it. */
89752202
HB
6675 if (GET_CODE (op0) == MEM
6676 && GET_MODE (op0) == BLKmode
efd07ca7 6677 && GET_MODE (XEXP (op0, 0)) != VOIDmode
14a774a9 6678 && bitsize != 0
89752202
HB
6679 && (bitpos % bitsize) == 0
6680 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6681 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6682 {
6683 rtx temp = change_address (op0, mode1,
6684 plus_constant (XEXP (op0, 0),
6685 (bitpos /
6686 BITS_PER_UNIT)));
6687 if (GET_CODE (XEXP (temp, 0)) == REG)
6688 op0 = temp;
6689 else
6690 op0 = change_address (op0, mode1,
6691 force_reg (GET_MODE (XEXP (temp, 0)),
6692 XEXP (temp, 0)));
6693 bitpos = 0;
6694 }
6695
6696
7bb0943f 6697 op0 = change_address (op0, VOIDmode,
38a448ca 6698 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
c5c76735
JL
6699 force_reg (ptr_mode,
6700 offset_rtx)));
7bb0943f
RS
6701 }
6702
bbf6f052
RK
6703 /* Don't forget about volatility even if this is a bitfield. */
6704 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6705 {
6706 op0 = copy_rtx (op0);
6707 MEM_VOLATILE_P (op0) = 1;
6708 }
6709
921b3427 6710 /* Check the access. */
c5c76735 6711 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
921b3427
RK
6712 {
6713 enum memory_use_mode memory_usage;
6714 memory_usage = get_memory_usage_from_modifier (modifier);
6715
6716 if (memory_usage != MEMORY_USE_DONT)
6717 {
6718 rtx to;
6719 int size;
6720
6721 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6722 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6723
6724 /* Check the access rights of the pointer. */
e9a25f70
JL
6725 if (size > BITS_PER_UNIT)
6726 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 6727 to, Pmode,
e9a25f70
JL
6728 GEN_INT (size / BITS_PER_UNIT),
6729 TYPE_MODE (sizetype),
956d6950
JL
6730 GEN_INT (memory_usage),
6731 TYPE_MODE (integer_type_node));
921b3427
RK
6732 }
6733 }
6734
ccc98036
RS
6735 /* In cases where an aligned union has an unaligned object
6736 as a field, we might be extracting a BLKmode value from
6737 an integer-mode (e.g., SImode) object. Handle this case
6738 by doing the extract into an object as wide as the field
6739 (which we know to be the width of a basic mode), then
f2420d0b
JW
6740 storing into memory, and changing the mode to BLKmode.
6741 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6742 EXPAND_INITIALIZER), then we must not copy to a temporary. */
bbf6f052 6743 if (mode1 == VOIDmode
ccc98036 6744 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
f9409c3a 6745 || (modifier != EXPAND_CONST_ADDRESS
f9409c3a 6746 && modifier != EXPAND_INITIALIZER
c2722ef6
RK
6747 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6748 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6749 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
f9409c3a
JW
6750 /* If the field isn't aligned enough to fetch as a memref,
6751 fetch it as a bit field. */
e1565e65
DE
6752 || (mode1 != BLKmode
6753 && SLOW_UNALIGNED_ACCESS (mode1, alignment)
14a774a9
RK
6754 && ((TYPE_ALIGN (TREE_TYPE (tem))
6755 < (unsigned int) GET_MODE_ALIGNMENT (mode))
6756 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))))
6757 || (modifier != EXPAND_CONST_ADDRESS
6758 && modifier != EXPAND_INITIALIZER
6759 && mode == BLKmode
e1565e65 6760 && SLOW_UNALIGNED_ACCESS (mode, alignment)
14a774a9
RK
6761 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
6762 || bitpos % TYPE_ALIGN (type) != 0)))
bbf6f052 6763 {
bbf6f052
RK
6764 enum machine_mode ext_mode = mode;
6765
14a774a9
RK
6766 if (ext_mode == BLKmode
6767 && ! (target != 0 && GET_CODE (op0) == MEM
6768 && GET_CODE (target) == MEM
6769 && bitpos % BITS_PER_UNIT == 0))
bbf6f052
RK
6770 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6771
6772 if (ext_mode == BLKmode)
a281e72d
RK
6773 {
6774 /* In this case, BITPOS must start at a byte boundary and
6775 TARGET, if specified, must be a MEM. */
6776 if (GET_CODE (op0) != MEM
6777 || (target != 0 && GET_CODE (target) != MEM)
6778 || bitpos % BITS_PER_UNIT != 0)
6779 abort ();
6780
6781 op0 = change_address (op0, VOIDmode,
6782 plus_constant (XEXP (op0, 0),
6783 bitpos / BITS_PER_UNIT));
6784 if (target == 0)
6785 target = assign_temp (type, 0, 1, 1);
6786
6787 emit_block_move (target, op0,
6788 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6789 / BITS_PER_UNIT),
6790 1);
6791
6792 return target;
6793 }
bbf6f052 6794
dc6d66b3
RK
6795 op0 = validize_mem (op0);
6796
6797 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6798 mark_reg_pointer (XEXP (op0, 0), alignment);
6799
6800 op0 = extract_bit_field (op0, bitsize, bitpos,
bbf6f052 6801 unsignedp, target, ext_mode, ext_mode,
034f9101 6802 alignment,
bbf6f052 6803 int_size_in_bytes (TREE_TYPE (tem)));
ef19912d
RK
6804
6805 /* If the result is a record type and BITSIZE is narrower than
6806 the mode of OP0, an integral mode, and this is a big endian
6807 machine, we must put the field into the high-order bits. */
6808 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6809 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6810 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6811 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6812 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6813 - bitsize),
6814 op0, 1);
6815
bbf6f052
RK
6816 if (mode == BLKmode)
6817 {
6818 rtx new = assign_stack_temp (ext_mode,
6819 bitsize / BITS_PER_UNIT, 0);
6820
6821 emit_move_insn (new, op0);
6822 op0 = copy_rtx (new);
6823 PUT_MODE (op0, BLKmode);
c6df88cb 6824 MEM_SET_IN_STRUCT_P (op0, 1);
bbf6f052
RK
6825 }
6826
6827 return op0;
6828 }
6829
05019f83
RK
6830 /* If the result is BLKmode, use that to access the object
6831 now as well. */
6832 if (mode == BLKmode)
6833 mode1 = BLKmode;
6834
bbf6f052
RK
6835 /* Get a reference to just this component. */
6836 if (modifier == EXPAND_CONST_ADDRESS
6837 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
38a448ca
RH
6838 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6839 (bitpos / BITS_PER_UNIT)));
bbf6f052
RK
6840 else
6841 op0 = change_address (op0, mode1,
6842 plus_constant (XEXP (op0, 0),
6843 (bitpos / BITS_PER_UNIT)));
41472af8
MM
6844
6845 if (GET_CODE (op0) == MEM)
6846 MEM_ALIAS_SET (op0) = get_alias_set (exp);
14a774a9 6847
dc6d66b3
RK
6848 if (GET_CODE (XEXP (op0, 0)) == REG)
6849 mark_reg_pointer (XEXP (op0, 0), alignment);
6850
c6df88cb 6851 MEM_SET_IN_STRUCT_P (op0, 1);
bbf6f052 6852 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 6853 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 6854 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 6855 || modifier == EXPAND_INITIALIZER)
bbf6f052 6856 return op0;
0d15e60c 6857 else if (target == 0)
bbf6f052 6858 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 6859
bbf6f052
RK
6860 convert_move (target, op0, unsignedp);
6861 return target;
6862 }
6863
bbf6f052
RK
6864 /* Intended for a reference to a buffer of a file-object in Pascal.
6865 But it's not certain that a special tree code will really be
6866 necessary for these. INDIRECT_REF might work for them. */
6867 case BUFFER_REF:
6868 abort ();
6869
7308a047 6870 case IN_EXPR:
7308a047 6871 {
d6a5ac33
RK
6872 /* Pascal set IN expression.
6873
6874 Algorithm:
6875 rlo = set_low - (set_low%bits_per_word);
6876 the_word = set [ (index - rlo)/bits_per_word ];
6877 bit_index = index % bits_per_word;
6878 bitmask = 1 << bit_index;
6879 return !!(the_word & bitmask); */
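/* A freestanding C rendering of the algorithm above (illustrative
   only; the byte array SET_BYTES and the 8-bit "word" are assumptions
   mirroring the byte_mode/BITS_PER_UNIT arithmetic below).  */
static int
pascal_set_in (const unsigned char *set_bytes, int set_low, int index)
{
  int rlo = set_low - (set_low % 8);
  unsigned char the_word = set_bytes[(index - rlo) / 8];
  int bit_index = index % 8;
  unsigned char bitmask = (unsigned char) (1 << bit_index);
  return !!(the_word & bitmask);
}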
6880
7308a047
RS
6881 tree set = TREE_OPERAND (exp, 0);
6882 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 6883 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 6884 tree set_type = TREE_TYPE (set);
7308a047
RS
6885 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6886 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
6887 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6888 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6889 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6890 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6891 rtx setaddr = XEXP (setval, 0);
6892 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
6893 rtx rlow;
6894 rtx diff, quo, rem, addr, bit, result;
7308a047 6895
d6a5ac33
RK
6896 preexpand_calls (exp);
6897
6898 /* If domain is empty, answer is no. Likewise if index is constant
6899 and out of bounds. */
51723711 6900 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 6901 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 6902 && tree_int_cst_lt (set_high_bound, set_low_bound))
d6a5ac33
RK
6903 || (TREE_CODE (index) == INTEGER_CST
6904 && TREE_CODE (set_low_bound) == INTEGER_CST
6905 && tree_int_cst_lt (index, set_low_bound))
6906 || (TREE_CODE (set_high_bound) == INTEGER_CST
6907 && TREE_CODE (index) == INTEGER_CST
6908 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
6909 return const0_rtx;
6910
d6a5ac33
RK
6911 if (target == 0)
6912 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
6913
6914 /* If we get here, we have to generate the code for both cases
6915 (in range and out of range). */
6916
6917 op0 = gen_label_rtx ();
6918 op1 = gen_label_rtx ();
6919
6920 if (! (GET_CODE (index_val) == CONST_INT
6921 && GET_CODE (lo_r) == CONST_INT))
6922 {
c5d5d461
JL
6923 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6924 GET_MODE (index_val), iunsignedp, 0, op1);
7308a047
RS
6925 }
6926
6927 if (! (GET_CODE (index_val) == CONST_INT
6928 && GET_CODE (hi_r) == CONST_INT))
6929 {
c5d5d461
JL
6930 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6931 GET_MODE (index_val), iunsignedp, 0, op1);
7308a047
RS
6932 }
6933
6934 /* Calculate the element number of bit zero in the first word
6935 of the set. */
6936 if (GET_CODE (lo_r) == CONST_INT)
17938e57
RK
6937 rlow = GEN_INT (INTVAL (lo_r)
6938 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 6939 else
17938e57
RK
6940 rlow = expand_binop (index_mode, and_optab, lo_r,
6941 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 6942 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 6943
d6a5ac33
RK
6944 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6945 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
6946
6947 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 6948 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 6949 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
6950 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6951
7308a047 6952 addr = memory_address (byte_mode,
d6a5ac33
RK
6953 expand_binop (index_mode, add_optab, diff,
6954 setaddr, NULL_RTX, iunsignedp,
17938e57 6955 OPTAB_LIB_WIDEN));
d6a5ac33 6956
7308a047
RS
6957 /* Extract the bit we want to examine.  */
6958 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 6959 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
6960 make_tree (TREE_TYPE (index), rem),
6961 NULL_RTX, 1);
6962 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6963 GET_MODE (target) == byte_mode ? target : 0,
7308a047 6964 1, OPTAB_LIB_WIDEN);
17938e57
RK
6965
6966 if (result != target)
6967 convert_move (target, result, 1);
7308a047
RS
6968
6969 /* Output the code to handle the out-of-range case. */
6970 emit_jump (op0);
6971 emit_label (op1);
6972 emit_move_insn (target, const0_rtx);
6973 emit_label (op0);
6974 return target;
6975 }
6976
bbf6f052
RK
6977 case WITH_CLEANUP_EXPR:
6978 if (RTL_EXPR_RTL (exp) == 0)
6979 {
6980 RTL_EXPR_RTL (exp)
921b3427 6981 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
e976b8b2
MS
6982 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6983
bbf6f052
RK
6984 /* That's it for this cleanup. */
6985 TREE_OPERAND (exp, 2) = 0;
6986 }
6987 return RTL_EXPR_RTL (exp);
6988
5dab5552
MS
6989 case CLEANUP_POINT_EXPR:
6990 {
e976b8b2
MS
6991 /* Start a new binding layer that will keep track of all cleanup
6992 actions to be performed. */
8e91754e 6993 expand_start_bindings (2);
e976b8b2 6994
d93d4205 6995 target_temp_slot_level = temp_slot_level;
e976b8b2 6996
921b3427 6997 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
f283f66b
JM
6998 /* If we're going to use this value, load it up now. */
6999 if (! ignore)
7000 op0 = force_not_mem (op0);
d93d4205 7001 preserve_temp_slots (op0);
e976b8b2 7002 expand_end_bindings (NULL_TREE, 0, 0);
5dab5552
MS
7003 }
7004 return op0;
7005
bbf6f052
RK
7006 case CALL_EXPR:
7007 /* Check for a built-in function. */
7008 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
d6a5ac33
RK
7009 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
7010 == FUNCTION_DECL)
bbf6f052
RK
7011 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7012 return expand_builtin (exp, target, subtarget, tmode, ignore);
d6a5ac33 7013
bbf6f052
RK
7014 /* If this call was expanded already by preexpand_calls,
7015 just return the result we got. */
7016 if (CALL_EXPR_RTL (exp) != 0)
7017 return CALL_EXPR_RTL (exp);
d6a5ac33 7018
8129842c 7019 return expand_call (exp, target, ignore);
bbf6f052
RK
7020
7021 case NON_LVALUE_EXPR:
7022 case NOP_EXPR:
7023 case CONVERT_EXPR:
7024 case REFERENCE_EXPR:
bbf6f052
RK
7025 if (TREE_CODE (type) == UNION_TYPE)
7026 {
7027 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
14a774a9
RK
7028
7029 /* If both input and output are BLKmode, this conversion
7030 isn't actually doing anything unless we need to make the
7031 alignment stricter. */
7032 if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
7033 && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
7034 || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
7035 return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
7036 modifier);
7037
bbf6f052 7038 if (target == 0)
06089a8b
RK
7039 {
7040 if (mode != BLKmode)
7041 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7042 else
7043 target = assign_temp (type, 0, 1, 1);
7044 }
d6a5ac33 7045
bbf6f052
RK
7046 if (GET_CODE (target) == MEM)
7047 /* Store data into beginning of memory target. */
7048 store_expr (TREE_OPERAND (exp, 0),
1499e0a8
RK
7049 change_address (target, TYPE_MODE (valtype), 0), 0);
7050
bbf6f052
RK
7051 else if (GET_CODE (target) == REG)
7052 /* Store this field into a union of the proper type. */
14a774a9
RK
7053 store_field (target,
7054 MIN ((int_size_in_bytes (TREE_TYPE
7055 (TREE_OPERAND (exp, 0)))
7056 * BITS_PER_UNIT),
7057 GET_MODE_BITSIZE (mode)),
7058 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
7059 VOIDmode, 0, 1, int_size_in_bytes (type), 0);
bbf6f052
RK
7060 else
7061 abort ();
7062
7063 /* Return the entire union. */
7064 return target;
7065 }
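/* Typical source this branch handles -- an illustrative sketch; the
   cast-to-union form is a GNU C extension: the operand is stored into
   a temporary of the union type and the whole union is returned.  */
union example_u { int i; float f; };
static union example_u
cast_to_union (float x)
{
  return (union example_u) x;   /* stores x into the F member */
}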
d6a5ac33 7066
7f62854a
RK
7067 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
7068 {
7069 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
921b3427 7070 ro_modifier);
7f62854a
RK
7071
7072 /* If the signedness of the conversion differs and OP0 is
7073 a promoted SUBREG, clear that indication since we now
7074 have to do the proper extension. */
7075 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
7076 && GET_CODE (op0) == SUBREG)
7077 SUBREG_PROMOTED_VAR_P (op0) = 0;
7078
7079 return op0;
7080 }
7081
1499e0a8 7082 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
7083 if (GET_MODE (op0) == mode)
7084 return op0;
12342f90 7085
d6a5ac33
RK
7086 /* If OP0 is a constant, just convert it into the proper mode. */
7087 if (CONSTANT_P (op0))
7088 return
7089 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7090 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
12342f90 7091
26fcb35a 7092 if (modifier == EXPAND_INITIALIZER)
38a448ca 7093 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 7094
bbf6f052 7095 if (target == 0)
d6a5ac33
RK
7096 return
7097 convert_to_mode (mode, op0,
7098 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 7099 else
d6a5ac33
RK
7100 convert_move (target, op0,
7101 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
7102 return target;
7103
7104 case PLUS_EXPR:
0f41302f
MS
7105 /* We come here from MINUS_EXPR when the second operand is a
7106 constant. */
bbf6f052
RK
7107 plus_expr:
7108 this_optab = add_optab;
7109
7110 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
7111 something else, make sure we add the register to the constant and
7112 then to the other thing. This case can occur during strength
7113 reduction and doing it this way will produce better code if the
7114 frame pointer or argument pointer is eliminated.
7115
7116 fold-const.c will ensure that the constant is always in the inner
7117 PLUS_EXPR, so the only case we need to do anything about is if
7118 sp, ap, or fp is our second argument, in which case we must swap
7119 the innermost first argument and our second argument. */
7120
7121 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
7122 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
7123 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
7124 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
7125 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
7126 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
7127 {
7128 tree t = TREE_OPERAND (exp, 1);
7129
7130 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
7131 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
7132 }
7133
88f63c77 7134 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
7135 something, we might be forming a constant. So try to use
7136 plus_constant. If it produces a sum and we can't accept it,
7137 use force_operand. This allows P = &ARR[const] to generate
7138 efficient code on machines where a SYMBOL_REF is not a valid
7139 address.
7140
7141 If this is an EXPAND_SUM call, always return the sum. */
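/* For instance (illustrative):
       static int arr[10];
       int *p = &arr[3];
   forms the address as (symbol_ref arr) plus the constant 12 via
   plus_constant, with no run-time addition (assuming 4-byte int).  */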
c980ac49 7142 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
88f63c77 7143 || mode == ptr_mode)
bbf6f052 7144 {
c980ac49
RS
7145 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
7146 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
7147 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
7148 {
cbbc503e
JL
7149 rtx constant_part;
7150
c980ac49
RS
7151 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
7152 EXPAND_SUM);
cbbc503e
JL
7153 /* Use immed_double_const to ensure that the constant is
7154 truncated according to the mode of OP1, then sign extended
7155 to a HOST_WIDE_INT. Using the constant directly can result
7156 in non-canonical RTL in a 64x32 cross compile. */
7157 constant_part
7158 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
7159 (HOST_WIDE_INT) 0,
a5efcd63 7160 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
7f401c74 7161 op1 = plus_constant (op1, INTVAL (constant_part));
c980ac49
RS
7162 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7163 op1 = force_operand (op1, target);
7164 return op1;
7165 }
bbf6f052 7166
c980ac49
RS
7167 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7168 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
7169 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
7170 {
cbbc503e
JL
7171 rtx constant_part;
7172
c980ac49
RS
7173 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7174 EXPAND_SUM);
7175 if (! CONSTANT_P (op0))
7176 {
7177 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7178 VOIDmode, modifier);
709f5be1
RS
7179 /* Don't go to both_summands if modifier
7180 says it's not right to return a PLUS. */
7181 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7182 goto binop2;
c980ac49
RS
7183 goto both_summands;
7184 }
cbbc503e
JL
7185 /* Use immed_double_const to ensure that the constant is
7186 truncated according to the mode of OP1, then sign extended
7187 to a HOST_WIDE_INT. Using the constant directly can result
7188 in non-canonical RTL in a 64x32 cross compile. */
7189 constant_part
7190 = immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
7191 (HOST_WIDE_INT) 0,
2a94e396 7192 TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
7f401c74 7193 op0 = plus_constant (op0, INTVAL (constant_part));
c980ac49
RS
7194 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
7195 op0 = force_operand (op0, target);
7196 return op0;
7197 }
bbf6f052
RK
7198 }
7199
7200 /* No sense saving up arithmetic to be done
7201 if it's all in the wrong mode to form part of an address.
7202 And force_operand won't know whether to sign-extend or
7203 zero-extend. */
7204 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 7205 || mode != ptr_mode)
c980ac49 7206 goto binop;
bbf6f052
RK
7207
7208 preexpand_calls (exp);
e5e809f4 7209 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7210 subtarget = 0;
7211
921b3427
RK
7212 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
7213 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
bbf6f052 7214
c980ac49 7215 both_summands:
bbf6f052
RK
7216 /* Make sure any term that's a sum with a constant comes last. */
7217 if (GET_CODE (op0) == PLUS
7218 && CONSTANT_P (XEXP (op0, 1)))
7219 {
7220 temp = op0;
7221 op0 = op1;
7222 op1 = temp;
7223 }
7224 /* If adding to a sum including a constant,
7225 associate it to put the constant outside. */
7226 if (GET_CODE (op1) == PLUS
7227 && CONSTANT_P (XEXP (op1, 1)))
7228 {
7229 rtx constant_term = const0_rtx;
7230
7231 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
7232 if (temp != 0)
7233 op0 = temp;
6f90e075
JW
7234 /* Ensure that MULT comes first if there is one. */
7235 else if (GET_CODE (op0) == MULT)
38a448ca 7236 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
bbf6f052 7237 else
38a448ca 7238 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
bbf6f052
RK
7239
7240 /* Let's also eliminate constants from op0 if possible. */
7241 op0 = eliminate_constant_term (op0, &constant_term);
7242
7243 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
7244 their sum should be a constant. Form it into OP1, since the
7245 result we want will then be OP0 + OP1. */
7246
7247 temp = simplify_binary_operation (PLUS, mode, constant_term,
7248 XEXP (op1, 1));
7249 if (temp != 0)
7250 op1 = temp;
7251 else
38a448ca 7252 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
bbf6f052
RK
7253 }
7254
7255 /* Put a constant term last and put a multiplication first. */
7256 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
7257 temp = op1, op1 = op0, op0 = temp;
7258
7259 temp = simplify_binary_operation (PLUS, mode, op0, op1);
38a448ca 7260 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
bbf6f052
RK
7261
7262 case MINUS_EXPR:
ea87523e
RK
7263 /* For initializers, we are allowed to return a MINUS of two
7264 symbolic constants; here we handle all cases when both operands
7265 are constant. */
7268 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7269 && really_constant_p (TREE_OPERAND (exp, 0))
7270 && really_constant_p (TREE_OPERAND (exp, 1)))
7271 {
906c4e36 7272 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
921b3427 7273 VOIDmode, ro_modifier);
906c4e36 7274 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
921b3427 7275 VOIDmode, ro_modifier);
ea87523e 7276
ea87523e
RK
7277 /* If the last operand is a CONST_INT, use plus_constant of
7278 the negated constant. Else make the MINUS. */
7279 if (GET_CODE (op1) == CONST_INT)
7280 return plus_constant (op0, - INTVAL (op1));
7281 else
38a448ca 7282 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052
RK
7283 }
7284 /* Convert A - const to A + (-const). */
7285 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7286 {
ae431183
RK
7287 tree negated = fold (build1 (NEGATE_EXPR, type,
7288 TREE_OPERAND (exp, 1)));
7289
ae431183 7290 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6fbfac92
JM
7291 /* If we can't negate the constant in TYPE, leave it alone and
7292 expand_binop will negate it for us. We used to try to do it
7293 here in the signed version of TYPE, but that doesn't work
7294 on POINTER_TYPEs. */;
ae431183
RK
7295 else
7296 {
7297 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
7298 goto plus_expr;
7299 }
bbf6f052
RK
7300 }
7301 this_optab = sub_optab;
7302 goto binop;
7303
7304 case MULT_EXPR:
7305 preexpand_calls (exp);
7306 /* If first operand is constant, swap them.
7307 Thus the following special case checks need only
7308 check the second operand. */
7309 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
7310 {
7311 register tree t1 = TREE_OPERAND (exp, 0);
7312 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
7313 TREE_OPERAND (exp, 1) = t1;
7314 }
7315
7316 /* Attempt to return something suitable for generating an
7317 indexed address, for machines that support that. */
7318
88f63c77 7319 if (modifier == EXPAND_SUM && mode == ptr_mode
bbf6f052 7320 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 7321 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 7322 {
921b3427
RK
7323 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
7324 EXPAND_SUM);
bbf6f052
RK
7325
7326 /* Apply distributive law if OP0 is x+c. */
7327 if (GET_CODE (op0) == PLUS
7328 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
c5c76735
JL
7329 return
7330 gen_rtx_PLUS
7331 (mode,
7332 gen_rtx_MULT
7333 (mode, XEXP (op0, 0),
7334 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
7335 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
7336 * INTVAL (XEXP (op0, 1))));
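/* Effect of the distributive law just applied, as source code -- an
   illustrative sketch assuming 8-byte longs:  */
static long
indexed_load (long *base, long i)
{
  return base[i + 4];   /* offset becomes (plus (mult i 8) 32), a form
                           ready for an indexed addressing mode */
}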
bbf6f052
RK
7337
7338 if (GET_CODE (op0) != REG)
906c4e36 7339 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
7340 if (GET_CODE (op0) != REG)
7341 op0 = copy_to_mode_reg (mode, op0);
7342
c5c76735
JL
7343 return
7344 gen_rtx_MULT (mode, op0,
7345 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
7346 }
7347
e5e809f4 7348 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7349 subtarget = 0;
7350
7351 /* Check for multiplying things that have been extended
7352 from a narrower type. If this machine supports multiplying
7353 in that narrower type with a result in the desired type,
7354 do it that way, and avoid the explicit type-conversion. */
7355 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7356 && TREE_CODE (type) == INTEGER_TYPE
7357 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7358 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7359 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7360 && int_fits_type_p (TREE_OPERAND (exp, 1),
7361 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7362 /* Don't use a widening multiply if a shift will do. */
7363 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7364 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
7365 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7366 ||
7367 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7368 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7369 ==
7370 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7371 /* If both operands are extended, they must either both
7372 be zero-extended or both be sign-extended. */
7373 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7374 ==
7375 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7376 {
7377 enum machine_mode innermode
7378 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
7379 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7380 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
7381 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7382 ? umul_widen_optab : smul_widen_optab);
b10af0c8 7383 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 7384 {
b10af0c8
TG
7385 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7386 {
7387 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7388 NULL_RTX, VOIDmode, 0);
7389 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7390 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7391 VOIDmode, 0);
7392 else
7393 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7394 NULL_RTX, VOIDmode, 0);
7395 goto binop2;
7396 }
7397 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7398 && innermode == word_mode)
7399 {
7400 rtx htem;
7401 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7402 NULL_RTX, VOIDmode, 0);
7403 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7404 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7405 VOIDmode, 0);
7406 else
7407 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7408 NULL_RTX, VOIDmode, 0);
7409 temp = expand_binop (mode, other_optab, op0, op1, target,
7410 unsignedp, OPTAB_LIB_WIDEN);
7411 htem = expand_mult_highpart_adjust (innermode,
7412 gen_highpart (innermode, temp),
7413 op0, op1,
7414 gen_highpart (innermode, temp),
7415 unsignedp);
7416 emit_move_insn (gen_highpart (innermode, temp), htem);
7417 return temp;
7418 }
bbf6f052
RK
7419 }
7420 }
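/* The source shape detected above (illustrative sketch): both operands
   are widened from a narrower type, so a widening multiply such as
   16x16->32 can be used instead of converting first.  */
static int
widening_mult (short a, short b)
{
  return (int) a * (int) b;   /* NOP_EXPRs around both operands */
}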
7421 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7422 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7423 return expand_mult (mode, op0, op1, target, unsignedp);
7424
7425 case TRUNC_DIV_EXPR:
7426 case FLOOR_DIV_EXPR:
7427 case CEIL_DIV_EXPR:
7428 case ROUND_DIV_EXPR:
7429 case EXACT_DIV_EXPR:
7430 preexpand_calls (exp);
e5e809f4 7431 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7432 subtarget = 0;
7433 /* Possible optimization: compute the dividend with EXPAND_SUM;
7434 then, if the divisor is constant, we can optimize the case
7435 where some terms of the dividend have coefficients divisible by it. */
7436 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7437 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7438 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7439
7440 case RDIV_EXPR:
7441 this_optab = flodiv_optab;
7442 goto binop;
7443
7444 case TRUNC_MOD_EXPR:
7445 case FLOOR_MOD_EXPR:
7446 case CEIL_MOD_EXPR:
7447 case ROUND_MOD_EXPR:
7448 preexpand_calls (exp);
e5e809f4 7449 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7450 subtarget = 0;
7451 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7452 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7453 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7454
7455 case FIX_ROUND_EXPR:
7456 case FIX_FLOOR_EXPR:
7457 case FIX_CEIL_EXPR:
7458 abort (); /* Not used for C. */
7459
7460 case FIX_TRUNC_EXPR:
906c4e36 7461 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7462 if (target == 0)
7463 target = gen_reg_rtx (mode);
7464 expand_fix (target, op0, unsignedp);
7465 return target;
7466
7467 case FLOAT_EXPR:
906c4e36 7468 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7469 if (target == 0)
7470 target = gen_reg_rtx (mode);
7471 /* expand_float can't figure out what to do if FROM has VOIDmode.
7472 So give it the correct mode. With -O, cse will optimize this. */
7473 if (GET_MODE (op0) == VOIDmode)
7474 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7475 op0);
7476 expand_float (target, op0,
7477 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7478 return target;
7479
7480 case NEGATE_EXPR:
5b22bee8 7481 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
bbf6f052
RK
7482 temp = expand_unop (mode, neg_optab, op0, target, 0);
7483 if (temp == 0)
7484 abort ();
7485 return temp;
7486
7487 case ABS_EXPR:
7488 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7489
2d7050fd 7490 /* Handle complex values specially. */
d6a5ac33
RK
7491 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7492 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7493 return expand_complex_abs (mode, op0, target, unsignedp);
2d7050fd 7494
bbf6f052
RK
7495 /* Unsigned abs is simply the operand. Testing here means we don't
7496 risk generating incorrect code below. */
7497 if (TREE_UNSIGNED (type))
7498 return op0;
7499
91813b28 7500 return expand_abs (mode, op0, target,
e5e809f4 7501 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
7502
7503 case MAX_EXPR:
7504 case MIN_EXPR:
7505 target = original_target;
e5e809f4 7506 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
fc155707 7507 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 7508 || GET_MODE (target) != mode
bbf6f052
RK
7509 || (GET_CODE (target) == REG
7510 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7511 target = gen_reg_rtx (mode);
906c4e36 7512 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7513 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7514
7515 /* First try to do it with a special MIN or MAX instruction.
7516 If that does not win, use a conditional jump to select the proper
7517 value. */
7518 this_optab = (TREE_UNSIGNED (type)
7519 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7520 : (code == MIN_EXPR ? smin_optab : smax_optab));
7521
7522 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7523 OPTAB_WIDEN);
7524 if (temp != 0)
7525 return temp;
7526
fa2981d8
JW
7527 /* At this point, a MEM target is no longer useful; we will get better
7528 code without it. */
7529
7530 if (GET_CODE (target) == MEM)
7531 target = gen_reg_rtx (mode);
7532
ee456b1c
RK
7533 if (target != op0)
7534 emit_move_insn (target, op0);
d6a5ac33 7535
bbf6f052 7536 op0 = gen_label_rtx ();
d6a5ac33 7537
f81497d9
RS
7538 /* If this mode is an integer too wide to compare properly,
7539 compare word by word. Rely on cse to optimize constant cases. */
1c0290ea 7540 if (GET_MODE_CLASS (mode) == MODE_INT && ! can_compare_p (mode, ccp_jump))
bbf6f052 7541 {
f81497d9 7542 if (code == MAX_EXPR)
d6a5ac33
RK
7543 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7544 target, op1, NULL_RTX, op0);
bbf6f052 7545 else
d6a5ac33
RK
7546 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7547 op1, target, NULL_RTX, op0);
bbf6f052 7548 }
f81497d9
RS
7549 else
7550 {
b30f05db
BS
7551 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
7552 do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
7553 unsignedp, mode, NULL_RTX, 0, NULL_RTX,
7554 op0);
f81497d9 7555 }
b30f05db 7556 emit_move_insn (target, op1);
bbf6f052
RK
7557 emit_label (op0);
7558 return target;
7559
bbf6f052
RK
7560 case BIT_NOT_EXPR:
7561 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7562 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7563 if (temp == 0)
7564 abort ();
7565 return temp;
7566
7567 case FFS_EXPR:
7568 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7569 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7570 if (temp == 0)
7571 abort ();
7572 return temp;
7573
d6a5ac33
RK
7574 /* ??? Can optimize bitwise operations with one arg constant.
7575 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7576 and (a bitwise1 b) bitwise2 b (etc)
7577 but that is probably not worth while. */
7578
7579 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7580 boolean values when we want in all cases to compute both of them. In
7581 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7582 as actual zero-or-1 values and then bitwise anding. In cases where
7583 there cannot be any side effects, better code would be made by
7584 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7585 how to recognize those cases. */
7586
bbf6f052
RK
7587 case TRUTH_AND_EXPR:
7588 case BIT_AND_EXPR:
7589 this_optab = and_optab;
7590 goto binop;
7591
bbf6f052
RK
7592 case TRUTH_OR_EXPR:
7593 case BIT_IOR_EXPR:
7594 this_optab = ior_optab;
7595 goto binop;
7596
874726a8 7597 case TRUTH_XOR_EXPR:
bbf6f052
RK
7598 case BIT_XOR_EXPR:
7599 this_optab = xor_optab;
7600 goto binop;
7601
7602 case LSHIFT_EXPR:
7603 case RSHIFT_EXPR:
7604 case LROTATE_EXPR:
7605 case RROTATE_EXPR:
7606 preexpand_calls (exp);
e5e809f4 7607 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7608 subtarget = 0;
7609 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7610 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7611 unsignedp);
7612
d6a5ac33
RK
7613 /* Could determine the answer when only additive constants differ. Also,
7614 the addition of one can be handled by changing the condition. */
bbf6f052
RK
7615 case LT_EXPR:
7616 case LE_EXPR:
7617 case GT_EXPR:
7618 case GE_EXPR:
7619 case EQ_EXPR:
7620 case NE_EXPR:
7621 preexpand_calls (exp);
7622 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7623 if (temp != 0)
7624 return temp;
d6a5ac33 7625
0f41302f 7626 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
7627 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7628 && original_target
7629 && GET_CODE (original_target) == REG
7630 && (GET_MODE (original_target)
7631 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7632 {
d6a5ac33
RK
7633 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7634 VOIDmode, 0);
7635
bbf6f052
RK
7636 if (temp != original_target)
7637 temp = copy_to_reg (temp);
d6a5ac33 7638
bbf6f052 7639 op1 = gen_label_rtx ();
c5d5d461
JL
7640 emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
7641 GET_MODE (temp), unsignedp, 0, op1);
bbf6f052
RK
7642 emit_move_insn (temp, const1_rtx);
7643 emit_label (op1);
7644 return temp;
7645 }
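/* Shape of the sequence emitted by the special case just above, in C
   terms (illustrative sketch): any nonzero value is normalized to 1
   without a set-flag instruction.  */
static int
storeflag_fallback (int foo)
{
  int temp = foo;
  if (temp != 0)
    temp = 1;   /* compare with 0, skip the move if equal */
  return temp;
}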
d6a5ac33 7646
bbf6f052
RK
7647 /* If no set-flag instruction, must generate a conditional
7648 store into a temporary variable. Drop through
7649 and handle this like && and ||. */
7650
7651 case TRUTH_ANDIF_EXPR:
7652 case TRUTH_ORIF_EXPR:
e44842fe 7653 if (! ignore
e5e809f4 7654 && (target == 0 || ! safe_from_p (target, exp, 1)
e44842fe
RK
7655 /* Make sure we don't have a hard reg (such as function's return
7656 value) live across basic blocks, if not optimizing. */
7657 || (!optimize && GET_CODE (target) == REG
7658 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 7659 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
7660
7661 if (target)
7662 emit_clr_insn (target);
7663
bbf6f052
RK
7664 op1 = gen_label_rtx ();
7665 jumpifnot (exp, op1);
e44842fe
RK
7666
7667 if (target)
7668 emit_0_to_1_insn (target);
7669
bbf6f052 7670 emit_label (op1);
e44842fe 7671 return ignore ? const0_rtx : target;
bbf6f052
RK
7672
7673 case TRUTH_NOT_EXPR:
7674 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7675 /* The parser is careful to generate TRUTH_NOT_EXPR
7676 only with operands that are always zero or one. */
906c4e36 7677 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
7678 target, 1, OPTAB_LIB_WIDEN);
7679 if (temp == 0)
7680 abort ();
7681 return temp;
7682
7683 case COMPOUND_EXPR:
7684 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7685 emit_queue ();
7686 return expand_expr (TREE_OPERAND (exp, 1),
7687 (ignore ? const0_rtx : target),
7688 VOIDmode, 0);
7689
7690 case COND_EXPR:
ac01eace
RK
7691 /* If we would have a "singleton" (see below) were it not for a
7692 conversion in each arm, bring that conversion back out. */
7693 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7694 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7695 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7696 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7697 {
7698 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7699 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7700
7701 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7702 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7703 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7704 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7705 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7706 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7707 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7708 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7709 return expand_expr (build1 (NOP_EXPR, type,
7710 build (COND_EXPR, TREE_TYPE (true),
7711 TREE_OPERAND (exp, 0),
7712 true, false)),
7713 target, tmode, modifier);
7714 }
7715
bbf6f052
RK
7716 {
7717 /* Note that COND_EXPRs whose type is a structure or union
7718 are required to be constructed to contain assignments of
7719 a temporary variable, so that we can evaluate them here
7720 for side effect only. If type is void, we must do likewise. */
7721
7722 /* If an arm of the branch requires a cleanup,
7723 only that cleanup is performed. */
7724
7725 tree singleton = 0;
7726 tree binary_op = 0, unary_op = 0;
bbf6f052
RK
7727
7728 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7729 convert it to our mode, if necessary. */
7730 if (integer_onep (TREE_OPERAND (exp, 1))
7731 && integer_zerop (TREE_OPERAND (exp, 2))
7732 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7733 {
dd27116b
RK
7734 if (ignore)
7735 {
7736 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
921b3427 7737 ro_modifier);
dd27116b
RK
7738 return const0_rtx;
7739 }
7740
921b3427 7741 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
bbf6f052
RK
7742 if (GET_MODE (op0) == mode)
7743 return op0;
d6a5ac33 7744
bbf6f052
RK
7745 if (target == 0)
7746 target = gen_reg_rtx (mode);
7747 convert_move (target, op0, unsignedp);
7748 return target;
7749 }
7750
ac01eace
RK
7751 /* Check for X ? A + B : A. If we have this, we can copy A to the
7752 output and conditionally add B. Similarly for unary operations.
7753 Don't do this if X has side-effects because those side effects
7754 might affect A or B and the "?" operation is a sequence point in
7755 ANSI. (operand_equal_p tests for side effects.) */
bbf6f052
RK
7756
7757 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7758 && operand_equal_p (TREE_OPERAND (exp, 2),
7759 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7760 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7761 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7762 && operand_equal_p (TREE_OPERAND (exp, 1),
7763 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7764 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7765 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7766 && operand_equal_p (TREE_OPERAND (exp, 2),
7767 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7768 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7769 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7770 && operand_equal_p (TREE_OPERAND (exp, 1),
7771 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7772 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
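/* The shapes just matched, spelled out (illustrative):
     x ? a + b : a   -> singleton == a, binary_op == a + b
     x ? a : a + b   -> singleton == a, binary_op == a + b
     x ? -a : a      -> singleton == a, unary_op  == -a
   A can then be computed unconditionally and B applied only when
   the condition requires it.  */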
7773
01c8a7c8
RK
7774 /* If we are not to produce a result, we have no target. Otherwise,
7775 if a target was specified use it; it will not be used as an
7776 intermediate target unless it is safe. If no target, use a
7777 temporary. */
7778
7779 if (ignore)
7780 temp = 0;
7781 else if (original_target
e5e809f4 7782 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
01c8a7c8
RK
7783 || (singleton && GET_CODE (original_target) == REG
7784 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7785 && original_target == var_rtx (singleton)))
7786 && GET_MODE (original_target) == mode
7c00d1fe
RK
7787#ifdef HAVE_conditional_move
7788 && (! can_conditionally_move_p (mode)
7789 || GET_CODE (original_target) == REG
7790 || TREE_ADDRESSABLE (type))
7791#endif
01c8a7c8
RK
7792 && ! (GET_CODE (original_target) == MEM
7793 && MEM_VOLATILE_P (original_target)))
7794 temp = original_target;
7795 else if (TREE_ADDRESSABLE (type))
7796 abort ();
7797 else
7798 temp = assign_temp (type, 0, 0, 1);
7799
ac01eace
RK
7800 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7801 do the test of X as a store-flag operation, do this as
7802 A + ((X != 0) << log C). Similarly for other simple binary
7803 operators. Only do for C == 1 if BRANCH_COST is low. */
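/* Concretely (illustrative):  x ? a + 4 : a
   becomes                      a + ((x != 0) << 2),
   branch-free whenever X can be computed as a store-flag value.  */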
dd27116b 7804 if (temp && singleton && binary_op
bbf6f052
RK
7805 && (TREE_CODE (binary_op) == PLUS_EXPR
7806 || TREE_CODE (binary_op) == MINUS_EXPR
7807 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 7808 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
ac01eace
RK
7809 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7810 : integer_onep (TREE_OPERAND (binary_op, 1)))
bbf6f052
RK
7811 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7812 {
7813 rtx result;
7814 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7815 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7816 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
2d444001 7817 : xor_optab);
bbf6f052
RK
7818
7819 /* If we had X ? A : A + 1, do this as A + (X == 0).
7820
7821 We have to invert the truth value here and then put it
7822 back later if do_store_flag fails. We cannot simply copy
7823 TREE_OPERAND (exp, 0) to another variable and modify that
7824 because invert_truthvalue can modify the tree pointed to
7825 by its argument. */
7826 if (singleton == TREE_OPERAND (exp, 1))
7827 TREE_OPERAND (exp, 0)
7828 = invert_truthvalue (TREE_OPERAND (exp, 0));
7829
7830 result = do_store_flag (TREE_OPERAND (exp, 0),
e5e809f4 7831 (safe_from_p (temp, singleton, 1)
906c4e36 7832 ? temp : NULL_RTX),
bbf6f052
RK
7833 mode, BRANCH_COST <= 1);
7834
ac01eace
RK
7835 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7836 result = expand_shift (LSHIFT_EXPR, mode, result,
7837 build_int_2 (tree_log2
7838 (TREE_OPERAND
7839 (binary_op, 1)),
7840 0),
e5e809f4 7841 (safe_from_p (temp, singleton, 1)
ac01eace
RK
7842 ? temp : NULL_RTX), 0);
7843
bbf6f052
RK
7844 if (result)
7845 {
906c4e36 7846 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7847 return expand_binop (mode, boptab, op1, result, temp,
7848 unsignedp, OPTAB_LIB_WIDEN);
7849 }
7850 else if (singleton == TREE_OPERAND (exp, 1))
7851 TREE_OPERAND (exp, 0)
7852 = invert_truthvalue (TREE_OPERAND (exp, 0));
7853 }
7854
dabf8373 7855 do_pending_stack_adjust ();
bbf6f052
RK
7856 NO_DEFER_POP;
7857 op0 = gen_label_rtx ();
7858
7859 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7860 {
7861 if (temp != 0)
7862 {
7863 /* If the target conflicts with the other operand of the
7864 binary op, we can't use it. Also, we can't use the target
7865 if it is a hard register, because evaluating the condition
7866 might clobber it. */
7867 if ((binary_op
e5e809f4 7868 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
bbf6f052
RK
7869 || (GET_CODE (temp) == REG
7870 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7871 temp = gen_reg_rtx (mode);
7872 store_expr (singleton, temp, 0);
7873 }
7874 else
906c4e36 7875 expand_expr (singleton,
2937cf87 7876 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7877 if (singleton == TREE_OPERAND (exp, 1))
7878 jumpif (TREE_OPERAND (exp, 0), op0);
7879 else
7880 jumpifnot (TREE_OPERAND (exp, 0), op0);
7881
956d6950 7882 start_cleanup_deferral ();
bbf6f052
RK
7883 if (binary_op && temp == 0)
7884 /* Just touch the other operand. */
7885 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 7886 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7887 else if (binary_op)
7888 store_expr (build (TREE_CODE (binary_op), type,
7889 make_tree (type, temp),
7890 TREE_OPERAND (binary_op, 1)),
7891 temp, 0);
7892 else
7893 store_expr (build1 (TREE_CODE (unary_op), type,
7894 make_tree (type, temp)),
7895 temp, 0);
7896 op1 = op0;
bbf6f052 7897 }
bbf6f052
RK
7898 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7899 comparison operator. If we have one of these cases, set the
7900 output to A, branch on A (cse will merge these two references),
7901 then set the output to FOO. */
7902 else if (temp
7903 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7904 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7905 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7906 TREE_OPERAND (exp, 1), 0)
e9a25f70
JL
7907 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7908 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 7909 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
bbf6f052
RK
7910 {
7911 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7912 temp = gen_reg_rtx (mode);
7913 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7914 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 7915
956d6950 7916 start_cleanup_deferral ();
bbf6f052
RK
7917 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7918 op1 = op0;
7919 }
7920 else if (temp
7921 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7922 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7923 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7924 TREE_OPERAND (exp, 2), 0)
e9a25f70
JL
7925 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7926 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 7927 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7928 {
7929 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7930 temp = gen_reg_rtx (mode);
7931 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7932 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 7933
956d6950 7934 start_cleanup_deferral ();
bbf6f052
RK
7935 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7936 op1 = op0;
7937 }
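      /* A rough sketch of the two cases above: for r = (a != 0 ? a : b)
	 the emitted sequence is

	   r = a;  if (a != 0) goto done;  r = b;  done:

	 so that cse can merge the store of A with the test of A.  */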
7938 else
7939 {
7940 op1 = gen_label_rtx ();
7941 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 7942
956d6950 7943 start_cleanup_deferral ();
2ac84cfe
NS
7944
 7945	    /* One branch of the cond can be void if it never returns. For
 7946	       example, A ? throw : E.  */
7947 if (temp != 0
7948 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
bbf6f052
RK
7949 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7950 else
906c4e36
RK
7951 expand_expr (TREE_OPERAND (exp, 1),
7952 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 7953 end_cleanup_deferral ();
bbf6f052
RK
7954 emit_queue ();
7955 emit_jump_insn (gen_jump (op1));
7956 emit_barrier ();
7957 emit_label (op0);
956d6950 7958 start_cleanup_deferral ();
2ac84cfe
NS
7959 if (temp != 0
7960 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
bbf6f052
RK
7961 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7962 else
906c4e36
RK
7963 expand_expr (TREE_OPERAND (exp, 2),
7964 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7965 }
7966
956d6950 7967 end_cleanup_deferral ();
bbf6f052
RK
7968
7969 emit_queue ();
7970 emit_label (op1);
7971 OK_DEFER_POP;
5dab5552 7972
bbf6f052
RK
7973 return temp;
7974 }
7975
7976 case TARGET_EXPR:
7977 {
7978 /* Something needs to be initialized, but we didn't know
7979 where that thing was when building the tree. For example,
7980 it could be the return value of a function, or a parameter
 7981	   to a function which is laid out on the stack, or a temporary
7982 variable which must be passed by reference.
7983
7984 We guarantee that the expression will either be constructed
7985 or copied into our original target. */
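	/* For instance (an illustrative C++ case), given

	     struct S f ();
	     struct S s = f ();

	   the initialization of `s' is represented as a TARGET_EXPR whose
	   slot is `s', so the call can construct its result in place.  */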
7986
7987 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 7988 tree cleanups = NULL_TREE;
5c062816 7989 tree exp1;
bbf6f052
RK
7990
7991 if (TREE_CODE (slot) != VAR_DECL)
7992 abort ();
7993
9c51f375
RK
7994 if (! ignore)
7995 target = original_target;
7996
6fbfac92
JM
7997 /* Set this here so that if we get a target that refers to a
7998 register variable that's already been used, put_reg_into_stack
7999 knows that it should fix up those uses. */
8000 TREE_USED (slot) = 1;
8001
bbf6f052
RK
8002 if (target == 0)
8003 {
8004 if (DECL_RTL (slot) != 0)
ac993f4f
MS
8005 {
8006 target = DECL_RTL (slot);
5c062816 8007	      /* If we have already expanded the slot, don't do
ac993f4f 8008		 it again.  (mrs)  */
5c062816
MS
8009 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8010 return target;
ac993f4f 8011 }
bbf6f052
RK
8012 else
8013 {
e9a25f70 8014 target = assign_temp (type, 2, 0, 1);
bbf6f052
RK
8015 /* All temp slots at this level must not conflict. */
8016 preserve_temp_slots (target);
8017 DECL_RTL (slot) = target;
e9a25f70
JL
8018 if (TREE_ADDRESSABLE (slot))
8019 {
8020 TREE_ADDRESSABLE (slot) = 0;
8021 mark_addressable (slot);
8022 }
bbf6f052 8023
e287fd6e
RK
8024 /* Since SLOT is not known to the called function
8025 to belong to its stack frame, we must build an explicit
8026 cleanup. This case occurs when we must build up a reference
8027 to pass the reference as an argument. In this case,
8028 it is very likely that such a reference need not be
8029 built here. */
8030
8031 if (TREE_OPERAND (exp, 2) == 0)
8032 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
2a888d4c 8033 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 8034 }
bbf6f052
RK
8035 }
8036 else
8037 {
 8038	    /* This case does occur when expanding a parameter which
 8039	       needs to be constructed on the stack.  The target
8040 is the actual stack address that we want to initialize.
8041 The function we call will perform the cleanup in this case. */
8042
8c042b47
RS
8043 /* If we have already assigned it space, use that space,
 8044	       not the target that we were passed in, as our target
8045 parameter is only a hint. */
8046 if (DECL_RTL (slot) != 0)
8047 {
8048 target = DECL_RTL (slot);
 8049		/* If we have already expanded the slot, don't do
 8050		   it again.  (mrs)  */
8051 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8052 return target;
8053 }
21002281
JW
8054 else
8055 {
8056 DECL_RTL (slot) = target;
8057 /* If we must have an addressable slot, then make sure that
8058 the RTL that we just stored in slot is OK. */
8059 if (TREE_ADDRESSABLE (slot))
8060 {
8061 TREE_ADDRESSABLE (slot) = 0;
8062 mark_addressable (slot);
8063 }
8064 }
bbf6f052
RK
8065 }
8066
4847c938 8067 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
8068 /* Mark it as expanded. */
8069 TREE_OPERAND (exp, 1) = NULL_TREE;
8070
41531e5b 8071 store_expr (exp1, target, 0);
61d6b1cc 8072
e976b8b2 8073 expand_decl_cleanup (NULL_TREE, cleanups);
61d6b1cc 8074
41531e5b 8075 return target;
bbf6f052
RK
8076 }
8077
8078 case INIT_EXPR:
8079 {
8080 tree lhs = TREE_OPERAND (exp, 0);
8081 tree rhs = TREE_OPERAND (exp, 1);
8082 tree noncopied_parts = 0;
8083 tree lhs_type = TREE_TYPE (lhs);
8084
8085 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8086 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8087 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8088 TYPE_NONCOPIED_PARTS (lhs_type));
8089 while (noncopied_parts != 0)
8090 {
8091 expand_assignment (TREE_VALUE (noncopied_parts),
8092 TREE_PURPOSE (noncopied_parts), 0, 0);
8093 noncopied_parts = TREE_CHAIN (noncopied_parts);
8094 }
8095 return temp;
8096 }
8097
8098 case MODIFY_EXPR:
8099 {
8100 /* If lhs is complex, expand calls in rhs before computing it.
8101 That's so we don't compute a pointer and save it over a call.
8102 If lhs is simple, compute it first so we can give it as a
8103 target if the rhs is just a call. This avoids an extra temp and copy
 8104	   and prevents a partial subsumption which makes bad code.
8105 Actually we could treat component_ref's of vars like vars. */
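	/* Roughly: for `*p = f (x)' the call is pre-expanded before the
	   store is planned, while for `v = f (x)' the simple lhs V is
	   computed first so its rtx can serve as the call's target.  */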
8106
8107 tree lhs = TREE_OPERAND (exp, 0);
8108 tree rhs = TREE_OPERAND (exp, 1);
8109 tree noncopied_parts = 0;
8110 tree lhs_type = TREE_TYPE (lhs);
8111
8112 temp = 0;
8113
8114 if (TREE_CODE (lhs) != VAR_DECL
8115 && TREE_CODE (lhs) != RESULT_DECL
b60334e8
RK
8116 && TREE_CODE (lhs) != PARM_DECL
8117 && ! (TREE_CODE (lhs) == INDIRECT_REF
8118 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
bbf6f052
RK
8119 preexpand_calls (exp);
8120
8121 /* Check for |= or &= of a bitfield of size one into another bitfield
8122 of size 1. In this case, (unless we need the result of the
8123 assignment) we can do this more efficiently with a
8124 test followed by an assignment, if necessary.
8125
8126 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8127 things change so we do, this code should be enhanced to
8128 support it. */
8129 if (ignore
8130 && TREE_CODE (lhs) == COMPONENT_REF
8131 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8132 || TREE_CODE (rhs) == BIT_AND_EXPR)
8133 && TREE_OPERAND (rhs, 0) == lhs
8134 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8135 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
8136 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
8137 {
8138 rtx label = gen_label_rtx ();
8139
8140 do_jump (TREE_OPERAND (rhs, 1),
8141 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8142 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8143 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8144 (TREE_CODE (rhs) == BIT_IOR_EXPR
8145 ? integer_one_node
8146 : integer_zero_node)),
8147 0, 0);
e7c33f54 8148 do_pending_stack_adjust ();
bbf6f052
RK
8149 emit_label (label);
8150 return const0_rtx;
8151 }
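	/* A sketch of the transformation above (the field names are
	   illustrative):

	     struct { unsigned a : 1, b : 1; } s;
	     s.a |= s.b;   =>   if (s.b) s.a = 1;
	     s.a &= s.b;   =>   if (! s.b) s.a = 0;

	   The store is skipped whenever the right-hand bit would leave
	   the field unchanged; this is safe only because IGNORE is set.  */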
8152
8153 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8154 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8155 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8156 TYPE_NONCOPIED_PARTS (lhs_type));
8157
8158 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8159 while (noncopied_parts != 0)
8160 {
8161 expand_assignment (TREE_PURPOSE (noncopied_parts),
8162 TREE_VALUE (noncopied_parts), 0, 0);
8163 noncopied_parts = TREE_CHAIN (noncopied_parts);
8164 }
8165 return temp;
8166 }
8167
6e7f84a7
APB
8168 case RETURN_EXPR:
8169 if (!TREE_OPERAND (exp, 0))
8170 expand_null_return ();
8171 else
8172 expand_return (TREE_OPERAND (exp, 0));
8173 return const0_rtx;
8174
bbf6f052
RK
8175 case PREINCREMENT_EXPR:
8176 case PREDECREMENT_EXPR:
7b8b9722 8177 return expand_increment (exp, 0, ignore);
bbf6f052
RK
8178
8179 case POSTINCREMENT_EXPR:
8180 case POSTDECREMENT_EXPR:
8181 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 8182 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
8183
8184 case ADDR_EXPR:
987c71d9 8185 /* If nonzero, TEMP will be set to the address of something that might
0f41302f 8186 be a MEM corresponding to a stack slot. */
987c71d9
RK
8187 temp = 0;
8188
bbf6f052
RK
8189 /* Are we taking the address of a nested function? */
8190 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 8191 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
8192 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8193 && ! TREE_STATIC (exp))
bbf6f052
RK
8194 {
8195 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8196 op0 = force_operand (op0, target);
8197 }
682ba3a6
RK
8198 /* If we are taking the address of something erroneous, just
8199 return a zero. */
8200 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8201 return const0_rtx;
bbf6f052
RK
8202 else
8203 {
e287fd6e
RK
8204 /* We make sure to pass const0_rtx down if we came in with
8205 ignore set, to avoid doing the cleanups twice for something. */
8206 op0 = expand_expr (TREE_OPERAND (exp, 0),
8207 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
8208 (modifier == EXPAND_INITIALIZER
8209 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 8210
119af78a
RK
8211 /* If we are going to ignore the result, OP0 will have been set
8212 to const0_rtx, so just return it. Don't get confused and
8213 think we are taking the address of the constant. */
8214 if (ignore)
8215 return op0;
8216
3539e816
MS
8217 op0 = protect_from_queue (op0, 0);
8218
c5c76735
JL
8219 /* We would like the object in memory. If it is a constant, we can
8220 have it be statically allocated into memory. For a non-constant,
8221 we need to allocate some memory and store the value into it. */
896102d0
RK
8222
8223 if (CONSTANT_P (op0))
8224 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8225 op0);
987c71d9 8226 else if (GET_CODE (op0) == MEM)
af5b53ed
RK
8227 {
8228 mark_temp_addr_taken (op0);
8229 temp = XEXP (op0, 0);
8230 }
896102d0 8231
682ba3a6 8232 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6c8538cc 8233 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
896102d0
RK
8234 {
 8235	      /* If this object is in a register, it must not
0f41302f 8236 be BLKmode. */
896102d0 8237 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
06089a8b 8238 rtx memloc = assign_temp (inner_type, 1, 1, 1);
896102d0 8239
7a0b7b9a 8240 mark_temp_addr_taken (memloc);
896102d0
RK
8241 emit_move_insn (memloc, op0);
8242 op0 = memloc;
8243 }
8244
bbf6f052
RK
8245 if (GET_CODE (op0) != MEM)
8246 abort ();
8247
8248 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77
RK
8249 {
8250 temp = XEXP (op0, 0);
8251#ifdef POINTERS_EXTEND_UNSIGNED
8252 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8253 && mode == ptr_mode)
9fcfcce7 8254 temp = convert_memory_address (ptr_mode, temp);
88f63c77
RK
8255#endif
8256 return temp;
8257 }
987c71d9 8258
bbf6f052
RK
8259 op0 = force_operand (XEXP (op0, 0), target);
8260 }
987c71d9 8261
bbf6f052 8262 if (flag_force_addr && GET_CODE (op0) != REG)
987c71d9
RK
8263 op0 = force_reg (Pmode, op0);
8264
dc6d66b3
RK
8265 if (GET_CODE (op0) == REG
8266 && ! REG_USERVAR_P (op0))
8267 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
987c71d9
RK
8268
8269 /* If we might have had a temp slot, add an equivalent address
8270 for it. */
8271 if (temp != 0)
8272 update_temp_slot_address (temp, op0);
8273
88f63c77
RK
8274#ifdef POINTERS_EXTEND_UNSIGNED
8275 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8276 && mode == ptr_mode)
9fcfcce7 8277 op0 = convert_memory_address (ptr_mode, op0);
88f63c77
RK
8278#endif
8279
bbf6f052
RK
8280 return op0;
8281
8282 case ENTRY_VALUE_EXPR:
8283 abort ();
8284
7308a047
RS
8285 /* COMPLEX type for Extended Pascal & Fortran */
8286 case COMPLEX_EXPR:
8287 {
8288 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 8289 rtx insns;
7308a047
RS
8290
8291 /* Get the rtx code of the operands. */
8292 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8293 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8294
8295 if (! target)
8296 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8297
6551fa4d 8298 start_sequence ();
7308a047
RS
8299
8300 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
8301 emit_move_insn (gen_realpart (mode, target), op0);
8302 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 8303
6551fa4d
JW
8304 insns = get_insns ();
8305 end_sequence ();
8306
7308a047 8307 /* Complex construction should appear as a single unit. */
6551fa4d
JW
8308 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8309 each with a separate pseudo as destination.
8310 It's not correct for flow to treat them as a unit. */
6d6e61ce 8311 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8312 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8313 else
8314 emit_insns (insns);
7308a047
RS
8315
8316 return target;
8317 }
8318
8319 case REALPART_EXPR:
2d7050fd
RS
8320 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8321 return gen_realpart (mode, op0);
7308a047
RS
8322
8323 case IMAGPART_EXPR:
2d7050fd
RS
8324 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8325 return gen_imagpart (mode, op0);
7308a047
RS
8326
8327 case CONJ_EXPR:
8328 {
62acb978 8329 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 8330 rtx imag_t;
6551fa4d 8331 rtx insns;
7308a047
RS
8332
8333 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8334
8335 if (! target)
d6a5ac33 8336 target = gen_reg_rtx (mode);
7308a047 8337
6551fa4d 8338 start_sequence ();
7308a047
RS
8339
8340 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
8341 emit_move_insn (gen_realpart (partmode, target),
8342 gen_realpart (partmode, op0));
7308a047 8343
62acb978
RK
8344 imag_t = gen_imagpart (partmode, target);
8345 temp = expand_unop (partmode, neg_optab,
8346 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
8347 if (temp != imag_t)
8348 emit_move_insn (imag_t, temp);
8349
6551fa4d
JW
8350 insns = get_insns ();
8351 end_sequence ();
8352
d6a5ac33
RK
 8353	/* Conjugate should appear as a single unit.
8354 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
8355 each with a separate pseudo as destination.
8356 It's not correct for flow to treat them as a unit. */
6d6e61ce 8357 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8358 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8359 else
8360 emit_insns (insns);
7308a047
RS
8361
8362 return target;
8363 }
8364
e976b8b2
MS
8365 case TRY_CATCH_EXPR:
8366 {
8367 tree handler = TREE_OPERAND (exp, 1);
8368
8369 expand_eh_region_start ();
8370
8371 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8372
8373 expand_eh_region_end (handler);
8374
8375 return op0;
8376 }
8377
b335b813
PB
8378 case TRY_FINALLY_EXPR:
8379 {
8380 tree try_block = TREE_OPERAND (exp, 0);
8381 tree finally_block = TREE_OPERAND (exp, 1);
8382 rtx finally_label = gen_label_rtx ();
8383 rtx done_label = gen_label_rtx ();
8384 rtx return_link = gen_reg_rtx (Pmode);
8385 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8386 (tree) finally_label, (tree) return_link);
8387 TREE_SIDE_EFFECTS (cleanup) = 1;
8388
8389 /* Start a new binding layer that will keep track of all cleanup
8390 actions to be performed. */
8e91754e 8391 expand_start_bindings (2);
b335b813
PB
8392
8393 target_temp_slot_level = temp_slot_level;
8394
8395 expand_decl_cleanup (NULL_TREE, cleanup);
8396 op0 = expand_expr (try_block, target, tmode, modifier);
8397
8398 preserve_temp_slots (op0);
8399 expand_end_bindings (NULL_TREE, 0, 0);
8400 emit_jump (done_label);
8401 emit_label (finally_label);
8402 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8403 emit_indirect_jump (return_link);
8404 emit_label (done_label);
8405 return op0;
8406 }
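	/* The emitted control flow is roughly

	     <try body>
	     return_link = &&resume;  goto finally;  resume:
	     goto done;
	   finally:
	     <finally body>
	     goto *return_link;
	   done:

	   where the store to return_link comes from expanding the
	   GOTO_SUBROUTINE_EXPR cleanup (the labels are illustrative).  */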
8407
8408 case GOTO_SUBROUTINE_EXPR:
8409 {
8410 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8411 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8412 rtx return_address = gen_label_rtx ();
8413 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8414 emit_jump (subr);
8415 emit_label (return_address);
8416 return const0_rtx;
8417 }
8418
e976b8b2
MS
8419 case POPDCC_EXPR:
8420 {
8421 rtx dcc = get_dynamic_cleanup_chain ();
38a448ca 8422 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
e976b8b2
MS
8423 return const0_rtx;
8424 }
8425
8426 case POPDHC_EXPR:
8427 {
8428 rtx dhc = get_dynamic_handler_chain ();
38a448ca 8429 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
e976b8b2
MS
8430 return const0_rtx;
8431 }
8432
d3707adb
RH
8433 case VA_ARG_EXPR:
8434 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8435
bbf6f052 8436 default:
90764a87 8437 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
8438 }
8439
8440 /* Here to do an ordinary binary operator, generating an instruction
8441 from the optab already placed in `this_optab'. */
8442 binop:
8443 preexpand_calls (exp);
e5e809f4 8444 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
8445 subtarget = 0;
8446 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 8447 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8448 binop2:
8449 temp = expand_binop (mode, this_optab, op0, op1, target,
8450 unsignedp, OPTAB_LIB_WIDEN);
8451 if (temp == 0)
8452 abort ();
8453 return temp;
8454}
b93a436e 8455\f
14a774a9
RK
8456/* Similar to expand_expr, except that we don't specify a target, target
8457 mode, or modifier and we return the alignment of the inner type. This is
8458 used in cases where it is not necessary to align the result to the
8459 alignment of its type as long as we know the alignment of the result, for
8460 example for comparisons of BLKmode values. */
8461
8462static rtx
8463expand_expr_unaligned (exp, palign)
8464 register tree exp;
8465 int *palign;
8466{
8467 register rtx op0;
8468 tree type = TREE_TYPE (exp);
8469 register enum machine_mode mode = TYPE_MODE (type);
8470
8471 /* Default the alignment we return to that of the type. */
8472 *palign = TYPE_ALIGN (type);
8473
8474 /* The only cases in which we do anything special is if the resulting mode
8475 is BLKmode. */
8476 if (mode != BLKmode)
8477 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8478
8479 switch (TREE_CODE (exp))
8480 {
8481 case CONVERT_EXPR:
8482 case NOP_EXPR:
8483 case NON_LVALUE_EXPR:
8484 /* Conversions between BLKmode values don't change the underlying
8485 alignment or value. */
8486 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8487 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8488 break;
8489
8490 case ARRAY_REF:
8491 /* Much of the code for this case is copied directly from expand_expr.
 8492	 We need to duplicate it here because we do something different
 8493	 in the fall-through case, and we must handle the same exceptions
 8494	 it does.  */
8495 {
8496 tree array = TREE_OPERAND (exp, 0);
8497 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8498 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
8499 tree index = TREE_OPERAND (exp, 1);
8500 tree index_type = TREE_TYPE (index);
8501 HOST_WIDE_INT i;
8502
8503 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8504 abort ();
8505
8506 /* Optimize the special-case of a zero lower bound.
8507
8508 We convert the low_bound to sizetype to avoid some problems
8509 with constant folding. (E.g. suppose the lower bound is 1,
8510 and its mode is QI. Without the conversion, (ARRAY
8511 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
8512 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
8513
8514 But sizetype isn't quite right either (especially if
8515 the lowbound is negative). FIXME */
8516
8517 if (! integer_zerop (low_bound))
8518 index = fold (build (MINUS_EXPR, index_type, index,
8519 convert (sizetype, low_bound)));
8520
8521 /* If this is a constant index into a constant array,
 8522	 just get the value from the array.  Handle both cases: when
8523 we have an explicit constructor and when our operand is a variable
8524 that was declared const. */
8525
8526 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
8527 {
8528 if (TREE_CODE (index) == INTEGER_CST
8529 && TREE_INT_CST_HIGH (index) == 0)
8530 {
8531 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
8532
8533 i = TREE_INT_CST_LOW (index);
8534 while (elem && i--)
8535 elem = TREE_CHAIN (elem);
8536 if (elem)
8537 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8538 palign);
8539 }
8540 }
8541
8542 else if (optimize >= 1
8543 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8544 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8545 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8546 {
8547 if (TREE_CODE (index) == INTEGER_CST)
8548 {
8549 tree init = DECL_INITIAL (array);
8550
8551 i = TREE_INT_CST_LOW (index);
8552 if (TREE_CODE (init) == CONSTRUCTOR)
8553 {
8554 tree elem = CONSTRUCTOR_ELTS (init);
8555
8556 while (elem
8557 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
8558 elem = TREE_CHAIN (elem);
8559 if (elem)
8560 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8561 palign);
8562 }
8563 }
8564 }
8565 }
8566
8567 /* ... fall through ... */
8568
8569 case COMPONENT_REF:
8570 case BIT_FIELD_REF:
8571 /* If the operand is a CONSTRUCTOR, we can just extract the
8572 appropriate field if it is present. Don't do this if we have
8573 already written the data since we want to refer to that copy
8574 and varasm.c assumes that's what we'll do. */
8575 if (TREE_CODE (exp) != ARRAY_REF
8576 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8577 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8578 {
8579 tree elt;
8580
8581 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8582 elt = TREE_CHAIN (elt))
8583 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8584 /* Note that unlike the case in expand_expr, we know this is
8585 BLKmode and hence not an integer. */
8586 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8587 }
8588
8589 {
8590 enum machine_mode mode1;
8591 int bitsize;
8592 int bitpos;
8593 tree offset;
8594 int volatilep = 0;
8595 int alignment;
8596 int unsignedp;
8597 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8598 &mode1, &unsignedp, &volatilep,
8599 &alignment);
8600
8601 /* If we got back the original object, something is wrong. Perhaps
8602 we are evaluating an expression too early. In any event, don't
8603 infinitely recurse. */
8604 if (tem == exp)
8605 abort ();
8606
8607 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8608
8609 /* If this is a constant, put it into a register if it is a
8610 legitimate constant and OFFSET is 0 and memory if it isn't. */
8611 if (CONSTANT_P (op0))
8612 {
8613 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8614
8615 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8616 && offset == 0)
8617 op0 = force_reg (inner_mode, op0);
8618 else
8619 op0 = validize_mem (force_const_mem (inner_mode, op0));
8620 }
8621
8622 if (offset != 0)
8623 {
8624 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8625
8626 /* If this object is in a register, put it into memory.
8627 This case can't occur in C, but can in Ada if we have
8628 unchecked conversion of an expression from a scalar type to
8629 an array or record type. */
8630 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8631 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8632 {
8633 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8634
8635 mark_temp_addr_taken (memloc);
8636 emit_move_insn (memloc, op0);
8637 op0 = memloc;
8638 }
8639
8640 if (GET_CODE (op0) != MEM)
8641 abort ();
8642
8643 if (GET_MODE (offset_rtx) != ptr_mode)
8644 {
8645#ifdef POINTERS_EXTEND_UNSIGNED
8646 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8647#else
8648 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8649#endif
8650 }
8651
8652 op0 = change_address (op0, VOIDmode,
8653 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8654 force_reg (ptr_mode,
8655 offset_rtx)));
8656 }
8657
8658 /* Don't forget about volatility even if this is a bitfield. */
8659 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8660 {
8661 op0 = copy_rtx (op0);
8662 MEM_VOLATILE_P (op0) = 1;
8663 }
8664
8665 /* Check the access. */
8666 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8667 {
8668 rtx to;
8669 int size;
8670
8671 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8672 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8673
8674 /* Check the access right of the pointer. */
8675 if (size > BITS_PER_UNIT)
8676 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8677 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8678 TYPE_MODE (sizetype),
8679 GEN_INT (MEMORY_USE_RO),
8680 TYPE_MODE (integer_type_node));
8681 }
8682
a2b99161
RK
8683 /* In cases where an aligned union has an unaligned object
8684 as a field, we might be extracting a BLKmode value from
8685 an integer-mode (e.g., SImode) object. Handle this case
8686 by doing the extract into an object as wide as the field
8687 (which we know to be the width of a basic mode), then
8688 storing into memory, and changing the mode to BLKmode.
8689 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8690 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8691 if (mode1 == VOIDmode
8692 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
e1565e65 8693 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
a2b99161
RK
8694 && (TYPE_ALIGN (type) > alignment * BITS_PER_UNIT
8695 || bitpos % TYPE_ALIGN (type) != 0)))
8696 {
8697 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8698
8699 if (ext_mode == BLKmode)
8700 {
8701 /* In this case, BITPOS must start at a byte boundary. */
8702 if (GET_CODE (op0) != MEM
8703 || bitpos % BITS_PER_UNIT != 0)
8704 abort ();
8705
8706 op0 = change_address (op0, VOIDmode,
8707 plus_constant (XEXP (op0, 0),
8708 bitpos / BITS_PER_UNIT));
8709 }
8710 else
8711 {
8712 rtx new = assign_stack_temp (ext_mode,
8713 bitsize / BITS_PER_UNIT, 0);
8714
8715 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8716 unsignedp, NULL_RTX, ext_mode,
8717 ext_mode, alignment,
8718 int_size_in_bytes (TREE_TYPE (tem)));
8719
8720 /* If the result is a record type and BITSIZE is narrower than
8721 the mode of OP0, an integral mode, and this is a big endian
8722 machine, we must put the field into the high-order bits. */
8723 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8724 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8725 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8726 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8727 size_int (GET_MODE_BITSIZE
8728 (GET_MODE (op0))
8729 - bitsize),
8730 op0, 1);
8731
8732
8733 emit_move_insn (new, op0);
8734 op0 = copy_rtx (new);
8735 PUT_MODE (op0, BLKmode);
8736 }
8737 }
8738 else
8739 /* Get a reference to just this component. */
8740 op0 = change_address (op0, mode1,
8741 plus_constant (XEXP (op0, 0),
8742 (bitpos / BITS_PER_UNIT)));
14a774a9
RK
8743
8744 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8745
8746 /* Adjust the alignment in case the bit position is not
8747 a multiple of the alignment of the inner object. */
8748 while (bitpos % alignment != 0)
8749 alignment >>= 1;
8750
8751 if (GET_CODE (XEXP (op0, 0)) == REG)
8752 mark_reg_pointer (XEXP (op0, 0), alignment);
8753
8754 MEM_IN_STRUCT_P (op0) = 1;
8755 MEM_VOLATILE_P (op0) |= volatilep;
8756
8757 *palign = alignment;
8758 return op0;
8759 }
8760
8761 default:
8762 break;
8763
8764 }
8765
8766 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8767}
8768\f
b93a436e
JL
8769/* Return the tree node and offset if a given argument corresponds to
8770 a string constant. */
8771
28f4ec01 8772tree
b93a436e
JL
8773string_constant (arg, ptr_offset)
8774 tree arg;
8775 tree *ptr_offset;
8776{
8777 STRIP_NOPS (arg);
8778
8779 if (TREE_CODE (arg) == ADDR_EXPR
8780 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8781 {
8782 *ptr_offset = integer_zero_node;
8783 return TREE_OPERAND (arg, 0);
8784 }
8785 else if (TREE_CODE (arg) == PLUS_EXPR)
8786 {
8787 tree arg0 = TREE_OPERAND (arg, 0);
8788 tree arg1 = TREE_OPERAND (arg, 1);
8789
8790 STRIP_NOPS (arg0);
8791 STRIP_NOPS (arg1);
8792
8793 if (TREE_CODE (arg0) == ADDR_EXPR
8794 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 8795 {
b93a436e
JL
8796 *ptr_offset = arg1;
8797 return TREE_OPERAND (arg0, 0);
bbf6f052 8798 }
b93a436e
JL
8799 else if (TREE_CODE (arg1) == ADDR_EXPR
8800 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 8801 {
b93a436e
JL
8802 *ptr_offset = arg0;
8803 return TREE_OPERAND (arg1, 0);
bbf6f052 8804 }
b93a436e 8805 }
ca695ac9 8806
b93a436e
JL
8807 return 0;
8808}
ca695ac9 8809\f
b93a436e
JL
8810/* Expand code for a post- or pre- increment or decrement
8811 and return the RTX for the result.
8812 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
1499e0a8 8813
b93a436e
JL
8814static rtx
8815expand_increment (exp, post, ignore)
8816 register tree exp;
8817 int post, ignore;
ca695ac9 8818{
b93a436e
JL
8819 register rtx op0, op1;
8820 register rtx temp, value;
8821 register tree incremented = TREE_OPERAND (exp, 0);
8822 optab this_optab = add_optab;
8823 int icode;
8824 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8825 int op0_is_copy = 0;
8826 int single_insn = 0;
8827 /* 1 means we can't store into OP0 directly,
8828 because it is a subreg narrower than a word,
8829 and we don't dare clobber the rest of the word. */
8830 int bad_subreg = 0;
1499e0a8 8831
b93a436e
JL
8832 /* Stabilize any component ref that might need to be
8833 evaluated more than once below. */
8834 if (!post
8835 || TREE_CODE (incremented) == BIT_FIELD_REF
8836 || (TREE_CODE (incremented) == COMPONENT_REF
8837 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8838 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8839 incremented = stabilize_reference (incremented);
8840 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8841 ones into save exprs so that they don't accidentally get evaluated
8842 more than once by the code below. */
8843 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8844 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8845 incremented = save_expr (incremented);
e9a25f70 8846
b93a436e
JL
8847 /* Compute the operands as RTX.
8848 Note whether OP0 is the actual lvalue or a copy of it:
8849 I believe it is a copy iff it is a register or subreg
8850 and insns were generated in computing it. */
e9a25f70 8851
b93a436e
JL
8852 temp = get_last_insn ();
8853 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
e9a25f70 8854
b93a436e
JL
8855 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8856 in place but instead must do sign- or zero-extension during assignment,
8857 so we copy it into a new register and let the code below use it as
8858 a copy.
e9a25f70 8859
b93a436e
JL
 8860     Note that we can safely modify this SUBREG since it is known not to be
8861 shared (it was made by the expand_expr call above). */
8862
8863 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8864 {
8865 if (post)
8866 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8867 else
8868 bad_subreg = 1;
8869 }
8870 else if (GET_CODE (op0) == SUBREG
8871 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8872 {
8873 /* We cannot increment this SUBREG in place. If we are
8874 post-incrementing, get a copy of the old value. Otherwise,
8875 just mark that we cannot increment in place. */
8876 if (post)
8877 op0 = copy_to_reg (op0);
8878 else
8879 bad_subreg = 1;
e9a25f70
JL
8880 }
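      /* Example of the promoted case above: where PROMOTE_MODE widens
	 HImode locals to SImode, `short s; s++;' yields a
	 (subreg:HI (reg:SI)) with SUBREG_PROMOTED_VAR_P set.  Adding one
	 in place would skip the sign- or zero-extension the assignment
	 must perform, so the increment goes through expand_assignment
	 below instead.  */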
8881
b93a436e
JL
8882 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8883 && temp != get_last_insn ());
8884 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8885 EXPAND_MEMORY_USE_BAD);
1499e0a8 8886
b93a436e
JL
8887 /* Decide whether incrementing or decrementing. */
8888 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8889 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8890 this_optab = sub_optab;
8891
8892 /* Convert decrement by a constant into a negative increment. */
8893 if (this_optab == sub_optab
8894 && GET_CODE (op1) == CONST_INT)
ca695ac9 8895 {
b93a436e
JL
8896 op1 = GEN_INT (- INTVAL (op1));
8897 this_optab = add_optab;
ca695ac9 8898 }
1499e0a8 8899
b93a436e
JL
8900 /* For a preincrement, see if we can do this with a single instruction. */
8901 if (!post)
8902 {
8903 icode = (int) this_optab->handlers[(int) mode].insn_code;
8904 if (icode != (int) CODE_FOR_nothing
8905 /* Make sure that OP0 is valid for operands 0 and 1
8906 of the insn we want to queue. */
a995e389
RH
8907 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8908 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8909 && (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e
JL
8910 single_insn = 1;
8911 }
bbf6f052 8912
b93a436e
JL
8913 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8914 then we cannot just increment OP0. We must therefore contrive to
8915 increment the original value. Then, for postincrement, we can return
8916 OP0 since it is a copy of the old value. For preincrement, expand here
8917 unless we can do it with a single insn.
bbf6f052 8918
b93a436e
JL
8919 Likewise if storing directly into OP0 would clobber high bits
8920 we need to preserve (bad_subreg). */
8921 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 8922 {
b93a436e
JL
8923 /* This is the easiest way to increment the value wherever it is.
8924 Problems with multiple evaluation of INCREMENTED are prevented
8925 because either (1) it is a component_ref or preincrement,
8926 in which case it was stabilized above, or (2) it is an array_ref
8927 with constant index in an array in a register, which is
8928 safe to reevaluate. */
8929 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8930 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8931 ? MINUS_EXPR : PLUS_EXPR),
8932 TREE_TYPE (exp),
8933 incremented,
8934 TREE_OPERAND (exp, 1));
a358cee0 8935
b93a436e
JL
8936 while (TREE_CODE (incremented) == NOP_EXPR
8937 || TREE_CODE (incremented) == CONVERT_EXPR)
8938 {
8939 newexp = convert (TREE_TYPE (incremented), newexp);
8940 incremented = TREE_OPERAND (incremented, 0);
8941 }
bbf6f052 8942
b93a436e
JL
8943 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
8944 return post ? op0 : temp;
8945 }
bbf6f052 8946
b93a436e
JL
8947 if (post)
8948 {
8949 /* We have a true reference to the value in OP0.
8950 If there is an insn to add or subtract in this mode, queue it.
8951 Queueing the increment insn avoids the register shuffling
8952 that often results if we must increment now and first save
8953 the old value for subsequent use. */
bbf6f052 8954
b93a436e
JL
8955#if 0 /* Turned off to avoid making extra insn for indexed memref. */
8956 op0 = stabilize (op0);
8957#endif
41dfd40c 8958
b93a436e
JL
8959 icode = (int) this_optab->handlers[(int) mode].insn_code;
8960 if (icode != (int) CODE_FOR_nothing
8961 /* Make sure that OP0 is valid for operands 0 and 1
8962 of the insn we want to queue. */
a995e389
RH
8963 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8964 && (*insn_data[icode].operand[1].predicate) (op0, mode))
b93a436e 8965 {
a995e389 8966 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 8967 op1 = force_reg (mode, op1);
bbf6f052 8968
b93a436e
JL
8969 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8970 }
8971 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8972 {
8973 rtx addr = (general_operand (XEXP (op0, 0), mode)
8974 ? force_reg (Pmode, XEXP (op0, 0))
8975 : copy_to_reg (XEXP (op0, 0)));
8976 rtx temp, result;
ca695ac9 8977
b93a436e
JL
8978 op0 = change_address (op0, VOIDmode, addr);
8979 temp = force_reg (GET_MODE (op0), op0);
a995e389 8980 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 8981 op1 = force_reg (mode, op1);
ca695ac9 8982
b93a436e
JL
8983 /* The increment queue is LIFO, thus we have to `queue'
8984 the instructions in reverse order. */
8985 enqueue_insn (op0, gen_move_insn (op0, temp));
8986 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8987 return result;
bbf6f052
RK
8988 }
8989 }
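      /* Thus for a postincrement of a MEM the order at emit_queue time
	 is: first TEMP = TEMP + OP1, then the store back through ADDR;
	 the store is enqueued first precisely because the queue pops in
	 reverse.  */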
ca695ac9 8990
b93a436e
JL
8991 /* Preincrement, or we can't increment with one simple insn. */
8992 if (post)
8993 /* Save a copy of the value before inc or dec, to return it later. */
8994 temp = value = copy_to_reg (op0);
8995 else
8996 /* Arrange to return the incremented value. */
8997 /* Copy the rtx because expand_binop will protect from the queue,
8998 and the results of that would be invalid for us to return
8999 if our caller does emit_queue before using our result. */
9000 temp = copy_rtx (value = op0);
bbf6f052 9001
b93a436e
JL
9002 /* Increment however we can. */
9003 op1 = expand_binop (mode, this_optab, value, op1,
7d384cc0 9004 current_function_check_memory_usage ? NULL_RTX : op0,
b93a436e
JL
9005 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9006 /* Make sure the value is stored into OP0. */
9007 if (op1 != op0)
9008 emit_move_insn (op0, op1);
5718612f 9009
b93a436e
JL
9010 return temp;
9011}
9012\f
9013/* Expand all function calls contained within EXP, innermost ones first.
9014 But don't look within expressions that have sequence points.
9015 For each CALL_EXPR, record the rtx for its value
9016 in the CALL_EXPR_RTL field. */
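/* E.g., while expanding `x = a + f (b)', the call to f has already been
   expanded and its value recorded in CALL_EXPR_RTL, so no partially
   computed operand of the addition can be clobbered by expanding the
   call in mid-expression.  */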
5718612f 9017
b93a436e
JL
9018static void
9019preexpand_calls (exp)
9020 tree exp;
9021{
9022 register int nops, i;
9023 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5718612f 9024
b93a436e
JL
9025 if (! do_preexpand_calls)
9026 return;
5718612f 9027
b93a436e 9028 /* Only expressions and references can contain calls. */
bbf6f052 9029
b93a436e
JL
9030 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9031 return;
bbf6f052 9032
b93a436e
JL
9033 switch (TREE_CODE (exp))
9034 {
9035 case CALL_EXPR:
9036 /* Do nothing if already expanded. */
9037 if (CALL_EXPR_RTL (exp) != 0
9038 /* Do nothing if the call returns a variable-sized object. */
 9039	  || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9040 /* Do nothing to built-in functions. */
9041 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9042 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9043 == FUNCTION_DECL)
9044 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9045 return;
bbf6f052 9046
b93a436e
JL
9047 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9048 return;
bbf6f052 9049
b93a436e
JL
9050 case COMPOUND_EXPR:
9051 case COND_EXPR:
9052 case TRUTH_ANDIF_EXPR:
9053 case TRUTH_ORIF_EXPR:
9054 /* If we find one of these, then we can be sure
9055 the adjust will be done for it (since it makes jumps).
9056 Do it now, so that if this is inside an argument
9057 of a function, we don't get the stack adjustment
9058 after some other args have already been pushed. */
9059 do_pending_stack_adjust ();
9060 return;
bbf6f052 9061
b93a436e
JL
9062 case BLOCK:
9063 case RTL_EXPR:
9064 case WITH_CLEANUP_EXPR:
9065 case CLEANUP_POINT_EXPR:
9066 case TRY_CATCH_EXPR:
9067 return;
bbf6f052 9068
b93a436e
JL
9069 case SAVE_EXPR:
9070 if (SAVE_EXPR_RTL (exp) != 0)
9071 return;
9072
9073 default:
9074 break;
ca695ac9 9075 }
bbf6f052 9076
b93a436e
JL
9077 nops = tree_code_length[(int) TREE_CODE (exp)];
9078 for (i = 0; i < nops; i++)
9079 if (TREE_OPERAND (exp, i) != 0)
9080 {
19832c77
MM
9081 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9082 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9083 It doesn't happen before the call is made. */
9084 ;
9085 else
9086 {
9087 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9088 if (type == 'e' || type == '<' || type == '1' || type == '2'
9089 || type == 'r')
9090 preexpand_calls (TREE_OPERAND (exp, i));
9091 }
b93a436e
JL
9092 }
9093}
9094\f
9095/* At the start of a function, record that we have no previously-pushed
9096 arguments waiting to be popped. */
bbf6f052 9097
b93a436e
JL
9098void
9099init_pending_stack_adjust ()
9100{
9101 pending_stack_adjust = 0;
9102}
bbf6f052 9103
b93a436e 9104/* When exiting from a function, if safe, clear out any pending stack adjust
060fbabf
JL
9105 so the adjustment won't get done.
9106
9107 Note, if the current function calls alloca, then it must have a
9108 frame pointer regardless of the value of flag_omit_frame_pointer. */
bbf6f052 9109
b93a436e
JL
9110void
9111clear_pending_stack_adjust ()
9112{
9113#ifdef EXIT_IGNORE_STACK
9114 if (optimize > 0
060fbabf
JL
9115 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9116 && EXIT_IGNORE_STACK
b93a436e
JL
9117 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9118 && ! flag_inline_functions)
9119 pending_stack_adjust = 0;
9120#endif
9121}
bbf6f052 9122
b93a436e
JL
9123/* Pop any previously-pushed arguments that have not been popped yet. */
9124
9125void
9126do_pending_stack_adjust ()
9127{
9128 if (inhibit_defer_pop == 0)
ca695ac9 9129 {
b93a436e
JL
9130 if (pending_stack_adjust != 0)
9131 adjust_stack (GEN_INT (pending_stack_adjust));
9132 pending_stack_adjust = 0;
bbf6f052 9133 }
bbf6f052
RK
9134}
9135\f
b93a436e 9136/* Expand conditional expressions. */
bbf6f052 9137
b93a436e
JL
9138/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9139 LABEL is an rtx of code CODE_LABEL, in this function and all the
9140 functions here. */
bbf6f052 9141
b93a436e
JL
9142void
9143jumpifnot (exp, label)
ca695ac9 9144 tree exp;
b93a436e 9145 rtx label;
bbf6f052 9146{
b93a436e
JL
9147 do_jump (exp, label, NULL_RTX);
9148}
bbf6f052 9149
b93a436e 9150/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
ca695ac9 9151
b93a436e
JL
9152void
9153jumpif (exp, label)
9154 tree exp;
9155 rtx label;
9156{
9157 do_jump (exp, NULL_RTX, label);
9158}
ca695ac9 9159
b93a436e
JL
9160/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9161 the result is zero, or IF_TRUE_LABEL if the result is one.
9162 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9163 meaning fall through in that case.
ca695ac9 9164
b93a436e
JL
9165 do_jump always does any pending stack adjust except when it does not
9166 actually perform a jump. An example where there is no jump
9167 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
ca695ac9 9168
b93a436e
JL
9169 This function is responsible for optimizing cases such as
9170 &&, || and comparison operators in EXP. */
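/* For example, jumpifnot on `a && b' emits roughly

     if (a == 0) goto label;
     if (b == 0) goto label;

   without ever materializing the value of `a && b' in a register.  */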
5718612f 9171
b93a436e
JL
9172void
9173do_jump (exp, if_false_label, if_true_label)
9174 tree exp;
9175 rtx if_false_label, if_true_label;
9176{
9177 register enum tree_code code = TREE_CODE (exp);
9178 /* Some cases need to create a label to jump to
9179 in order to properly fall through.
9180 These cases set DROP_THROUGH_LABEL nonzero. */
9181 rtx drop_through_label = 0;
9182 rtx temp;
b93a436e
JL
9183 int i;
9184 tree type;
9185 enum machine_mode mode;
ca695ac9 9186
dbecbbe4
JL
9187#ifdef MAX_INTEGER_COMPUTATION_MODE
9188 check_max_integer_computation_mode (exp);
9189#endif
9190
b93a436e 9191 emit_queue ();
ca695ac9 9192
b93a436e 9193 switch (code)
ca695ac9 9194 {
b93a436e 9195 case ERROR_MARK:
ca695ac9 9196 break;
bbf6f052 9197
b93a436e
JL
9198 case INTEGER_CST:
9199 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9200 if (temp)
9201 emit_jump (temp);
9202 break;
bbf6f052 9203
b93a436e
JL
9204#if 0
9205 /* This is not true with #pragma weak */
9206 case ADDR_EXPR:
9207 /* The address of something can never be zero. */
9208 if (if_true_label)
9209 emit_jump (if_true_label);
9210 break;
9211#endif
bbf6f052 9212
b93a436e
JL
9213 case NOP_EXPR:
9214 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9215 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9216 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9217 goto normal;
9218 case CONVERT_EXPR:
9219 /* If we are narrowing the operand, we have to do the compare in the
9220 narrower mode. */
9221 if ((TYPE_PRECISION (TREE_TYPE (exp))
9222 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9223 goto normal;
9224 case NON_LVALUE_EXPR:
9225 case REFERENCE_EXPR:
9226 case ABS_EXPR:
9227 case NEGATE_EXPR:
9228 case LROTATE_EXPR:
9229 case RROTATE_EXPR:
9230 /* These cannot change zero->non-zero or vice versa. */
9231 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9232 break;
bbf6f052 9233
14a774a9
RK
9234 case WITH_RECORD_EXPR:
9235 /* Put the object on the placeholder list, recurse through our first
9236 operand, and pop the list. */
9237 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9238 placeholder_list);
9239 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9240 placeholder_list = TREE_CHAIN (placeholder_list);
9241 break;
9242
b93a436e
JL
9243#if 0
9244 /* This is never less insns than evaluating the PLUS_EXPR followed by
9245 a test and can be longer if the test is eliminated. */
9246 case PLUS_EXPR:
9247 /* Reduce to minus. */
9248 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9249 TREE_OPERAND (exp, 0),
9250 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9251 TREE_OPERAND (exp, 1))));
9252 /* Process as MINUS. */
ca695ac9 9253#endif
bbf6f052 9254
b93a436e
JL
9255 case MINUS_EXPR:
9256 /* Non-zero iff operands of minus differ. */
b30f05db
BS
9257 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9258 TREE_OPERAND (exp, 0),
9259 TREE_OPERAND (exp, 1)),
9260 NE, NE, if_false_label, if_true_label);
b93a436e 9261 break;
bbf6f052 9262
b93a436e
JL
9263 case BIT_AND_EXPR:
9264 /* If we are AND'ing with a small constant, do this comparison in the
9265 smallest type that fits. If the machine doesn't have comparisons
9266 that small, it will be converted back to the wider comparison.
9267 This helps if we are testing the sign bit of a narrower object.
9268 combine can't do this for us because it can't know whether a
9269 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
bbf6f052 9270
b93a436e
JL
9271 if (! SLOW_BYTE_ACCESS
9272 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9273 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
9274 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
9275 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9276 && (type = type_for_mode (mode, 1)) != 0
9277 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9278 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9279 != CODE_FOR_nothing))
9280 {
9281 do_jump (convert (type, exp), if_false_label, if_true_label);
9282 break;
9283 }
9284 goto normal;
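      /* E.g., for `if (c & 0x40)' with C an SImode int, I is
	 floor_log2 (0x40) = 6, so the comparison can be done in QImode,
	 the narrowest integer mode of at least 7 bits.  */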
bbf6f052 9285
b93a436e
JL
9286 case TRUTH_NOT_EXPR:
9287 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9288 break;
bbf6f052 9289
b93a436e
JL
9290 case TRUTH_ANDIF_EXPR:
9291 if (if_false_label == 0)
9292 if_false_label = drop_through_label = gen_label_rtx ();
9293 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9294 start_cleanup_deferral ();
9295 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9296 end_cleanup_deferral ();
9297 break;
bbf6f052 9298
b93a436e
JL
9299 case TRUTH_ORIF_EXPR:
9300 if (if_true_label == 0)
9301 if_true_label = drop_through_label = gen_label_rtx ();
9302 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9303 start_cleanup_deferral ();
9304 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9305 end_cleanup_deferral ();
9306 break;
bbf6f052 9307
b93a436e
JL
9308 case COMPOUND_EXPR:
9309 push_temp_slots ();
9310 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9311 preserve_temp_slots (NULL_RTX);
9312 free_temp_slots ();
9313 pop_temp_slots ();
9314 emit_queue ();
9315 do_pending_stack_adjust ();
9316 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9317 break;
bbf6f052 9318
b93a436e
JL
9319 case COMPONENT_REF:
9320 case BIT_FIELD_REF:
9321 case ARRAY_REF:
9322 {
9323 int bitsize, bitpos, unsignedp;
9324 enum machine_mode mode;
9325 tree type;
9326 tree offset;
9327 int volatilep = 0;
9328 int alignment;
bbf6f052 9329
b93a436e
JL
9330 /* Get description of this reference. We don't actually care
9331 about the underlying object here. */
9332 get_inner_reference (exp, &bitsize, &bitpos, &offset,
9333 &mode, &unsignedp, &volatilep,
9334 &alignment);
bbf6f052 9335
b93a436e
JL
9336 type = type_for_size (bitsize, unsignedp);
9337 if (! SLOW_BYTE_ACCESS
9338 && type != 0 && bitsize >= 0
9339 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9340 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9341 != CODE_FOR_nothing))
9342 {
9343 do_jump (convert (type, exp), if_false_label, if_true_label);
9344 break;
9345 }
9346 goto normal;
9347 }
bbf6f052 9348
b93a436e
JL
9349 case COND_EXPR:
9350 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9351 if (integer_onep (TREE_OPERAND (exp, 1))
9352 && integer_zerop (TREE_OPERAND (exp, 2)))
9353 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
bbf6f052 9354
b93a436e
JL
9355 else if (integer_zerop (TREE_OPERAND (exp, 1))
9356 && integer_onep (TREE_OPERAND (exp, 2)))
9357 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
bbf6f052 9358
b93a436e
JL
9359 else
9360 {
9361 register rtx label1 = gen_label_rtx ();
9362 drop_through_label = gen_label_rtx ();
bbf6f052 9363
b93a436e 9364 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
bbf6f052 9365
b93a436e
JL
9366 start_cleanup_deferral ();
9367 /* Now the THEN-expression. */
9368 do_jump (TREE_OPERAND (exp, 1),
9369 if_false_label ? if_false_label : drop_through_label,
9370 if_true_label ? if_true_label : drop_through_label);
9371 /* In case the do_jump just above never jumps. */
9372 do_pending_stack_adjust ();
9373 emit_label (label1);
bbf6f052 9374
b93a436e
JL
9375 /* Now the ELSE-expression. */
9376 do_jump (TREE_OPERAND (exp, 2),
9377 if_false_label ? if_false_label : drop_through_label,
9378 if_true_label ? if_true_label : drop_through_label);
9379 end_cleanup_deferral ();
9380 }
9381 break;
bbf6f052 9382
b93a436e
JL
9383 case EQ_EXPR:
9384 {
9385 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 9386
9ec36da5
JL
9387 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9388 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
9389 {
9390 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9391 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9392 do_jump
9393 (fold
9394 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9395 fold (build (EQ_EXPR, TREE_TYPE (exp),
9396 fold (build1 (REALPART_EXPR,
9397 TREE_TYPE (inner_type),
9398 exp0)),
9399 fold (build1 (REALPART_EXPR,
9400 TREE_TYPE (inner_type),
9401 exp1)))),
9402 fold (build (EQ_EXPR, TREE_TYPE (exp),
9403 fold (build1 (IMAGPART_EXPR,
9404 TREE_TYPE (inner_type),
9405 exp0)),
9406 fold (build1 (IMAGPART_EXPR,
9407 TREE_TYPE (inner_type),
9408 exp1)))))),
9409 if_false_label, if_true_label);
9410 }
9ec36da5
JL
9411
9412 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9413 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9414
b93a436e 9415 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
1c0290ea 9416 && !can_compare_p (TYPE_MODE (inner_type), ccp_jump))
b93a436e
JL
9417 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9418 else
b30f05db 9419 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
b93a436e
JL
9420 break;
9421 }
bbf6f052 9422
b93a436e
JL
9423 case NE_EXPR:
9424 {
9425 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 9426
9ec36da5
JL
9427 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9428 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
9429 {
9430 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9431 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9432 do_jump
9433 (fold
9434 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9435 fold (build (NE_EXPR, TREE_TYPE (exp),
9436 fold (build1 (REALPART_EXPR,
9437 TREE_TYPE (inner_type),
9438 exp0)),
9439 fold (build1 (REALPART_EXPR,
9440 TREE_TYPE (inner_type),
9441 exp1)))),
9442 fold (build (NE_EXPR, TREE_TYPE (exp),
9443 fold (build1 (IMAGPART_EXPR,
9444 TREE_TYPE (inner_type),
9445 exp0)),
9446 fold (build1 (IMAGPART_EXPR,
9447 TREE_TYPE (inner_type),
9448 exp1)))))),
9449 if_false_label, if_true_label);
9450 }
9ec36da5
JL
9451
9452 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9453 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9454
b93a436e 9455 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
1c0290ea 9456 && !can_compare_p (TYPE_MODE (inner_type), ccp_jump))
b93a436e
JL
9457 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9458 else
b30f05db 9459 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
b93a436e
JL
9460 break;
9461 }
bbf6f052 9462
b93a436e 9463 case LT_EXPR:
1c0290ea
BS
9464 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9465 if (GET_MODE_CLASS (mode) == MODE_INT
9466 && ! can_compare_p (mode, ccp_jump))
b93a436e
JL
9467 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
9468 else
b30f05db 9469 do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
b93a436e 9470 break;
bbf6f052 9471
b93a436e 9472 case LE_EXPR:
1c0290ea
BS
9473 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9474 if (GET_MODE_CLASS (mode) == MODE_INT
9475 && ! can_compare_p (mode, ccp_jump))
b93a436e
JL
9476 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
9477 else
b30f05db 9478 do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
b93a436e 9479 break;
bbf6f052 9480
b93a436e 9481 case GT_EXPR:
1c0290ea
BS
9482 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9483 if (GET_MODE_CLASS (mode) == MODE_INT
9484 && ! can_compare_p (mode, ccp_jump))
b93a436e
JL
9485 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
9486 else
b30f05db 9487 do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
b93a436e 9488 break;
bbf6f052 9489
b93a436e 9490 case GE_EXPR:
1c0290ea
BS
9491 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
9492 if (GET_MODE_CLASS (mode) == MODE_INT
9493 && ! can_compare_p (mode, ccp_jump))
b93a436e
JL
9494 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
9495 else
b30f05db 9496 do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
b93a436e 9497 break;

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
        {
          rtx target = temp == const0_rtx ? if_false_label : if_true_label;
          if (target)
            emit_jump (target);
        }
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && ! can_compare_p (GET_MODE (temp), ccp_jump))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                                 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                 GET_MODE (temp), NULL_RTX, 0,
                                 if_false_label, if_true_label);
      else
        abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
\f
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */
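/* (do_jump above relies on this: its LT_EXPR case passes SWAP == 1,
   because with the operands exchanged, testing "op1 > op0" is exactly
   testing "op0 < op1".)  */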

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label);
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                              if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX, 0,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

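/* For illustration only -- this is not compiler code.  On a 32-bit target,
   a 64-bit signed test "a > b" comes out just the way the loop above emits
   it.  A minimal C sketch, assuming 32-bit words, with the high words
   compared signed and everything below them compared unsigned:

     int gt_by_parts (int hi0, unsigned lo0, int hi1, unsigned lo1)
     {
       if (hi0 > hi1)
         return 1;
       if (hi0 != hi1)
         return 0;
       return lo0 > lo1;
     }
*/
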
/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */
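/* Each word is compared in turn, as shown below: the first pair of words
   that differs jumps straight to IF_FALSE_LABEL, and only if every word
   matches does control reach the jump to IF_TRUE_LABEL.  */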

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                             word_mode, NULL_RTX, 0, if_false_label,
                             NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */
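  /* E.g. a 128-bit equality-with-zero test on a 32-bit target becomes a
     single OR chain and one word-sized compare,

       part = w0;  part |= w1;  part |= w2;  part |= w3;
       if (part == 0) goto if_true_label;  else goto if_false_label;

     rather than four separate compare-and-branch sequences.  */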

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, 0, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Generate code for a comparison of OP0 and OP1 with rtx code CODE,
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */
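/* (The rtx returned below has the form "(CODE cc0 0)"; once emit_cmp_insn
   has set the condition code, the caller can hand that rtx to a branch
   generator such as bcc_gen_fctn, or, when both operands folded to
   constants, test the returned CONST_INT directly.)  */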

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
                         if_false_label, if_true_label)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  Reversal is not safe for floating-point modes, where a
     comparison and its reverse can both be false when an operand is a
     NaN.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  int align0, align1;
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           MIN (align0, align1) / BITS_PER_UNIT,
                           if_false_label, if_true_label);
}
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
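/* A sketch of the two output shapes, for illustration only: with a working
   scc pattern, "r = (a < b)" becomes one compare plus one store-flag insn.
   The set/jump/set fallback emitted at the bottom of this function is
   equivalent to

     r = 1;
     if (a < b) goto L;
     r = 0;
   L:

   with the two constants exchanged when INVERT is set.  */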

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
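  /* E.g. "(x & 8) != 0" becomes "(x >> 3) & 1", and "(x & 8) == 0"
     becomes "((x >> 3) & 1) ^ 1"; no set-condition-code instruction
     is involved at all.  */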

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode, ccp_store_flag))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn, e.g. "x < 0", which needs
         only the sign bit.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
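  /* This is the classic unsigned range-check idiom; in C it would read

       if ((unsigned) (x - low) > (unsigned) (high - low))
         goto default_label;

     An index below LOW wraps around to a huge unsigned value, so the one
     unsigned comparison catches both ends of the range.  */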

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */