/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000
   Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
#include "toplev.h"
#include "ggc.h"
#include "intl.h"
#include "tm_p.h"

#ifndef ACCUMULATE_OUTGOING_ARGS
#define ACCUMULATE_OUTGOING_ARGS 0
#endif

/* Supply a default definition for PUSH_ARGS.  */
#ifndef PUSH_ARGS
#ifdef PUSH_ROUNDING
#define PUSH_ARGS !ACCUMULATE_OUTGOING_ARGS
#else
#define PUSH_ARGS 0
#endif
#endif

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first.  */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Don't check memory usage, since the code being emitted is itself the
   code that checks memory usage.  Used when
   current_function_check_memory_usage is true, to avoid infinite
   recursion.  */
static int in_check_memory_usage;

/* Chain of pending expressions for PLACEHOLDER_EXPR to replace.  */
static tree placeholder_list = 0;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int to_readonly;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int from_readonly;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;

static rtx get_push_address	PARAMS ((int));

static rtx enqueue_insn		PARAMS ((rtx, rtx));
static int move_by_pieces_ninsns PARAMS ((unsigned int, unsigned int));
static void move_by_pieces_1	PARAMS ((rtx (*) (rtx, ...), enum machine_mode,
					 struct move_by_pieces *));
static void clear_by_pieces	PARAMS ((rtx, int, unsigned int));
static void clear_by_pieces_1	PARAMS ((rtx (*) (rtx, ...),
					 enum machine_mode,
					 struct clear_by_pieces *));
static int is_zeros_p		PARAMS ((tree));
static int mostly_zeros_p	PARAMS ((tree));
static void store_constructor_field PARAMS ((rtx, unsigned HOST_WIDE_INT,
					     HOST_WIDE_INT, enum machine_mode,
					     tree, tree, unsigned int, int));
static void store_constructor	PARAMS ((tree, rtx, unsigned int, int,
					 HOST_WIDE_INT));
static rtx store_field		PARAMS ((rtx, HOST_WIDE_INT,
					 HOST_WIDE_INT, enum machine_mode,
					 tree, enum machine_mode, int,
					 unsigned int, HOST_WIDE_INT, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PARAMS ((enum expand_modifier));
static tree save_noncopied_parts PARAMS ((tree, tree));
static tree init_noncopied_parts PARAMS ((tree, tree));
static int safe_from_p		PARAMS ((rtx, tree, int));
static int fixed_type_p		PARAMS ((tree));
static rtx var_rtx		PARAMS ((tree));
static int readonly_fields_p	PARAMS ((tree));
static rtx expand_expr_unaligned PARAMS ((tree, unsigned int *));
static rtx expand_increment	PARAMS ((tree, int, int));
static void preexpand_calls	PARAMS ((tree));
static void do_jump_by_parts_greater PARAMS ((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PARAMS ((tree, rtx, rtx));
static void do_compare_and_jump	PARAMS ((tree, enum rtx_code, enum rtx_code,
					 rtx, rtx));
static rtx do_store_flag	PARAMS ((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) \
  (move_by_pieces_ninsns (SIZE, ALIGN) < MOVE_RATIO)
#endif

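/* Worked example (ours, not from the original source): with the default
   MOVE_RATIO of 15 on a hypothetical 32-bit target, copying an 8-byte,
   32-bit-aligned object takes two SImode moves, so
   move_by_pieces_ninsns (8, 32) == 2 < 15 and MOVE_BY_PIECES_P says to
   move it inline; a large or badly aligned copy fails the test and
   emit_block_move below falls back to a movstr pattern or a library
   call instead.  */
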
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS(MODE, ALIGN) STRICT_ALIGNMENT
#endif

/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx_REG (mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  end_sequence ();
  obfree (free_point);
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  cfun->expr = (struct expr_status *) xmalloc (sizeof (struct expr_status));

  pending_chain = 0;
  pending_stack_adjust = 0;
  arg_space_so_far = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

void
mark_expr_status (p)
     struct expr_status *p;
{
  if (p == NULL)
    return;

  ggc_mark_rtx (p->x_saveregs_value);
  ggc_mark_rtx (p->x_apply_args_value);
  ggc_mark_rtx (p->x_forced_labels);
}

void
free_expr_status (f)
     struct function *f;
{
  free (f->expr);
  f->expr = NULL;
}

/* Small sanity check that the queue is empty at the end of a function.  */

void
finish_expr_for_function ()
{
  if (pending_chain)
    abort ();
}

/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var), var, NULL_RTX, NULL_RTX,
                                  body, pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

          RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
          MEM_COPY_ATTRIBUTES (new, x);
          MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (new));
              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }
          return new;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
        {
          QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
          emit_insn (QUEUED_BODY (p));
        }
      else
        QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
        {
          /* Try converting directly if the insn is supported.  */
          if ((code = can_extend_p (to_mode, from_mode, 0))
              != CODE_FOR_nothing)
            {
              emit_unop_insn (code, to, from, UNKNOWN);
              return;
            }
        }

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        default:
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
                                       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_data[(int) CODE_FOR_slt].operand[0].mode == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */              /* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
        {
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
        {
          from = convert_to_mode (QImode, from, unsignedp);
          from_mode = QImode;
        }
      else
        {
#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
            {
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpqiqi2 */
          abort ();
        }
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;
          rtx tmp;
          tree shift_amount;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                               GET_MODE_BITSIZE (intermediate))))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.
             Generate what we need with shifts.  */
          shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
                                      - GET_MODE_BITSIZE (from_mode), 0);
          from = gen_lowpart (to_mode, force_reg (from_mode, from));
          tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
                              to, unsignedp);
          tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
                              to, unsignedp);
          if (tmp != to)
            emit_move_insn (to, tmp);
          return;
        }
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If X is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return GEN_INT (val);
        }

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
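
/* A minimal sketch (ours, not part of the original file) of the intended
   use of convert_to_mode: widen a QImode value into SImode.  The names
   are hypothetical.  */
#if 0
static rtx
example_widen (byte_val, unsignedp)
     rtx byte_val;              /* a QImode value */
     int unsignedp;             /* nonzero for zero-extension */
{
  /* Either refers to a low part of BYTE_VAL in place or emits a
     convert_move into a fresh SImode pseudo.  */
  return convert_to_mode (SImode, byte_val, unsignedp);
}
#endif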

/* This macro determines the largest unit size that move_by_pieces
   can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN is the maximum alignment we can assume (in bits).  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len;
     unsigned int align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  unsigned int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse)
    data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);
  data.to_readonly = RTX_UNCHANGING_P (to);
  data.from_readonly = RTX_UNCHANGING_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
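
/* Illustrative trace (ours): on a hypothetical 32-bit target, a 6-byte
   copy with 32-bit alignment is emitted as one SImode move followed by
   one HImode move -- the loop above retries with successively narrower
   integer modes until data.len reaches zero.  */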

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bits) is the maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     unsigned int align;
{
  register int n_insns = 0;
  unsigned int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
      || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
    align = MOVE_MAX * BITS_PER_UNIT;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PARAMS ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx_MEM (mode, data->to_addr)
             : copy_rtx (change_address (data->to, mode,
                                         plus_constant (data->to_addr,
                                                        data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;
      RTX_UNCHANGING_P (to1) = data->to_readonly;

      from1
        = (data->autinc_from
           ? gen_rtx_MEM (mode, data->from_addr)
           : copy_rtx (change_address (data->from, mode,
                                       plus_constant (data->from_addr,
                                                      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;
      RTX_UNCHANGING_P (from1) = data->from_readonly;

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, from1));
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
1576\f
1577/* Emit code to move a block Y to a block X.
1578 This may be done with string-move instructions,
1579 with multiple scalar move instructions, or with a library call.
1580
1581 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1582 with mode BLKmode.
1583 SIZE is an rtx that says how long they are.
19caa751 1584 ALIGN is the maximum alignment we can assume they have.
bbf6f052 1585
e9a25f70
JL
1586 Return the address of the new block, if memcpy is called and returns it,
1587 0 otherwise. */
1588
1589rtx
bbf6f052
RK
1590emit_block_move (x, y, size, align)
1591 rtx x, y;
1592 rtx size;
729a2125 1593 unsigned int align;
bbf6f052 1594{
e9a25f70 1595 rtx retval = 0;
52cf7115
JL
1596#ifdef TARGET_MEM_FUNCTIONS
1597 static tree fn;
1598 tree call_expr, arg_list;
1599#endif
e9a25f70 1600
bbf6f052
RK
1601 if (GET_MODE (x) != BLKmode)
1602 abort ();
1603
1604 if (GET_MODE (y) != BLKmode)
1605 abort ();
1606
1607 x = protect_from_queue (x, 1);
1608 y = protect_from_queue (y, 0);
5d901c31 1609 size = protect_from_queue (size, 0);
bbf6f052
RK
1610
1611 if (GET_CODE (x) != MEM)
1612 abort ();
1613 if (GET_CODE (y) != MEM)
1614 abort ();
1615 if (size == 0)
1616 abort ();
1617
fbe1758d 1618 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
bbf6f052
RK
1619 move_by_pieces (x, y, INTVAL (size), align);
1620 else
1621 {
1622 /* Try the most limited insn first, because there's no point
1623 including more than one in the machine description unless
1624 the more limited one has some advantage. */
266007a7 1625
19caa751 1626 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
266007a7
RK
1627 enum machine_mode mode;
1628
1629 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1630 mode = GET_MODE_WIDER_MODE (mode))
bbf6f052 1631 {
266007a7 1632 enum insn_code code = movstr_optab[(int) mode];
a995e389 1633 insn_operand_predicate_fn pred;
266007a7
RK
1634
1635 if (code != CODE_FOR_nothing
803090c4
RK
1636 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1637 here because if SIZE is less than the mode mask, as it is
8008b228 1638 returned by the macro, it will definitely be less than the
803090c4 1639 actual mode mask. */
8ca00751
RK
1640 && ((GET_CODE (size) == CONST_INT
1641 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 1642 <= (GET_MODE_MASK (mode) >> 1)))
8ca00751 1643 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
a995e389
RH
1644 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
1645 || (*pred) (x, BLKmode))
1646 && ((pred = insn_data[(int) code].operand[1].predicate) == 0
1647 || (*pred) (y, BLKmode))
1648 && ((pred = insn_data[(int) code].operand[3].predicate) == 0
1649 || (*pred) (opalign, VOIDmode)))
bbf6f052 1650 {
1ba1e2a8 1651 rtx op2;
266007a7
RK
1652 rtx last = get_last_insn ();
1653 rtx pat;
1654
1ba1e2a8 1655 op2 = convert_to_mode (mode, size, 1);
a995e389
RH
1656 pred = insn_data[(int) code].operand[2].predicate;
1657 if (pred != 0 && ! (*pred) (op2, mode))
266007a7
RK
1658 op2 = copy_to_mode_reg (mode, op2);
1659
1660 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1661 if (pat)
1662 {
1663 emit_insn (pat);
e9a25f70 1664 return 0;
266007a7
RK
1665 }
1666 else
1667 delete_insns_since (last);
bbf6f052
RK
1668 }
1669 }
bbf6f052 1670
4bc973ae
JL
1671 /* X, Y, or SIZE may have been passed through protect_from_queue.
1672
1673 It is unsafe to save the value generated by protect_from_queue
1674 and reuse it later. Consider what happens if emit_queue is
1675 called before the return value from protect_from_queue is used.
1676
1677 Expansion of the CALL_EXPR below will call emit_queue before
1678 we are finished emitting RTL for argument setup. So if we are
1679 not careful we could get the wrong value for an argument.
1680
1681 To avoid this problem we go ahead and emit code to copy X, Y &
1682 SIZE into new pseudos. We can then place those new pseudos
1683 into an RTL_EXPR and use them later, even after a call to
1684 emit_queue.
1685
1686 Note this is not strictly needed for library calls since they
1687 do not call emit_queue before loading their arguments. However,
1688 we may need to have library calls call emit_queue in the future
1689 since failing to do so could cause problems for targets which
1690 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
1691 x = copy_to_mode_reg (Pmode, XEXP (x, 0));
1692 y = copy_to_mode_reg (Pmode, XEXP (y, 0));
1693
1694#ifdef TARGET_MEM_FUNCTIONS
1695 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
1696#else
1697 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
1698 TREE_UNSIGNED (integer_type_node));
f3dc586a 1699 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
4bc973ae
JL
1700#endif
1701
bbf6f052 1702#ifdef TARGET_MEM_FUNCTIONS
52cf7115
JL
1703 /* It is incorrect to use the libcall calling conventions to call
1704 memcpy in this context.
1705
1706 This could be a user call to memcpy and the user may wish to
1707 examine the return value from memcpy.
1708
1709 For targets where libcalls and normal calls have different conventions
1710 for returning pointers, we could end up generating incorrect code.
1711
1712 So instead of using a libcall sequence we build up a suitable
1713 CALL_EXPR and expand the call in the normal fashion. */
1714 if (fn == NULL_TREE)
1715 {
1716 tree fntype;
1717
1718 /* This was copied from except.c, I don't know if all this is
1719 necessary in this context or not. */
1720 fn = get_identifier ("memcpy");
1721 push_obstacks_nochange ();
1722 end_temporary_allocation ();
1723 fntype = build_pointer_type (void_type_node);
1724 fntype = build_function_type (fntype, NULL_TREE);
1725 fn = build_decl (FUNCTION_DECL, fn, fntype);
d7db6646 1726 ggc_add_tree_root (&fn, 1);
52cf7115
JL
1727 DECL_EXTERNAL (fn) = 1;
1728 TREE_PUBLIC (fn) = 1;
1729 DECL_ARTIFICIAL (fn) = 1;
1730 make_decl_rtl (fn, NULL_PTR, 1);
1731 assemble_external (fn);
1732 pop_obstacks ();
1733 }
1734
1735 /* We need to make an argument list for the function call.
1736
1737 memcpy has three arguments, the first two are void * addresses and
1738 the last is a size_t byte count for the copy. */
1739 arg_list
1740 = build_tree_list (NULL_TREE,
4bc973ae 1741 make_tree (build_pointer_type (void_type_node), x));
52cf7115
JL
1742 TREE_CHAIN (arg_list)
1743 = build_tree_list (NULL_TREE,
4bc973ae 1744 make_tree (build_pointer_type (void_type_node), y));
52cf7115
JL
1745 TREE_CHAIN (TREE_CHAIN (arg_list))
1746 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1747 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1748
1749 /* Now we have to build up the CALL_EXPR itself. */
1750 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1751 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1752 call_expr, arg_list, NULL_TREE);
1753 TREE_SIDE_EFFECTS (call_expr) = 1;
1754
1755 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
bbf6f052 1756#else
d562e42e 1757 emit_library_call (bcopy_libfunc, 0,
fe7bbd2a 1758 VOIDmode, 3, y, Pmode, x, Pmode,
3b6f75e2
JW
1759 convert_to_mode (TYPE_MODE (integer_type_node), size,
1760 TREE_UNSIGNED (integer_type_node)),
1761 TYPE_MODE (integer_type_node));
1762#endif
1763 }
1764
1765 return retval;
1766}
1767\f
1768/* Copy all or part of a value X into registers starting at REGNO.
1769 The number of registers to be filled is NREGS. */
1770
1771void
1772move_block_to_reg (regno, x, nregs, mode)
1773 int regno;
1774 rtx x;
1775 int nregs;
1776 enum machine_mode mode;
1777{
1778 int i;
1779#ifdef HAVE_load_multiple
1780 rtx pat;
1781 rtx last;
1782#endif
bbf6f052 1783
1784 if (nregs == 0)
1785 return;
1786
1787 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1788 x = validize_mem (force_const_mem (mode, x));
1789
1790 /* See if the machine can do this with a load multiple insn. */
1791#ifdef HAVE_load_multiple
c3a02afe 1792 if (HAVE_load_multiple)
bbf6f052 1793 {
c3a02afe 1794 last = get_last_insn ();
38a448ca 1795 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
1796 GEN_INT (nregs));
1797 if (pat)
1798 {
1799 emit_insn (pat);
1800 return;
1801 }
1802 else
1803 delete_insns_since (last);
bbf6f052 1804 }
1805#endif
1806
1807 for (i = 0; i < nregs; i++)
38a448ca 1808 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
1809 operand_subword_force (x, i, mode));
1810}
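(Editorial illustration, not part of the original expr.c: a minimal use of move_block_to_reg; the register number and mode are hypothetical.)

/* Copy a two-word DImode pseudo into the consecutive hard registers
   numbered 4 and 5, one word per register.  */
static void
example_block_to_reg ()
{
  rtx x = gen_reg_rtx (DImode);	/* two words when BITS_PER_WORD == 32 */

  move_block_to_reg (4, x, 2, DImode);
}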
1811
1812/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
1813 The number of registers to be filled is NREGS. SIZE indicates the number
1814 of bytes in the object X. */
1815
1816
1817void
0040593d 1818move_block_from_reg (regno, x, nregs, size)
1819 int regno;
1820 rtx x;
1821 int nregs;
0040593d 1822 int size;
1823{
1824 int i;
1825#ifdef HAVE_store_multiple
1826 rtx pat;
1827 rtx last;
1828#endif
58a32c5c 1829 enum machine_mode mode;
bbf6f052 1830
1831 /* If SIZE is that of a mode no bigger than a word, just use that
1832 mode's store operation. */
1833 if (size <= UNITS_PER_WORD
1834 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1835 {
1836 emit_move_insn (change_address (x, mode, NULL),
38a448ca 1837 gen_rtx_REG (mode, regno));
1838 return;
1839 }
1840
0040593d 1841 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
1842 to the left before storing to memory. Note that the previous test
1843 doesn't handle all cases (e.g. SIZE == 3). */
1844 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1845 {
1846 rtx tem = operand_subword (x, 0, 1, BLKmode);
1847 rtx shift;
1848
1849 if (tem == 0)
1850 abort ();
1851
1852 shift = expand_shift (LSHIFT_EXPR, word_mode,
38a448ca 1853 gen_rtx_REG (word_mode, regno),
1854 build_int_2 ((UNITS_PER_WORD - size)
1855 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1856 emit_move_insn (tem, shift);
1857 return;
1858 }
1859
1860 /* See if the machine can do this with a store multiple insn. */
1861#ifdef HAVE_store_multiple
c3a02afe 1862 if (HAVE_store_multiple)
bbf6f052 1863 {
c3a02afe 1864 last = get_last_insn ();
38a448ca 1865 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
1866 GEN_INT (nregs));
1867 if (pat)
1868 {
1869 emit_insn (pat);
1870 return;
1871 }
1872 else
1873 delete_insns_since (last);
bbf6f052 1874 }
1875#endif
1876
1877 for (i = 0; i < nregs; i++)
1878 {
1879 rtx tem = operand_subword (x, i, 1, BLKmode);
1880
1881 if (tem == 0)
1882 abort ();
1883
38a448ca 1884 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
1885 }
1886}
1887
1888/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1889 registers represented by a PARALLEL. SSIZE represents the total size of
1890 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1891 SRC in bits. */
 1892/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1893 the balance will be in what would be the low-order memory addresses, i.e.
1894 left justified for big endian, right justified for little endian. This
1895 happens to be true for the targets currently using this support. If this
1896 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1897 would be needed. */
1898
1899void
1900emit_group_load (dst, orig_src, ssize, align)
1901 rtx dst, orig_src;
1902 unsigned int align;
1903 int ssize;
fffa9c1d 1904{
1905 rtx *tmps, src;
1906 int start, i;
fffa9c1d 1907
aac5cc16 1908 if (GET_CODE (dst) != PARALLEL)
1909 abort ();
1910
1911 /* Check for a NULL entry, used to indicate that the parameter goes
1912 both on the stack and in registers. */
1913 if (XEXP (XVECEXP (dst, 0, 0), 0))
1914 start = 0;
fffa9c1d 1915 else
1916 start = 1;
1917
1918 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1919
1920 /* If we won't be loading directly from memory, protect the real source
1921 from strange tricks we might play. */
1922 src = orig_src;
1923 if (GET_CODE (src) != MEM)
1924 {
 1925	  if (GET_MODE (src) == VOIDmode)
1926 src = gen_reg_rtx (GET_MODE (dst));
1927 else
1928 src = gen_reg_rtx (GET_MODE (orig_src));
1929 emit_move_insn (src, orig_src);
1930 }
1931
1932 /* Process the pieces. */
1933 for (i = start; i < XVECLEN (dst, 0); i++)
1934 {
1935 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1936 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1937 unsigned int bytelen = GET_MODE_SIZE (mode);
1938 int shift = 0;
1939
1940 /* Handle trailing fragments that run over the size of the struct. */
1941 if (ssize >= 0 && bytepos + bytelen > ssize)
1942 {
1943 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1944 bytelen = ssize - bytepos;
1945 if (bytelen <= 0)
729a2125 1946 abort ();
1947 }
1948
1949 /* Optimize the access just a bit. */
1950 if (GET_CODE (src) == MEM
19caa751 1951 && align >= GET_MODE_ALIGNMENT (mode)
729a2125 1952 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1953 && bytelen == GET_MODE_SIZE (mode))
1954 {
1955 tmps[i] = gen_reg_rtx (mode);
1956 emit_move_insn (tmps[i],
1957 change_address (src, mode,
1958 plus_constant (XEXP (src, 0),
1959 bytepos)));
fffa9c1d 1960 }
1961 else if (GET_CODE (src) == CONCAT)
1962 {
1963 if (bytepos == 0
1964 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 0))))
1965 tmps[i] = XEXP (src, 0);
1966 else if (bytepos == GET_MODE_SIZE (GET_MODE (XEXP (src, 0)))
1967 && bytelen == GET_MODE_SIZE (GET_MODE (XEXP (src, 1))))
1968 tmps[i] = XEXP (src, 1);
1969 else
1970 abort ();
1971 }
fffa9c1d 1972 else
1973 tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
1974 bytepos * BITS_PER_UNIT, 1, NULL_RTX,
1975 mode, mode, align, ssize);
fffa9c1d 1976
aac5cc16 1977 if (BYTES_BIG_ENDIAN && shift)
1978 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1979 tmps[i], 0, OPTAB_WIDEN);
fffa9c1d 1980 }
19caa751 1981
1982 emit_queue();
1983
1984 /* Copy the extracted pieces into the proper (probable) hard regs. */
1985 for (i = start; i < XVECLEN (dst, 0); i++)
1986 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
1987}
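(Editorial illustration, not part of the original expr.c: a sketch of the PARALLEL shape emit_group_load consumes; the register numbers, modes, and sizes are hypothetical.)

/* Describe a 16-byte value split across the 8-byte hard registers 4 and 5,
   then load it out of the BLKmode MEM SRC.  Each vector element is an
   EXPR_LIST whose operands are the target register and its byte offset.  */
static void
example_group_load (src)
     rtx src;
{
  rtx par
    = gen_rtx_PARALLEL (VOIDmode,
			gen_rtvec (2,
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode, 4),
						      GEN_INT (0)),
				   gen_rtx_EXPR_LIST (VOIDmode,
						      gen_rtx_REG (DImode, 5),
						      GEN_INT (8))));

  /* 16 is the total size in bytes, 64 the alignment of SRC in bits.  */
  emit_group_load (par, src, 16, 64);
}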
1988
1989/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1990 registers represented by a PARALLEL. SSIZE represents the total size of
1991 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
1992
1993void
1994emit_group_store (orig_dst, src, ssize, align)
1995 rtx orig_dst, src;
1996 int ssize;
1997 unsigned int align;
fffa9c1d 1998{
1999 rtx *tmps, dst;
2000 int start, i;
fffa9c1d 2001
aac5cc16 2002 if (GET_CODE (src) != PARALLEL)
2003 abort ();
2004
2005 /* Check for a NULL entry, used to indicate that the parameter goes
2006 both on the stack and in registers. */
2007 if (XEXP (XVECEXP (src, 0, 0), 0))
2008 start = 0;
fffa9c1d 2009 else
2010 start = 1;
2011
2012 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
fffa9c1d 2013
2014 /* Copy the (probable) hard regs into pseudos. */
2015 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 2016 {
2017 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2018 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2019 emit_move_insn (tmps[i], reg);
2020 }
2021 emit_queue();
fffa9c1d 2022
2023 /* If we won't be storing directly into memory, protect the real destination
2024 from strange tricks we might play. */
2025 dst = orig_dst;
2026 if (GET_CODE (dst) == PARALLEL)
2027 {
2028 rtx temp;
2029
2030 /* We can get a PARALLEL dst if there is a conditional expression in
2031 a return statement. In that case, the dst and src are the same,
2032 so no action is necessary. */
2033 if (rtx_equal_p (dst, src))
2034 return;
2035
2036 /* It is unclear if we can ever reach here, but we may as well handle
2037 it. Allocate a temporary, and split this into a store/load to/from
2038 the temporary. */
2039
2040 temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
2041 emit_group_store (temp, src, ssize, align);
2042 emit_group_load (dst, temp, ssize, align);
2043 return;
2044 }
2045 else if (GET_CODE (dst) != MEM)
2046 {
2047 dst = gen_reg_rtx (GET_MODE (orig_dst));
2048 /* Make life a bit easier for combine. */
2049 emit_move_insn (dst, const0_rtx);
2050 }
2051 else if (! MEM_IN_STRUCT_P (dst))
2052 {
2053 /* store_bit_field requires that memory operations have
2054 mem_in_struct_p set; we might not. */
fffa9c1d 2055
aac5cc16 2056 dst = copy_rtx (orig_dst);
c6df88cb 2057 MEM_SET_IN_STRUCT_P (dst, 1);
2058 }
2059
2060 /* Process the pieces. */
2061 for (i = start; i < XVECLEN (src, 0); i++)
2062 {
770ae6cc 2063 HOST_WIDE_INT bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
aac5cc16 2064 enum machine_mode mode = GET_MODE (tmps[i]);
770ae6cc 2065 unsigned int bytelen = GET_MODE_SIZE (mode);
2066
2067 /* Handle trailing fragments that run over the size of the struct. */
2068 if (ssize >= 0 && bytepos + bytelen > ssize)
71bc0330 2069 {
2070 if (BYTES_BIG_ENDIAN)
2071 {
2072 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2073 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2074 tmps[i], 0, OPTAB_WIDEN);
2075 }
2076 bytelen = ssize - bytepos;
71bc0330 2077 }
fffa9c1d 2078
2079 /* Optimize the access just a bit. */
2080 if (GET_CODE (dst) == MEM
19caa751 2081 && align >= GET_MODE_ALIGNMENT (mode)
729a2125 2082 && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
aac5cc16 2083 && bytelen == GET_MODE_SIZE (mode))
2084 emit_move_insn (change_address (dst, mode,
2085 plus_constant (XEXP (dst, 0),
2086 bytepos)),
2087 tmps[i]);
aac5cc16 2088 else
729a2125 2089 store_bit_field (dst, bytelen * BITS_PER_UNIT, bytepos * BITS_PER_UNIT,
aac5cc16 2090 mode, tmps[i], align, ssize);
fffa9c1d 2091 }
729a2125 2092
2093 emit_queue();
2094
2095 /* Copy from the pseudo into the (probable) hard reg. */
2096 if (GET_CODE (dst) == REG)
2097 emit_move_insn (orig_dst, dst);
2098}
2099
2100/* Generate code to copy a BLKmode object of TYPE out of a
2101 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2102 is null, a stack temporary is created. TGTBLK is returned.
2103
2104 The primary purpose of this routine is to handle functions
2105 that return BLKmode structures in registers. Some machines
2106 (the PA for example) want to return all small structures
729a2125 2107 in registers regardless of the structure's alignment. */
2108
2109rtx
19caa751 2110copy_blkmode_from_reg (tgtblk, srcreg, type)
2111 rtx tgtblk;
2112 rtx srcreg;
2113 tree type;
2114{
2115 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (type);
2116 rtx src = NULL, dst = NULL;
2117 unsigned HOST_WIDE_INT bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2118 unsigned HOST_WIDE_INT bitpos, xbitpos, big_endian_correction = 0;
2119
2120 if (tgtblk == 0)
2121 {
2122 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2123 MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
2124 preserve_temp_slots (tgtblk);
2125 }
c36fce9a 2126
2127 /* This code assumes srcreg is at least a full word. If it isn't,
2128 copy it into a new pseudo which is a full word. */
2129 if (GET_MODE (srcreg) != BLKmode
2130 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2131 srcreg = convert_to_mode (word_mode, srcreg, TREE_UNSIGNED (type));
2132
2133 /* Structures whose size is not a multiple of a word are aligned
2134 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2135 machine, this means we must skip the empty high order bytes when
2136 calculating the bit offset. */
2137 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2138 big_endian_correction
2139 = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD) * BITS_PER_UNIT));
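      /* Worked example (editorial note, not in the original source): with
	 BITS_PER_WORD == 32, a 6-byte structure leaves bytes % UNITS_PER_WORD
	 == 2, so the correction is 32 - 2 * 8 == 16 bits: the data occupies
	 only the low-order half of its final word.  */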
2140
 2141  /* Copy the structure BITSIZE bits at a time.
2142
2143 We could probably emit more efficient code for machines which do not use
2144 strict alignment, but it doesn't seem worth the effort at the current
2145 time. */
2146 for (bitpos = 0, xbitpos = big_endian_correction;
2147 bitpos < bytes * BITS_PER_UNIT;
2148 bitpos += bitsize, xbitpos += bitsize)
2149 {
2150 /* We need a new source operand each time xbitpos is on a
2151 word boundary and when xbitpos == big_endian_correction
2152 (the first time through). */
2153 if (xbitpos % BITS_PER_WORD == 0
2154 || xbitpos == big_endian_correction)
2155 src = operand_subword_force (srcreg, xbitpos / BITS_PER_WORD, BLKmode);
2156
2157 /* We need a new destination operand each time bitpos is on
2158 a word boundary. */
2159 if (bitpos % BITS_PER_WORD == 0)
2160 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
c36fce9a 2161
2162 /* Use xbitpos for the source extraction (right justified) and
 2163	 bitpos for the destination store (left justified).  */
2164 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2165 extract_bit_field (src, bitsize,
2166 xbitpos % BITS_PER_WORD, 1,
2167 NULL_RTX, word_mode, word_mode,
2168 bitsize, BITS_PER_WORD),
2169 bitsize, BITS_PER_WORD);
2170 }
2171
2172 return tgtblk;
2173}
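(Editorial illustration, not part of the original expr.c: how a caller might use the routine above; the return register and its mode are hypothetical.)

/* Retrieve a small BLKmode structure that the ABI returned in a register.
   Passing 0 for TGTBLK makes copy_blkmode_from_reg allocate the stack
   temporary itself and return it.  TYPE is the structure's tree node.  */
static rtx
example_blkmode_return (type)
     tree type;
{
  rtx srcreg = gen_rtx_REG (DImode, 28);	/* hypothetical return reg */

  return copy_blkmode_from_reg (0, srcreg, type);
}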
2174
2175
2176/* Add a USE expression for REG to the (possibly empty) list pointed
2177 to by CALL_FUSAGE. REG must denote a hard register. */
2178
2179void
2180use_reg (call_fusage, reg)
2181 rtx *call_fusage, reg;
2182{
2183 if (GET_CODE (reg) != REG
2184 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
2185 abort();
2186
2187 *call_fusage
2188 = gen_rtx_EXPR_LIST (VOIDmode,
2189 gen_rtx_USE (VOIDmode, reg), *call_fusage);
2190}
2191
2192/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2193 starting at REGNO. All of these registers must be hard registers. */
2194
2195void
2196use_regs (call_fusage, regno, nregs)
2197 rtx *call_fusage;
2198 int regno;
2199 int nregs;
2200{
0304dfbb 2201 int i;
bbf6f052 2202
2203 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2204 abort ();
2205
2206 for (i = 0; i < nregs; i++)
38a448ca 2207 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
bbf6f052 2208}
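(Editorial illustration, not part of the original expr.c: building a CALL_FUSAGE list with the helpers above; the register number is hypothetical.)

/* Record that an upcoming call uses hard registers 4 and 5.  The resulting
   list would be attached to the call's CALL_INSN_FUNCTION_USAGE.  */
static rtx
example_call_fusage ()
{
  rtx call_fusage = 0;

  use_regs (&call_fusage, 4, 2);
  return call_fusage;
}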
2209
2210/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2211 PARALLEL REGS. This is for calls that pass values in multiple
2212 non-contiguous locations. The Irix 6 ABI has examples of this. */
2213
2214void
2215use_group_regs (call_fusage, regs)
2216 rtx *call_fusage;
2217 rtx regs;
2218{
2219 int i;
2220
2221 for (i = 0; i < XVECLEN (regs, 0); i++)
2222 {
2223 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2224
2225 /* A NULL entry means the parameter goes both on the stack and in
2226 registers. This can also be a MEM for targets that pass values
2227 partially on the stack and partially in registers. */
e9a25f70 2228 if (reg != 0 && GET_CODE (reg) == REG)
2229 use_reg (call_fusage, reg);
2230 }
fffa9c1d 2231}
bbf6f052 2232\f
2233/* Generate several move instructions to clear LEN bytes of block TO. (A MEM
2234 rtx with BLKmode). The caller must pass TO through protect_from_queue
2235 before calling. ALIGN is maximum alignment we can assume. */
2236
2237static void
2238clear_by_pieces (to, len, align)
2239 rtx to;
2240 int len;
2241 unsigned int align;
2242{
2243 struct clear_by_pieces data;
2244 rtx to_addr = XEXP (to, 0);
770ae6cc 2245 unsigned int max_size = MOVE_MAX_PIECES + 1;
2246 enum machine_mode mode = VOIDmode, tmode;
2247 enum insn_code icode;
9de08200
RK
2248
2249 data.offset = 0;
2250 data.to_addr = to_addr;
2251 data.to = to;
2252 data.autinc_to
2253 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2254 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2255
2256 data.explicit_inc_to = 0;
2257 data.reverse
2258 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2259 if (data.reverse) data.offset = len;
2260 data.len = len;
2261
2262 data.to_struct = MEM_IN_STRUCT_P (to);
2263
2264 /* If copying requires more than two move insns,
2265 copy addresses to registers (to make displacements shorter)
2266 and use post-increment if available. */
2267 if (!data.autinc_to
2268 && move_by_pieces_ninsns (len, align) > 2)
2269 {
2270 /* Determine the main mode we'll be using */
2271 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2272 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2273 if (GET_MODE_SIZE (tmode) < max_size)
2274 mode = tmode;
2275
2276 if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
9de08200
RK
2277 {
2278 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2279 data.autinc_to = 1;
2280 data.explicit_inc_to = -1;
2281 }
fbe1758d 2282 if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
2283 {
2284 data.to_addr = copy_addr_to_reg (to_addr);
2285 data.autinc_to = 1;
2286 data.explicit_inc_to = 1;
2287 }
2288 if (!data.autinc_to && CONSTANT_P (to_addr))
2289 data.to_addr = copy_addr_to_reg (to_addr);
2290 }
2291
e1565e65 2292 if (! SLOW_UNALIGNED_ACCESS (word_mode, align)
19caa751 2293 || align > MOVE_MAX * BITS_PER_UNIT || align >= BIGGEST_ALIGNMENT)
bdb429a5 2294 align = MOVE_MAX * BITS_PER_UNIT;
2295
2296 /* First move what we can in the largest integer mode, then go to
2297 successively smaller modes. */
2298
2299 while (max_size > 1)
2300 {
2301 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2302 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2303 if (GET_MODE_SIZE (tmode) < max_size)
2304 mode = tmode;
2305
2306 if (mode == VOIDmode)
2307 break;
2308
2309 icode = mov_optab->handlers[(int) mode].insn_code;
19caa751 2310 if (icode != CODE_FOR_nothing && align >= GET_MODE_ALIGNMENT (mode))
2311 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2312
2313 max_size = GET_MODE_SIZE (mode);
2314 }
2315
2316 /* The code above should have handled everything. */
2317 if (data.len != 0)
2318 abort ();
2319}
2320
2321/* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2322 with move instructions for mode MODE. GENFUN is the gen_... function
2323 to make a move insn for that mode. DATA has all the other info. */
2324
2325static void
2326clear_by_pieces_1 (genfun, mode, data)
711d877c 2327 rtx (*genfun) PARAMS ((rtx, ...));
9de08200
RK
2328 enum machine_mode mode;
2329 struct clear_by_pieces *data;
2330{
2331 register int size = GET_MODE_SIZE (mode);
2332 register rtx to1;
2333
2334 while (data->len >= size)
2335 {
2336 if (data->reverse) data->offset -= size;
2337
2338 to1 = (data->autinc_to
38a448ca 2339 ? gen_rtx_MEM (mode, data->to_addr)
2340 : copy_rtx (change_address (data->to, mode,
2341 plus_constant (data->to_addr,
2342 data->offset))));
2343 MEM_IN_STRUCT_P (to1) = data->to_struct;
2344
940da324 2345 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
9de08200 2346 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2347
2348 emit_insn ((*genfun) (to1, const0_rtx));
940da324 2349 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
9de08200 2350 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2351
2352 if (! data->reverse) data->offset += size;
2353
2354 data->len -= size;
2355 }
2356}
2357\f
2358/* Write zeros through the storage of OBJECT. If OBJECT has BLKmode, SIZE is
 2359   its length in bytes and ALIGN is the maximum alignment we can assume.
bbf6f052 2360
2361 If we call a function that returns the length of the block, return it. */
2362
2363rtx
9de08200 2364clear_storage (object, size, align)
bbf6f052 2365 rtx object;
4c08eef0 2366 rtx size;
729a2125 2367 unsigned int align;
bbf6f052 2368{
2369#ifdef TARGET_MEM_FUNCTIONS
2370 static tree fn;
2371 tree call_expr, arg_list;
2372#endif
2373 rtx retval = 0;
2374
2375 if (GET_MODE (object) == BLKmode)
2376 {
2377 object = protect_from_queue (object, 1);
2378 size = protect_from_queue (size, 0);
2379
2380 if (GET_CODE (size) == CONST_INT
fbe1758d 2381 && MOVE_BY_PIECES_P (INTVAL (size), align))
9de08200 2382 clear_by_pieces (object, INTVAL (size), align);
2383 else
2384 {
2385 /* Try the most limited insn first, because there's no point
2386 including more than one in the machine description unless
2387 the more limited one has some advantage. */
2388
19caa751 2389 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
2390 enum machine_mode mode;
2391
2392 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2393 mode = GET_MODE_WIDER_MODE (mode))
2394 {
2395 enum insn_code code = clrstr_optab[(int) mode];
a995e389 2396 insn_operand_predicate_fn pred;
2397
2398 if (code != CODE_FOR_nothing
2399 /* We don't need MODE to be narrower than
2400 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2401 the mode mask, as it is returned by the macro, it will
2402 definitely be less than the actual mode mask. */
2403 && ((GET_CODE (size) == CONST_INT
2404 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 2405 <= (GET_MODE_MASK (mode) >> 1)))
9de08200 2406 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2407 && ((pred = insn_data[(int) code].operand[0].predicate) == 0
2408 || (*pred) (object, BLKmode))
2409 && ((pred = insn_data[(int) code].operand[2].predicate) == 0
2410 || (*pred) (opalign, VOIDmode)))
2411 {
2412 rtx op1;
2413 rtx last = get_last_insn ();
2414 rtx pat;
2415
2416 op1 = convert_to_mode (mode, size, 1);
2417 pred = insn_data[(int) code].operand[1].predicate;
2418 if (pred != 0 && ! (*pred) (op1, mode))
2419 op1 = copy_to_mode_reg (mode, op1);
2420
2421 pat = GEN_FCN ((int) code) (object, op1, opalign);
2422 if (pat)
2423 {
2424 emit_insn (pat);
e9a25f70 2425 return 0;
2426 }
2427 else
2428 delete_insns_since (last);
2429 }
2430 }
2431
4bc973ae 2432 /* OBJECT or SIZE may have been passed through protect_from_queue.
9de08200 2433
2434 It is unsafe to save the value generated by protect_from_queue
2435 and reuse it later. Consider what happens if emit_queue is
2436 called before the return value from protect_from_queue is used.
52cf7115 2437
2438 Expansion of the CALL_EXPR below will call emit_queue before
2439 we are finished emitting RTL for argument setup. So if we are
2440 not careful we could get the wrong value for an argument.
52cf7115 2441
2442 To avoid this problem we go ahead and emit code to copy OBJECT
2443 and SIZE into new pseudos. We can then place those new pseudos
2444 into an RTL_EXPR and use them later, even after a call to
2445 emit_queue.
52cf7115 2446
2447 Note this is not strictly needed for library calls since they
2448 do not call emit_queue before loading their arguments. However,
2449 we may need to have library calls call emit_queue in the future
2450 since failing to do so could cause problems for targets which
2451 define SMALL_REGISTER_CLASSES and pass arguments in registers. */
2452 object = copy_to_mode_reg (Pmode, XEXP (object, 0));
52cf7115 2453
2454#ifdef TARGET_MEM_FUNCTIONS
2455 size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
2456#else
2457 size = convert_to_mode (TYPE_MODE (integer_type_node), size,
2458 TREE_UNSIGNED (integer_type_node));
f3dc586a 2459 size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
4bc973ae 2460#endif
52cf7115 2461
52cf7115 2462
2463#ifdef TARGET_MEM_FUNCTIONS
2464 /* It is incorrect to use the libcall calling conventions to call
2465 memset in this context.
52cf7115 2466
2467 This could be a user call to memset and the user may wish to
2468 examine the return value from memset.
52cf7115 2469
2470 For targets where libcalls and normal calls have different
2471 conventions for returning pointers, we could end up generating
2472 incorrect code.
2473
2474 So instead of using a libcall sequence we build up a suitable
2475 CALL_EXPR and expand the call in the normal fashion. */
2476 if (fn == NULL_TREE)
2477 {
2478 tree fntype;
2479
2480 /* This was copied from except.c, I don't know if all this is
2481 necessary in this context or not. */
2482 fn = get_identifier ("memset");
2483 push_obstacks_nochange ();
2484 end_temporary_allocation ();
2485 fntype = build_pointer_type (void_type_node);
2486 fntype = build_function_type (fntype, NULL_TREE);
2487 fn = build_decl (FUNCTION_DECL, fn, fntype);
d7db6646 2488 ggc_add_tree_root (&fn, 1);
2489 DECL_EXTERNAL (fn) = 1;
2490 TREE_PUBLIC (fn) = 1;
2491 DECL_ARTIFICIAL (fn) = 1;
2492 make_decl_rtl (fn, NULL_PTR, 1);
2493 assemble_external (fn);
2494 pop_obstacks ();
2495 }
2496
2497 /* We need to make an argument list for the function call.
2498
 2499	 memset has three arguments, the first is a void * address, the
 2500	 second an integer with the initialization value, the last is a
2501 size_t byte count for the copy. */
2502 arg_list
2503 = build_tree_list (NULL_TREE,
2504 make_tree (build_pointer_type (void_type_node),
2505 object));
2506 TREE_CHAIN (arg_list)
2507 = build_tree_list (NULL_TREE,
2508 make_tree (integer_type_node, const0_rtx));
2509 TREE_CHAIN (TREE_CHAIN (arg_list))
2510 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2511 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2512
2513 /* Now we have to build up the CALL_EXPR itself. */
2514 call_expr = build1 (ADDR_EXPR,
2515 build_pointer_type (TREE_TYPE (fn)), fn);
2516 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2517 call_expr, arg_list, NULL_TREE);
2518 TREE_SIDE_EFFECTS (call_expr) = 1;
2519
2520 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
bbf6f052 2521#else
9de08200 2522 emit_library_call (bzero_libfunc, 0,
fe7bbd2a 2523 VOIDmode, 2, object, Pmode, size,
9de08200 2524 TYPE_MODE (integer_type_node));
bbf6f052 2525#endif
9de08200 2526 }
2527 }
2528 else
66ed0683 2529 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
2530
2531 return retval;
2532}
2533
2534/* Generate code to copy Y into X.
2535 Both Y and X must have the same mode, except that
2536 Y can be a constant with VOIDmode.
2537 This mode cannot be BLKmode; use emit_block_move for that.
2538
2539 Return the last instruction emitted. */
2540
2541rtx
2542emit_move_insn (x, y)
2543 rtx x, y;
2544{
2545 enum machine_mode mode = GET_MODE (x);
2546
2547 x = protect_from_queue (x, 1);
2548 y = protect_from_queue (y, 0);
2549
2550 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2551 abort ();
2552
2553 /* Never force constant_p_rtx to memory. */
2554 if (GET_CODE (y) == CONSTANT_P_RTX)
2555 ;
2556 else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2557 y = force_const_mem (mode, y);
2558
2559 /* If X or Y are memory references, verify that their addresses are valid
2560 for the machine. */
2561 if (GET_CODE (x) == MEM
2562 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2563 && ! push_operand (x, GET_MODE (x)))
2564 || (flag_force_addr
2565 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2566 x = change_address (x, VOIDmode, XEXP (x, 0));
2567
2568 if (GET_CODE (y) == MEM
2569 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2570 || (flag_force_addr
2571 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2572 y = change_address (y, VOIDmode, XEXP (y, 0));
2573
2574 if (mode == BLKmode)
2575 abort ();
2576
2577 return emit_move_insn_1 (x, y);
2578}
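(Editorial illustration, not part of the original expr.c: the two common shapes this interface accepts, per the comment above; SImode is assumed to have a move pattern.)

/* Y may be a VOIDmode constant -- the mode is taken from X -- and either
   operand may be a MEM whose address emit_move_insn validizes itself.  */
static void
example_moves ()
{
  rtx tmp = gen_reg_rtx (SImode);

  emit_move_insn (tmp, GEN_INT (42));
  emit_move_insn (gen_rtx_MEM (SImode, stack_pointer_rtx), tmp);
}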
2579
2580/* Low level part of emit_move_insn.
2581 Called just like emit_move_insn, but assumes X and Y
2582 are basically valid. */
2583
2584rtx
2585emit_move_insn_1 (x, y)
2586 rtx x, y;
2587{
2588 enum machine_mode mode = GET_MODE (x);
2589 enum machine_mode submode;
2590 enum mode_class class = GET_MODE_CLASS (mode);
770ae6cc 2591 unsigned int i;
261c4230 2592
2593 if (mode >= MAX_MACHINE_MODE)
2594 abort ();
2595
2596 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2597 return
2598 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2599
89742723 2600 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2601 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
2602 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2603 * BITS_PER_UNIT),
2604 (class == MODE_COMPLEX_INT
2605 ? MODE_INT : MODE_FLOAT),
2606 0))
2607 && (mov_optab->handlers[(int) submode].insn_code
2608 != CODE_FOR_nothing))
2609 {
2610 /* Don't split destination if it is a stack push. */
2611 int stack = push_operand (x, GET_MODE (x));
7308a047 2612
2613 /* If this is a stack, push the highpart first, so it
2614 will be in the argument order.
2615
2616 In that case, change_address is used only to convert
2617 the mode, not to change the address. */
2618 if (stack)
2619 {
2620 /* Note that the real part always precedes the imag part in memory
2621 regardless of machine's endianness. */
2622#ifdef STACK_GROWS_DOWNWARD
2623 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2624 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2625 gen_imagpart (submode, y)));
c937357e 2626 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2627 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2628 gen_realpart (submode, y)));
2629#else
2630 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2631 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2632 gen_realpart (submode, y)));
c937357e 2633 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2634 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2635 gen_imagpart (submode, y)));
2636#endif
2637 }
2638 else
2639 {
2640 rtx realpart_x, realpart_y;
2641 rtx imagpart_x, imagpart_y;
2642
2643 /* If this is a complex value with each part being smaller than a
2644 word, the usual calling sequence will likely pack the pieces into
2645 a single register. Unfortunately, SUBREG of hard registers only
2646 deals in terms of words, so we have a problem converting input
2647 arguments to the CONCAT of two registers that is used elsewhere
2648 for complex values. If this is before reload, we can copy it into
2649 memory and reload. FIXME, we should see about using extract and
2650 insert on integer registers, but complex short and complex char
2651 variables should be rarely used. */
2652 if (GET_MODE_BITSIZE (mode) < 2*BITS_PER_WORD
2653 && (reload_in_progress | reload_completed) == 0)
2654 {
2655 int packed_dest_p = (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER);
2656 int packed_src_p = (REG_P (y) && REGNO (y) < FIRST_PSEUDO_REGISTER);
2657
2658 if (packed_dest_p || packed_src_p)
2659 {
2660 enum mode_class reg_class = ((class == MODE_COMPLEX_FLOAT)
2661 ? MODE_FLOAT : MODE_INT);
2662
2663 enum machine_mode reg_mode =
2664 mode_for_size (GET_MODE_BITSIZE (mode), reg_class, 1);
2665
2666 if (reg_mode != BLKmode)
2667 {
2668 rtx mem = assign_stack_temp (reg_mode,
2669 GET_MODE_SIZE (mode), 0);
2670
2671 rtx cmem = change_address (mem, mode, NULL_RTX);
2672
942248b8 2673 cfun->cannot_inline = N_("function using short complex types cannot be inline");
2674
2675 if (packed_dest_p)
2676 {
2677 rtx sreg = gen_rtx_SUBREG (reg_mode, x, 0);
2678 emit_move_insn_1 (cmem, y);
2679 return emit_move_insn_1 (sreg, mem);
2680 }
2681 else
2682 {
2683 rtx sreg = gen_rtx_SUBREG (reg_mode, y, 0);
2684 emit_move_insn_1 (mem, sreg);
2685 return emit_move_insn_1 (x, cmem);
2686 }
2687 }
2688 }
2689 }
2690
2691 realpart_x = gen_realpart (submode, x);
2692 realpart_y = gen_realpart (submode, y);
2693 imagpart_x = gen_imagpart (submode, x);
2694 imagpart_y = gen_imagpart (submode, y);
2695
2696 /* Show the output dies here. This is necessary for SUBREGs
2697 of pseudos since we cannot track their lifetimes correctly;
2698 hard regs shouldn't appear here except as return values.
2699 We never want to emit such a clobber after reload. */
2700 if (x != y
2701 && ! (reload_in_progress || reload_completed)
2702 && (GET_CODE (realpart_x) == SUBREG
2703 || GET_CODE (imagpart_x) == SUBREG))
b2e7e6fb 2704 {
c14c6529 2705 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
b2e7e6fb 2706 }
2638126a 2707
c937357e 2708 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
235ae7be 2709 (realpart_x, realpart_y));
c937357e 2710 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
235ae7be 2711 (imagpart_x, imagpart_y));
c937357e 2712 }
7308a047 2713
7a1ab50a 2714 return get_last_insn ();
2715 }
2716
2717 /* This will handle any multi-word mode that lacks a move_insn pattern.
2718 However, you will get better code if you define such patterns,
2719 even if they must turn into multiple assembler instructions. */
a4320483 2720 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
2721 {
2722 rtx last_insn = 0;
2723 rtx seq;
2724 int need_clobber;
6551fa4d 2725
2726#ifdef PUSH_ROUNDING
2727
2728 /* If X is a push on the stack, do the push now and replace
2729 X with a reference to the stack pointer. */
2730 if (push_operand (x, GET_MODE (x)))
2731 {
2732 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2733 x = change_address (x, VOIDmode, stack_pointer_rtx);
2734 }
2735#endif
2736
235ae7be 2737 start_sequence ();
15a7a8ec 2738
235ae7be 2739 need_clobber = 0;
2740 for (i = 0;
2741 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2742 i++)
2743 {
2744 rtx xpart = operand_subword (x, i, 1, mode);
2745 rtx ypart = operand_subword (y, i, 1, mode);
2746
2747 /* If we can't get a part of Y, put Y into memory if it is a
2748 constant. Otherwise, force it into a register. If we still
2749 can't get a part of Y, abort. */
2750 if (ypart == 0 && CONSTANT_P (y))
2751 {
2752 y = force_const_mem (mode, y);
2753 ypart = operand_subword (y, i, 1, mode);
2754 }
2755 else if (ypart == 0)
2756 ypart = operand_subword_force (y, i, mode);
2757
2758 if (xpart == 0 || ypart == 0)
2759 abort ();
2760
2761 need_clobber |= (GET_CODE (xpart) == SUBREG);
2762
2763 last_insn = emit_move_insn (xpart, ypart);
2764 }
6551fa4d 2765
2766 seq = gen_sequence ();
2767 end_sequence ();
2768
2769 /* Show the output dies here. This is necessary for SUBREGs
2770 of pseudos since we cannot track their lifetimes correctly;
2771 hard regs shouldn't appear here except as return values.
2772 We never want to emit such a clobber after reload. */
2773 if (x != y
2774 && ! (reload_in_progress || reload_completed)
2775 && need_clobber != 0)
2776 {
2777 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2778 }
2779
2780 emit_insn (seq);
2781
2782 return last_insn;
2783 }
2784 else
2785 abort ();
2786}
2787\f
2788/* Pushing data onto the stack. */
2789
2790/* Push a block of length SIZE (perhaps variable)
2791 and return an rtx to address the beginning of the block.
2792 Note that it is not possible for the value returned to be a QUEUED.
2793 The value may be virtual_outgoing_args_rtx.
2794
2795 EXTRA is the number of bytes of padding to push in addition to SIZE.
2796 BELOW nonzero means this padding comes at low addresses;
2797 otherwise, the padding comes at high addresses. */
2798
2799rtx
2800push_block (size, extra, below)
2801 rtx size;
2802 int extra, below;
2803{
2804 register rtx temp;
88f63c77
RK
2805
2806 size = convert_modes (Pmode, ptr_mode, size, 1);
2807 if (CONSTANT_P (size))
2808 anti_adjust_stack (plus_constant (size, extra));
2809 else if (GET_CODE (size) == REG && extra == 0)
2810 anti_adjust_stack (size);
2811 else
2812 {
2813 rtx temp = copy_to_mode_reg (Pmode, size);
2814 if (extra != 0)
906c4e36 2815 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
2816 temp, 0, OPTAB_LIB_WIDEN);
2817 anti_adjust_stack (temp);
2818 }
2819
2820#ifndef STACK_GROWS_DOWNWARD
2821#ifdef ARGS_GROW_DOWNWARD
2822 if (!ACCUMULATE_OUTGOING_ARGS)
bbf6f052 2823#else
2824 if (0)
2825#endif
2826#else
2827 if (1)
bbf6f052 2828#endif
2829 {
2830
2831 /* Return the lowest stack address when STACK or ARGS grow downward and
 2832	 we are not accumulating outgoing arguments (the c4x port uses such
2833 conventions). */
2834 temp = virtual_outgoing_args_rtx;
2835 if (extra != 0 && below)
2836 temp = plus_constant (temp, extra);
2837 }
2838 else
2839 {
2840 if (GET_CODE (size) == CONST_INT)
2841 temp = plus_constant (virtual_outgoing_args_rtx,
2842 - INTVAL (size) - (below ? 0 : extra));
2843 else if (extra != 0 && !below)
2844 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2845 negate_rtx (Pmode, plus_constant (size, extra)));
2846 else
2847 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
2848 negate_rtx (Pmode, size));
2849 }
2850
2851 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2852}
2853
87e38d84 2854rtx
2855gen_push_operand ()
2856{
38a448ca 2857 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
2858}
2859
 2860/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2861 block of SIZE bytes. */
2862
2863static rtx
2864get_push_address (size)
2865 int size;
2866{
2867 register rtx temp;
2868
2869 if (STACK_PUSH_CODE == POST_DEC)
38a448ca 2870 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427 2871 else if (STACK_PUSH_CODE == POST_INC)
38a448ca 2872 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
2873 else
2874 temp = stack_pointer_rtx;
2875
c85f7c16 2876 return copy_to_reg (temp);
2877}
2878
2879/* Generate code to push X onto the stack, assuming it has mode MODE and
2880 type TYPE.
2881 MODE is redundant except when X is a CONST_INT (since they don't
2882 carry mode info).
2883 SIZE is an rtx for the size of data to be copied (in bytes),
2884 needed only if X is BLKmode.
2885
19caa751 2886 ALIGN is maximum alignment we can assume.
bbf6f052 2887
2888 If PARTIAL and REG are both nonzero, then copy that many of the first
2889 words of X into registers starting with REG, and push the rest of X.
2890 The amount of space pushed is decreased by PARTIAL words,
2891 rounded *down* to a multiple of PARM_BOUNDARY.
2892 REG must be a hard register in this case.
 2893   If REG is zero but PARTIAL is not, take all other actions for an
2894 argument partially in registers, but do not actually load any
2895 registers.
2896
2897 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 2898 This is ignored if an argument block has already been allocated.
bbf6f052
RK
2899
2900 On a machine that lacks real push insns, ARGS_ADDR is the address of
2901 the bottom of the argument block for this call. We use indexing off there
2902 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2903 argument block has not been preallocated.
2904
2905 ARGS_SO_FAR is the size of args previously pushed for this call.
2906
2907 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2908 for arguments passed in registers. If nonzero, it will be the number
2909 of bytes required. */
2910
2911void
2912emit_push_insn (x, mode, type, size, align, partial, reg, extra,
2913 args_addr, args_so_far, reg_parm_stack_space,
2914 alignment_pad)
bbf6f052
RK
2915 register rtx x;
2916 enum machine_mode mode;
2917 tree type;
2918 rtx size;
729a2125 2919 unsigned int align;
2920 int partial;
2921 rtx reg;
2922 int extra;
2923 rtx args_addr;
2924 rtx args_so_far;
e5e809f4 2925 int reg_parm_stack_space;
4fc026cd 2926 rtx alignment_pad;
2927{
2928 rtx xinner;
2929 enum direction stack_direction
2930#ifdef STACK_GROWS_DOWNWARD
2931 = downward;
2932#else
2933 = upward;
2934#endif
2935
2936 /* Decide where to pad the argument: `downward' for below,
2937 `upward' for above, or `none' for don't pad it.
2938 Default is below for small data on big-endian machines; else above. */
2939 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2940
2941 /* Invert direction if stack is post-update. */
2942 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2943 if (where_pad != none)
2944 where_pad = (where_pad == downward ? upward : downward);
2945
2946 xinner = x = protect_from_queue (x, 0);
2947
2948 if (mode == BLKmode)
2949 {
2950 /* Copy a block into the stack, entirely or partially. */
2951
2952 register rtx temp;
2953 int used = partial * UNITS_PER_WORD;
2954 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2955 int skip;
2956
2957 if (size == 0)
2958 abort ();
2959
2960 used -= offset;
2961
2962 /* USED is now the # of bytes we need not copy to the stack
2963 because registers will take care of them. */
2964
2965 if (partial != 0)
2966 xinner = change_address (xinner, BLKmode,
2967 plus_constant (XEXP (xinner, 0), used));
2968
2969 /* If the partial register-part of the arg counts in its stack size,
2970 skip the part of stack space corresponding to the registers.
2971 Otherwise, start copying to the beginning of the stack space,
2972 by setting SKIP to 0. */
e5e809f4 2973 skip = (reg_parm_stack_space == 0) ? 0 : used;
2974
2975#ifdef PUSH_ROUNDING
2976 /* Do it with several push insns if that doesn't take lots of insns
2977 and if there is no difficulty with push insns that skip bytes
2978 on the stack for alignment purposes. */
2979 if (args_addr == 0
f73ad30e 2980 && PUSH_ARGS
2981 && GET_CODE (size) == CONST_INT
2982 && skip == 0
15914757 2983 && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
2984 /* Here we avoid the case of a structure whose weak alignment
2985 forces many pushes of a small amount of data,
2986 and such small pushes do rounding that causes trouble. */
e1565e65 2987 && ((! SLOW_UNALIGNED_ACCESS (word_mode, align))
19caa751 2988 || align >= BIGGEST_ALIGNMENT
bbf6f052 2989 || PUSH_ROUNDING (align) == align)
2990 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2991 {
2992 /* Push padding now if padding above and stack grows down,
2993 or if padding below and stack grows up.
2994 But if space already allocated, this has already been done. */
2995 if (extra && args_addr == 0
2996 && where_pad != none && where_pad != stack_direction)
906c4e36 2997 anti_adjust_stack (GEN_INT (extra));
bbf6f052 2998
38a448ca 2999 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
bbf6f052 3000 INTVAL (size) - used, align);
921b3427 3001
7d384cc0 3002 if (current_function_check_memory_usage && ! in_check_memory_usage)
3003 {
3004 rtx temp;
3005
956d6950 3006 in_check_memory_usage = 1;
921b3427 3007 temp = get_push_address (INTVAL(size) - used);
c85f7c16 3008 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427 3009 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3010 temp, Pmode,
3011 XEXP (xinner, 0), Pmode,
921b3427
RK
3012 GEN_INT (INTVAL(size) - used),
3013 TYPE_MODE (sizetype));
3014 else
3015 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 3016 temp, Pmode,
921b3427
RK
3017 GEN_INT (INTVAL(size) - used),
3018 TYPE_MODE (sizetype),
956d6950
JL
3019 GEN_INT (MEMORY_USE_RW),
3020 TYPE_MODE (integer_type_node));
3021 in_check_memory_usage = 0;
921b3427 3022 }
3023 }
3024 else
3025#endif /* PUSH_ROUNDING */
3026 {
3027 /* Otherwise make space on the stack and copy the data
3028 to the address of that space. */
3029
3030 /* Deduct words put into registers from the size we must copy. */
3031 if (partial != 0)
3032 {
3033 if (GET_CODE (size) == CONST_INT)
906c4e36 3034 size = GEN_INT (INTVAL (size) - used);
3035 else
3036 size = expand_binop (GET_MODE (size), sub_optab, size,
3037 GEN_INT (used), NULL_RTX, 0,
3038 OPTAB_LIB_WIDEN);
3039 }
3040
3041 /* Get the address of the stack space.
3042 In this case, we do not deal with EXTRA separately.
3043 A single stack adjust will do. */
3044 if (! args_addr)
3045 {
3046 temp = push_block (size, extra, where_pad == downward);
3047 extra = 0;
3048 }
3049 else if (GET_CODE (args_so_far) == CONST_INT)
3050 temp = memory_address (BLKmode,
3051 plus_constant (args_addr,
3052 skip + INTVAL (args_so_far)));
3053 else
3054 temp = memory_address (BLKmode,
3055 plus_constant (gen_rtx_PLUS (Pmode,
3056 args_addr,
3057 args_so_far),
bbf6f052 3058 skip));
7d384cc0 3059 if (current_function_check_memory_usage && ! in_check_memory_usage)
3060 {
3061 rtx target;
3062
956d6950 3063 in_check_memory_usage = 1;
921b3427 3064 target = copy_to_reg (temp);
c85f7c16 3065 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427 3066 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3067 target, Pmode,
3068 XEXP (xinner, 0), Pmode,
921b3427
RK
3069 size, TYPE_MODE (sizetype));
3070 else
3071 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 3072 target, Pmode,
921b3427 3073 size, TYPE_MODE (sizetype),
3074 GEN_INT (MEMORY_USE_RW),
3075 TYPE_MODE (integer_type_node));
3076 in_check_memory_usage = 0;
921b3427 3077 }
3078
3079 /* TEMP is the address of the block. Copy the data there. */
3080 if (GET_CODE (size) == CONST_INT
729a2125 3081 && MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align))
bbf6f052 3082 {
38a448ca 3083 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
bbf6f052
RK
3084 INTVAL (size), align);
3085 goto ret;
3086 }
e5e809f4 3087 else
bbf6f052 3088 {
19caa751 3089 rtx opalign = GEN_INT (align / BITS_PER_UNIT);
e5e809f4 3090 enum machine_mode mode;
9e6a5703 3091 rtx target = gen_rtx_MEM (BLKmode, temp);
e5e809f4
JL
3092
3093 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
3094 mode != VOIDmode;
3095 mode = GET_MODE_WIDER_MODE (mode))
c841050e 3096 {
e5e809f4 3097 enum insn_code code = movstr_optab[(int) mode];
a995e389 3098 insn_operand_predicate_fn pred;
3099
3100 if (code != CODE_FOR_nothing
3101 && ((GET_CODE (size) == CONST_INT
3102 && ((unsigned HOST_WIDE_INT) INTVAL (size)
3103 <= (GET_MODE_MASK (mode) >> 1)))
3104 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
3105 && (!(pred = insn_data[(int) code].operand[0].predicate)
3106 || ((*pred) (target, BLKmode)))
3107 && (!(pred = insn_data[(int) code].operand[1].predicate)
3108 || ((*pred) (xinner, BLKmode)))
3109 && (!(pred = insn_data[(int) code].operand[3].predicate)
3110 || ((*pred) (opalign, VOIDmode))))
3111 {
3112 rtx op2 = convert_to_mode (mode, size, 1);
3113 rtx last = get_last_insn ();
3114 rtx pat;
3115
3116 pred = insn_data[(int) code].operand[2].predicate;
3117 if (pred != 0 && ! (*pred) (op2, mode))
3118 op2 = copy_to_mode_reg (mode, op2);
3119
3120 pat = GEN_FCN ((int) code) (target, xinner,
3121 op2, opalign);
3122 if (pat)
3123 {
3124 emit_insn (pat);
3125 goto ret;
3126 }
3127 else
3128 delete_insns_since (last);
3129 }
c841050e 3130 }
bbf6f052 3131 }
bbf6f052 3132
3133 if (!ACCUMULATE_OUTGOING_ARGS)
3134 {
3135 /* If the source is referenced relative to the stack pointer,
3136 copy it to another register to stabilize it. We do not need
3137 to do this if we know that we won't be changing sp. */
bbf6f052 3138
3139 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
3140 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
3141 temp = copy_to_reg (temp);
3142 }
3143
3144 /* Make inhibit_defer_pop nonzero around the library call
3145 to force it to pop the bcopy-arguments right away. */
3146 NO_DEFER_POP;
3147#ifdef TARGET_MEM_FUNCTIONS
d562e42e 3148 emit_library_call (memcpy_libfunc, 0,
bbf6f052 3149 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
3150 convert_to_mode (TYPE_MODE (sizetype),
3151 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3152 TYPE_MODE (sizetype));
bbf6f052 3153#else
d562e42e 3154 emit_library_call (bcopy_libfunc, 0,
bbf6f052 3155 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3156 convert_to_mode (TYPE_MODE (integer_type_node),
3157 size,
3158 TREE_UNSIGNED (integer_type_node)),
3159 TYPE_MODE (integer_type_node));
3160#endif
3161 OK_DEFER_POP;
3162 }
3163 }
3164 else if (partial > 0)
3165 {
3166 /* Scalar partly in registers. */
3167
3168 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3169 int i;
3170 int not_stack;
3171 /* # words of start of argument
3172 that we must make space for but need not store. */
3173 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3174 int args_offset = INTVAL (args_so_far);
3175 int skip;
3176
3177 /* Push padding now if padding above and stack grows down,
3178 or if padding below and stack grows up.
3179 But if space already allocated, this has already been done. */
3180 if (extra && args_addr == 0
3181 && where_pad != none && where_pad != stack_direction)
906c4e36 3182 anti_adjust_stack (GEN_INT (extra));
3183
3184 /* If we make space by pushing it, we might as well push
3185 the real data. Otherwise, we can leave OFFSET nonzero
3186 and leave the space uninitialized. */
3187 if (args_addr == 0)
3188 offset = 0;
3189
3190 /* Now NOT_STACK gets the number of words that we don't need to
3191 allocate on the stack. */
3192 not_stack = partial - offset;
3193
3194 /* If the partial register-part of the arg counts in its stack size,
3195 skip the part of stack space corresponding to the registers.
3196 Otherwise, start copying to the beginning of the stack space,
3197 by setting SKIP to 0. */
e5e809f4 3198 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
3199
3200 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3201 x = validize_mem (force_const_mem (mode, x));
3202
3203 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3204 SUBREGs of such registers are not allowed. */
3205 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3206 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3207 x = copy_to_reg (x);
3208
3209 /* Loop over all the words allocated on the stack for this arg. */
3210 /* We can do it by words, because any scalar bigger than a word
3211 has a size a multiple of a word. */
3212#ifndef PUSH_ARGS_REVERSED
3213 for (i = not_stack; i < size; i++)
3214#else
3215 for (i = size - 1; i >= not_stack; i--)
3216#endif
3217 if (i >= not_stack + offset)
3218 emit_push_insn (operand_subword_force (x, i, mode),
3219 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3220 0, args_addr,
3221 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4 3222 * UNITS_PER_WORD)),
4fc026cd 3223 reg_parm_stack_space, alignment_pad);
3224 }
3225 else
3226 {
3227 rtx addr;
921b3427 3228 rtx target = NULL_RTX;
3229
3230 /* Push padding now if padding above and stack grows down,
3231 or if padding below and stack grows up.
3232 But if space already allocated, this has already been done. */
3233 if (extra && args_addr == 0
3234 && where_pad != none && where_pad != stack_direction)
906c4e36 3235 anti_adjust_stack (GEN_INT (extra));
3236
3237#ifdef PUSH_ROUNDING
f73ad30e 3238 if (args_addr == 0 && PUSH_ARGS)
3239 addr = gen_push_operand ();
3240 else
3241#endif
3242 {
3243 if (GET_CODE (args_so_far) == CONST_INT)
3244 addr
3245 = memory_address (mode,
3246 plus_constant (args_addr,
3247 INTVAL (args_so_far)));
3248 else
3249 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3250 args_so_far));
3251 target = addr;
3252 }
bbf6f052 3253
38a448ca 3254 emit_move_insn (gen_rtx_MEM (mode, addr), x);
921b3427 3255
7d384cc0 3256 if (current_function_check_memory_usage && ! in_check_memory_usage)
921b3427 3257 {
956d6950 3258 in_check_memory_usage = 1;
3259 if (target == 0)
3260 target = get_push_address (GET_MODE_SIZE (mode));
3261
c85f7c16 3262 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427 3263 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3264 target, Pmode,
3265 XEXP (x, 0), Pmode,
921b3427
RK
3266 GEN_INT (GET_MODE_SIZE (mode)),
3267 TYPE_MODE (sizetype));
3268 else
3269 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
6a9c4aed 3270 target, Pmode,
3271 GEN_INT (GET_MODE_SIZE (mode)),
3272 TYPE_MODE (sizetype),
3273 GEN_INT (MEMORY_USE_RW),
3274 TYPE_MODE (integer_type_node));
3275 in_check_memory_usage = 0;
921b3427 3276 }
3277 }
3278
3279 ret:
3280 /* If part should go in registers, copy that part
3281 into the appropriate registers. Do this now, at the end,
3282 since mem-to-mem copies above may do function calls. */
cd048831 3283 if (partial > 0 && reg != 0)
3284 {
3285 /* Handle calls that pass values in multiple non-contiguous locations.
3286 The Irix 6 ABI has examples of this. */
3287 if (GET_CODE (reg) == PARALLEL)
aac5cc16 3288 emit_group_load (reg, x, -1, align); /* ??? size? */
3289 else
3290 move_block_to_reg (REGNO (reg), x, partial, mode);
3291 }
3292
3293 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3294 anti_adjust_stack (GEN_INT (extra));
3295
3296 if (alignment_pad)
3297 anti_adjust_stack (alignment_pad);
3298}
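(Editorial illustration, not part of the original expr.c: a minimal call for the common scalar case; every argument value here is hypothetical, and a target with real push insns and PUSH_ARGS is assumed.)

/* Push one SImode scalar: no block copy (SIZE is unused for non-BLKmode),
   no partial-register split, no padding, and no preallocated argument
   block (ARGS_ADDR == 0, so a push insn is emitted directly).  */
static void
example_push (arg)
     rtx arg;
{
  emit_push_insn (arg, SImode, integer_type_node, NULL_RTX,
		  GET_MODE_ALIGNMENT (SImode), 0, NULL_RTX, 0,
		  NULL_RTX, const0_rtx, 0, NULL_RTX);
}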
3299\f
3300/* Expand an assignment that stores the value of FROM into TO.
3301 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
3302 (This may contain a QUEUED rtx;
3303 if the value is constant, this rtx is a constant.)
3304 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
3305
3306 SUGGEST_REG is no longer actually used.
3307 It used to mean, copy the value through a register
3308 and return that register, if that is possible.
709f5be1 3309 We now use WANT_VALUE to decide whether to do this. */
3310
3311rtx
3312expand_assignment (to, from, want_value, suggest_reg)
3313 tree to, from;
3314 int want_value;
c5c76735 3315 int suggest_reg ATTRIBUTE_UNUSED;
3316{
3317 register rtx to_rtx = 0;
3318 rtx result;
3319
3320 /* Don't crash if the lhs of the assignment was erroneous. */
3321
3322 if (TREE_CODE (to) == ERROR_MARK)
3323 {
3324 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3325 return want_value ? result : NULL_RTX;
3326 }
bbf6f052
RK
3327
3328 /* Assignment of a structure component needs special treatment
3329 if the structure component's rtx is not simply a MEM.
3330 Assignment of an array element at a constant index, and assignment of
3331 an array element in an unaligned packed structure field, has the same
3332 problem. */
bbf6f052 3333
3334 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3335 || TREE_CODE (to) == ARRAY_REF)
3336 {
3337 enum machine_mode mode1;
770ae6cc 3338 HOST_WIDE_INT bitsize, bitpos;
7bb0943f 3339 tree offset;
3340 int unsignedp;
3341 int volatilep = 0;
0088fcb1 3342 tree tem;
729a2125 3343 unsigned int alignment;
3344
3345 push_temp_slots ();
3346 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3347 &unsignedp, &volatilep, &alignment);
3348
3349 /* If we are going to use store_bit_field and extract_bit_field,
3350 make sure to_rtx will be safe for multiple use. */
3351
3352 if (mode1 == VOIDmode && want_value)
3353 tem = stabilize_reference (tem);
3354
921b3427 3355 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
7bb0943f
RS
3356 if (offset != 0)
3357 {
906c4e36 3358 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
3359
3360 if (GET_CODE (to_rtx) != MEM)
3361 abort ();
bd070e1a
RH
3362
3363 if (GET_MODE (offset_rtx) != ptr_mode)
3364 {
3365#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 3366 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
3367#else
3368 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3369#endif
3370 }
3371
9a7b9f4f
JL
 3372 /* A constant address in TO_RTX can have VOIDmode; we must not try
 3373 to call force_reg in that case. */
89752202
HB
3374 if (GET_CODE (to_rtx) == MEM
3375 && GET_MODE (to_rtx) == BLKmode
9a7b9f4f 3376 && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
89752202
HB
3377 && bitsize
3378 && (bitpos % bitsize) == 0
3379 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
19caa751 3380 && alignment == GET_MODE_ALIGNMENT (mode1))
89752202
HB
3381 {
3382 rtx temp = change_address (to_rtx, mode1,
3383 plus_constant (XEXP (to_rtx, 0),
3384 (bitpos /
3385 BITS_PER_UNIT)));
3386 if (GET_CODE (XEXP (temp, 0)) == REG)
3387 to_rtx = temp;
3388 else
3389 to_rtx = change_address (to_rtx, mode1,
3390 force_reg (GET_MODE (XEXP (temp, 0)),
3391 XEXP (temp, 0)));
3392 bitpos = 0;
3393 }
3394
7bb0943f 3395 to_rtx = change_address (to_rtx, VOIDmode,
38a448ca 3396 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
c5c76735
JL
3397 force_reg (ptr_mode,
3398 offset_rtx)));
7bb0943f 3399 }
c5c76735 3400
bbf6f052
RK
3401 if (volatilep)
3402 {
3403 if (GET_CODE (to_rtx) == MEM)
01188446
JW
3404 {
3405 /* When the offset is zero, to_rtx is the address of the
3406 structure we are storing into, and hence may be shared.
3407 We must make a new MEM before setting the volatile bit. */
3408 if (offset == 0)
effbcc6a
RK
3409 to_rtx = copy_rtx (to_rtx);
3410
01188446
JW
3411 MEM_VOLATILE_P (to_rtx) = 1;
3412 }
bbf6f052
RK
3413#if 0 /* This was turned off because, when a field is volatile
3414 in an object which is not volatile, the object may be in a register,
3415 and then we would abort over here. */
3416 else
3417 abort ();
3418#endif
3419 }
3420
956d6950
JL
3421 if (TREE_CODE (to) == COMPONENT_REF
3422 && TREE_READONLY (TREE_OPERAND (to, 1)))
3423 {
8bd6ecc2 3424 if (offset == 0)
956d6950
JL
3425 to_rtx = copy_rtx (to_rtx);
3426
3427 RTX_UNCHANGING_P (to_rtx) = 1;
3428 }
3429
921b3427 3430 /* Check the access. */
7d384cc0 3431 if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
921b3427
RK
3432 {
3433 rtx to_addr;
3434 int size;
3435 int best_mode_size;
3436 enum machine_mode best_mode;
3437
3438 best_mode = get_best_mode (bitsize, bitpos,
3439 TYPE_ALIGN (TREE_TYPE (tem)),
3440 mode1, volatilep);
3441 if (best_mode == VOIDmode)
3442 best_mode = QImode;
3443
3444 best_mode_size = GET_MODE_BITSIZE (best_mode);
3445 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3446 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3447 size *= GET_MODE_SIZE (best_mode);
3448
3449 /* Check the access right of the pointer. */
e9a25f70
JL
3450 if (size)
3451 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 3452 to_addr, Pmode,
e9a25f70 3453 GEN_INT (size), TYPE_MODE (sizetype),
956d6950
JL
3454 GEN_INT (MEMORY_USE_WO),
3455 TYPE_MODE (integer_type_node));
921b3427
RK
3456 }
3457
a69beca1
RK
3458 /* If this is a varying-length object, we must get the address of
3459 the source and do an explicit block move. */
3460 if (bitsize < 0)
3461 {
3462 unsigned int from_align;
3463 rtx from_rtx = expand_expr_unaligned (from, &from_align);
3464 rtx inner_to_rtx
3465 = change_address (to_rtx, VOIDmode,
3466 plus_constant (XEXP (to_rtx, 0),
3467 bitpos / BITS_PER_UNIT));
3468
3469 emit_block_move (inner_to_rtx, from_rtx, expr_size (from),
19caa751 3470 MIN (alignment, from_align));
a69beca1
RK
3471 free_temp_slots ();
3472 pop_temp_slots ();
3473 return to_rtx;
3474 }
3475 else
3476 {
3477 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3478 (want_value
3479 /* Spurious cast for HPUX compiler. */
3480 ? ((enum machine_mode)
3481 TYPE_MODE (TREE_TYPE (to)))
3482 : VOIDmode),
3483 unsignedp,
a69beca1
RK
3484 alignment,
3485 int_size_in_bytes (TREE_TYPE (tem)),
3486 get_alias_set (to));
3487
3488 preserve_temp_slots (result);
3489 free_temp_slots ();
3490 pop_temp_slots ();
3491
3492 /* If the value is meaningful, convert RESULT to the proper mode.
3493 Otherwise, return nothing. */
3494 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3495 TYPE_MODE (TREE_TYPE (from)),
3496 result,
3497 TREE_UNSIGNED (TREE_TYPE (to)))
3498 : NULL_RTX);
3499 }
bbf6f052
RK
3500 }
3501
cd1db108
RS
3502 /* If the rhs is a function call and its value is not an aggregate,
3503 call the function before we start to compute the lhs.
3504 This is needed for correct code for cases such as
3505 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
3506 requires loading up part of an address in a separate insn.
3507
1858863b
JW
3508 Don't do this if TO is a VAR_DECL or PARM_DECL whose DECL_RTL is REG
3509 since it might be a promoted variable where the zero- or sign- extension
3510 needs to be done. Handling this in the normal way is safe because no
3511 computation is done before the call. */
1ad87b63 3512 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 3513 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1858863b
JW
3514 && ! ((TREE_CODE (to) == VAR_DECL || TREE_CODE (to) == PARM_DECL)
3515 && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3516 {
0088fcb1
RK
3517 rtx value;
3518
3519 push_temp_slots ();
3520 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3521 if (to_rtx == 0)
921b3427 3522 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
aaf87c45 3523
fffa9c1d
JW
3524 /* Handle calls that return values in multiple non-contiguous locations.
3525 The Irix 6 ABI has examples of this. */
3526 if (GET_CODE (to_rtx) == PARALLEL)
aac5cc16 3527 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
19caa751 3528 TYPE_ALIGN (TREE_TYPE (from)));
fffa9c1d 3529 else if (GET_MODE (to_rtx) == BLKmode)
db3ec607 3530 emit_block_move (to_rtx, value, expr_size (from),
19caa751 3531 TYPE_ALIGN (TREE_TYPE (from)));
aaf87c45 3532 else
6419e5b0
DT
3533 {
3534#ifdef POINTERS_EXTEND_UNSIGNED
ab40f612
DT
3535 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3536 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
6419e5b0
DT
3537 value = convert_memory_address (GET_MODE (to_rtx), value);
3538#endif
3539 emit_move_insn (to_rtx, value);
3540 }
cd1db108
RS
3541 preserve_temp_slots (to_rtx);
3542 free_temp_slots ();
0088fcb1 3543 pop_temp_slots ();
709f5be1 3544 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3545 }
3546
bbf6f052
RK
3547 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3548 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3549
3550 if (to_rtx == 0)
41472af8
MM
3551 {
3552 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3553 if (GET_CODE (to_rtx) == MEM)
3554 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3555 }
bbf6f052 3556
86d38d25 3557 /* Don't move directly into a return register. */
14a774a9
RK
3558 if (TREE_CODE (to) == RESULT_DECL
3559 && (GET_CODE (to_rtx) == REG || GET_CODE (to_rtx) == PARALLEL))
86d38d25 3560 {
0088fcb1
RK
3561 rtx temp;
3562
3563 push_temp_slots ();
3564 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
14a774a9
RK
3565
3566 if (GET_CODE (to_rtx) == PARALLEL)
3567 emit_group_load (to_rtx, temp, int_size_in_bytes (TREE_TYPE (from)),
19caa751 3568 TYPE_ALIGN (TREE_TYPE (from)));
14a774a9
RK
3569 else
3570 emit_move_insn (to_rtx, temp);
3571
86d38d25
RS
3572 preserve_temp_slots (to_rtx);
3573 free_temp_slots ();
0088fcb1 3574 pop_temp_slots ();
709f5be1 3575 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3576 }
3577
bbf6f052
RK
3578 /* In case we are returning the contents of an object which overlaps
3579 the place the value is being stored, use a safe function when copying
3580 a value through a pointer into a structure value return block. */
3581 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3582 && current_function_returns_struct
3583 && !current_function_returns_pcc_struct)
3584 {
0088fcb1
RK
3585 rtx from_rtx, size;
3586
3587 push_temp_slots ();
33a20d10 3588 size = expr_size (from);
921b3427
RK
3589 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3590 EXPAND_MEMORY_USE_DONT);
3591
3592 /* Copy the rights of the bitmap. */
7d384cc0 3593 if (current_function_check_memory_usage)
921b3427 3594 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
6a9c4aed
MK
3595 XEXP (to_rtx, 0), Pmode,
3596 XEXP (from_rtx, 0), Pmode,
921b3427
RK
3597 convert_to_mode (TYPE_MODE (sizetype),
3598 size, TREE_UNSIGNED (sizetype)),
3599 TYPE_MODE (sizetype));
bbf6f052
RK
3600
3601#ifdef TARGET_MEM_FUNCTIONS
d562e42e 3602 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
3603 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3604 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
3605 convert_to_mode (TYPE_MODE (sizetype),
3606 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3607 TYPE_MODE (sizetype));
bbf6f052 3608#else
d562e42e 3609 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
3610 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3611 XEXP (to_rtx, 0), Pmode,
3b6f75e2
JW
3612 convert_to_mode (TYPE_MODE (integer_type_node),
3613 size, TREE_UNSIGNED (integer_type_node)),
3614 TYPE_MODE (integer_type_node));
bbf6f052
RK
3615#endif
3616
3617 preserve_temp_slots (to_rtx);
3618 free_temp_slots ();
0088fcb1 3619 pop_temp_slots ();
709f5be1 3620 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3621 }
3622
3623 /* Compute FROM and store the value in the rtx we got. */
3624
0088fcb1 3625 push_temp_slots ();
bbf6f052
RK
3626 result = store_expr (from, to_rtx, want_value);
3627 preserve_temp_slots (result);
3628 free_temp_slots ();
0088fcb1 3629 pop_temp_slots ();
709f5be1 3630 return want_value ? result : NULL_RTX;
bbf6f052
RK
3631}
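/* An illustrative example, not part of expr.c, of the source-level case
   behind the CALL_EXPR shortcut in expand_assignment above: the rhs
   call must be expanded before any part of the lhs, so that no lhs
   address computation is live across a call such as setjmp.  */

#include <setjmp.h>

static jmp_buf buf;
static int val;

static int
setjmp_assignment_example (void)
{
  val = setjmp (buf);	/* the call is emitted first, then the store */
  return val;
}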
3632
3633/* Generate code for computing expression EXP,
3634 and storing the value into TARGET.
bbf6f052
RK
3635 TARGET may contain a QUEUED rtx.
3636
709f5be1
RS
3637 If WANT_VALUE is nonzero, return a copy of the value
3638 not in TARGET, so that we can be sure to use the proper
3639 value in a containing expression even if TARGET has something
3640 else stored in it. If possible, we copy the value through a pseudo
3641 and return that pseudo. Or, if the value is constant, we try to
3642 return the constant. In some cases, we return a pseudo
3643 copied *from* TARGET.
3644
3645 If the mode is BLKmode then we may return TARGET itself.
 3646 It turns out that in BLKmode it doesn't cause a problem,
 3647 because C has no operators that could combine two different
3648 assignments into the same BLKmode object with different values
3649 with no sequence point. Will other languages need this to
3650 be more thorough?
3651
3652 If WANT_VALUE is 0, we return NULL, to make sure
3653 to catch quickly any cases where the caller uses the value
3654 and fails to set WANT_VALUE. */
bbf6f052
RK
3655
3656rtx
709f5be1 3657store_expr (exp, target, want_value)
bbf6f052
RK
3658 register tree exp;
3659 register rtx target;
709f5be1 3660 int want_value;
bbf6f052
RK
3661{
3662 register rtx temp;
3663 int dont_return_target = 0;
3664
3665 if (TREE_CODE (exp) == COMPOUND_EXPR)
3666 {
3667 /* Perform first part of compound expression, then assign from second
3668 part. */
3669 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3670 emit_queue ();
709f5be1 3671 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
3672 }
3673 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3674 {
3675 /* For conditional expression, get safe form of the target. Then
3676 test the condition, doing the appropriate assignment on either
3677 side. This avoids the creation of unnecessary temporaries.
3678 For non-BLKmode, it is more efficient not to do this. */
3679
3680 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3681
3682 emit_queue ();
3683 target = protect_from_queue (target, 1);
3684
dabf8373 3685 do_pending_stack_adjust ();
bbf6f052
RK
3686 NO_DEFER_POP;
3687 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 3688 start_cleanup_deferral ();
709f5be1 3689 store_expr (TREE_OPERAND (exp, 1), target, 0);
956d6950 3690 end_cleanup_deferral ();
bbf6f052
RK
3691 emit_queue ();
3692 emit_jump_insn (gen_jump (lab2));
3693 emit_barrier ();
3694 emit_label (lab1);
956d6950 3695 start_cleanup_deferral ();
709f5be1 3696 store_expr (TREE_OPERAND (exp, 2), target, 0);
956d6950 3697 end_cleanup_deferral ();
bbf6f052
RK
3698 emit_queue ();
3699 emit_label (lab2);
3700 OK_DEFER_POP;
a3a58acc 3701
709f5be1 3702 return want_value ? target : NULL_RTX;
bbf6f052 3703 }
bbf6f052 3704 else if (queued_subexp_p (target))
709f5be1
RS
3705 /* If target contains a postincrement, let's not risk
3706 using it as the place to generate the rhs. */
bbf6f052
RK
3707 {
3708 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3709 {
3710 /* Expand EXP into a new pseudo. */
3711 temp = gen_reg_rtx (GET_MODE (target));
3712 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3713 }
3714 else
906c4e36 3715 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
3716
3717 /* If target is volatile, ANSI requires accessing the value
3718 *from* the target, if it is accessed. So make that happen.
3719 In no case return the target itself. */
3720 if (! MEM_VOLATILE_P (target) && want_value)
3721 dont_return_target = 1;
bbf6f052 3722 }
12f06d17
CH
3723 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
3724 && GET_MODE (target) != BLKmode)
3725 /* If target is in memory and caller wants value in a register instead,
3726 arrange that. Pass TARGET as target for expand_expr so that,
3727 if EXP is another assignment, WANT_VALUE will be nonzero for it.
3728 We know expand_expr will not use the target in that case.
3729 Don't do this if TARGET is volatile because we are supposed
3730 to write it and then read it. */
3731 {
1da93fe0 3732 temp = expand_expr (exp, target, GET_MODE (target), 0);
12f06d17
CH
3733 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3734 temp = copy_to_reg (temp);
3735 dont_return_target = 1;
3736 }
1499e0a8
RK
3737 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
 3738 /* If this is a scalar in a register that is stored in a wider mode
3739 than the declared mode, compute the result into its declared mode
3740 and then convert to the wider mode. Our value is the computed
3741 expression. */
3742 {
5a32d038 3743 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
3744 which will often result in some optimizations. Do the conversion
3745 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
 3746 the extension. But don't do this if the type of EXP is a subtype
3747 of something else since then the conversion might involve
3748 more than just converting modes. */
3749 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3750 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
3751 {
3752 if (TREE_UNSIGNED (TREE_TYPE (exp))
3753 != SUBREG_PROMOTED_UNSIGNED_P (target))
3754 exp
3755 = convert
3756 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3757 TREE_TYPE (exp)),
3758 exp);
3759
3760 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3761 SUBREG_PROMOTED_UNSIGNED_P (target)),
3762 exp);
3763 }
5a32d038 3764
1499e0a8 3765 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
b258707c 3766
766f36c7 3767 /* If TEMP is a volatile MEM and we want a result value, make
f29369b9
RK
3768 the access now so it gets done only once. Likewise if
3769 it contains TARGET. */
3770 if (GET_CODE (temp) == MEM && want_value
3771 && (MEM_VOLATILE_P (temp)
3772 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
766f36c7
RK
3773 temp = copy_to_reg (temp);
3774
b258707c
RS
3775 /* If TEMP is a VOIDmode constant, use convert_modes to make
3776 sure that we properly convert it. */
3777 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3778 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3779 TYPE_MODE (TREE_TYPE (exp)), temp,
3780 SUBREG_PROMOTED_UNSIGNED_P (target));
3781
1499e0a8
RK
3782 convert_move (SUBREG_REG (target), temp,
3783 SUBREG_PROMOTED_UNSIGNED_P (target));
3dbecef9
JW
3784
3785 /* If we promoted a constant, change the mode back down to match
3786 target. Otherwise, the caller might get confused by a result whose
3787 mode is larger than expected. */
3788
3789 if (want_value && GET_MODE (temp) != GET_MODE (target)
3790 && GET_MODE (temp) != VOIDmode)
3791 {
3792 temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
3793 SUBREG_PROMOTED_VAR_P (temp) = 1;
3794 SUBREG_PROMOTED_UNSIGNED_P (temp)
3795 = SUBREG_PROMOTED_UNSIGNED_P (target);
3796 }
3797
709f5be1 3798 return want_value ? temp : NULL_RTX;
1499e0a8 3799 }
bbf6f052
RK
3800 else
3801 {
3802 temp = expand_expr (exp, target, GET_MODE (target), 0);
766f36c7 3803 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
3804 If TARGET is a volatile mem ref, either return TARGET
3805 or return a reg copied *from* TARGET; ANSI requires this.
3806
3807 Otherwise, if TEMP is not TARGET, return TEMP
3808 if it is constant (for efficiency),
3809 or if we really want the correct value. */
bbf6f052
RK
3810 if (!(target && GET_CODE (target) == REG
3811 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 3812 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 3813 && ! rtx_equal_p (temp, target)
709f5be1 3814 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
3815 dont_return_target = 1;
3816 }
3817
b258707c
RS
3818 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3819 the same as that of TARGET, adjust the constant. This is needed, for
3820 example, in case it is a CONST_DOUBLE and we want only a word-sized
3821 value. */
3822 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 3823 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
3824 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3825 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3826 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3827
7d384cc0 3828 if (current_function_check_memory_usage
921b3427
RK
3829 && GET_CODE (target) == MEM
3830 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3831 {
3832 if (GET_CODE (temp) == MEM)
3833 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
6a9c4aed
MK
3834 XEXP (target, 0), Pmode,
3835 XEXP (temp, 0), Pmode,
921b3427
RK
3836 expr_size (exp), TYPE_MODE (sizetype));
3837 else
3838 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 3839 XEXP (target, 0), Pmode,
921b3427 3840 expr_size (exp), TYPE_MODE (sizetype),
956d6950
JL
3841 GEN_INT (MEMORY_USE_WO),
3842 TYPE_MODE (integer_type_node));
921b3427
RK
3843 }
3844
bbf6f052
RK
3845 /* If value was not generated in the target, store it there.
 3846 Convert the value to TARGET's type first if necessary. */
f3f2255a
R
3847 /* If TEMP and TARGET compare equal according to rtx_equal_p, but
3848 one or both of them are volatile memory refs, we have to distinguish
3849 two cases:
3850 - expand_expr has used TARGET. In this case, we must not generate
3851 another copy. This can be detected by TARGET being equal according
3852 to == .
3853 - expand_expr has not used TARGET - that means that the source just
3854 happens to have the same RTX form. Since temp will have been created
3855 by expand_expr, it will compare unequal according to == .
3856 We must generate a copy in this case, to reach the correct number
3857 of volatile memory references. */
bbf6f052 3858
6036acbb 3859 if ((! rtx_equal_p (temp, target)
f3f2255a
R
3860 || (temp != target && (side_effects_p (temp)
3861 || side_effects_p (target))))
6036acbb 3862 && TREE_CODE (exp) != ERROR_MARK)
bbf6f052
RK
3863 {
3864 target = protect_from_queue (target, 1);
3865 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 3866 && GET_MODE (temp) != VOIDmode)
bbf6f052
RK
3867 {
3868 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3869 if (dont_return_target)
3870 {
3871 /* In this case, we will return TEMP,
3872 so make sure it has the proper mode.
3873 But don't forget to store the value into TARGET. */
3874 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3875 emit_move_insn (target, temp);
3876 }
3877 else
3878 convert_move (target, temp, unsignedp);
3879 }
3880
3881 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3882 {
3883 /* Handle copying a string constant into an array.
3884 The string constant may be shorter than the array.
3885 So copy just the string's actual length, and clear the rest. */
3886 rtx size;
22619c3f 3887 rtx addr;
bbf6f052 3888
e87b4f3f
RS
3889 /* Get the size of the data type of the string,
3890 which is actually the size of the target. */
3891 size = expr_size (exp);
3892 if (GET_CODE (size) == CONST_INT
3893 && INTVAL (size) < TREE_STRING_LENGTH (exp))
19caa751 3894 emit_block_move (target, temp, size, TYPE_ALIGN (TREE_TYPE (exp)));
e87b4f3f 3895 else
bbf6f052 3896 {
e87b4f3f
RS
3897 /* Compute the size of the data to copy from the string. */
3898 tree copy_size
c03b7665 3899 = size_binop (MIN_EXPR,
b50d17a1 3900 make_tree (sizetype, size),
fed3cef0 3901 size_int (TREE_STRING_LENGTH (exp)));
906c4e36
RK
3902 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3903 VOIDmode, 0);
e87b4f3f
RS
3904 rtx label = 0;
3905
3906 /* Copy that much. */
3907 emit_block_move (target, temp, copy_size_rtx,
19caa751 3908 TYPE_ALIGN (TREE_TYPE (exp)));
e87b4f3f 3909
88f63c77
RK
3910 /* Figure out how much is left in TARGET that we have to clear.
3911 Do all calculations in ptr_mode. */
3912
3913 addr = XEXP (target, 0);
3914 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3915
e87b4f3f
RS
3916 if (GET_CODE (copy_size_rtx) == CONST_INT)
3917 {
88f63c77 3918 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
22619c3f 3919 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
e87b4f3f
RS
3920 }
3921 else
3922 {
88f63c77
RK
3923 addr = force_reg (ptr_mode, addr);
3924 addr = expand_binop (ptr_mode, add_optab, addr,
906c4e36
RK
3925 copy_size_rtx, NULL_RTX, 0,
3926 OPTAB_LIB_WIDEN);
e87b4f3f 3927
88f63c77 3928 size = expand_binop (ptr_mode, sub_optab, size,
906c4e36
RK
3929 copy_size_rtx, NULL_RTX, 0,
3930 OPTAB_LIB_WIDEN);
e87b4f3f 3931
e87b4f3f 3932 label = gen_label_rtx ();
c5d5d461
JL
3933 emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
3934 GET_MODE (size), 0, 0, label);
e87b4f3f
RS
3935 }
3936
3937 if (size != const0_rtx)
3938 {
921b3427 3939 /* Be sure we can write on ADDR. */
7d384cc0 3940 if (current_function_check_memory_usage)
921b3427 3941 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 3942 addr, Pmode,
921b3427 3943 size, TYPE_MODE (sizetype),
956d6950
JL
3944 GEN_INT (MEMORY_USE_WO),
3945 TYPE_MODE (integer_type_node));
bbf6f052 3946#ifdef TARGET_MEM_FUNCTIONS
3b6f75e2 3947 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
86242483 3948 addr, ptr_mode,
3b6f75e2
JW
3949 const0_rtx, TYPE_MODE (integer_type_node),
3950 convert_to_mode (TYPE_MODE (sizetype),
3951 size,
3952 TREE_UNSIGNED (sizetype)),
3953 TYPE_MODE (sizetype));
bbf6f052 3954#else
d562e42e 3955 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
86242483 3956 addr, ptr_mode,
3b6f75e2
JW
3957 convert_to_mode (TYPE_MODE (integer_type_node),
3958 size,
3959 TREE_UNSIGNED (integer_type_node)),
3960 TYPE_MODE (integer_type_node));
bbf6f052 3961#endif
e87b4f3f 3962 }
22619c3f 3963
e87b4f3f
RS
3964 if (label)
3965 emit_label (label);
bbf6f052
RK
3966 }
3967 }
fffa9c1d
JW
3968 /* Handle calls that return values in multiple non-contiguous locations.
3969 The Irix 6 ABI has examples of this. */
3970 else if (GET_CODE (target) == PARALLEL)
aac5cc16 3971 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
19caa751 3972 TYPE_ALIGN (TREE_TYPE (exp)));
bbf6f052
RK
3973 else if (GET_MODE (temp) == BLKmode)
3974 emit_block_move (target, temp, expr_size (exp),
19caa751 3975 TYPE_ALIGN (TREE_TYPE (exp)));
bbf6f052
RK
3976 else
3977 emit_move_insn (target, temp);
3978 }
709f5be1 3979
766f36c7
RK
3980 /* If we don't want a value, return NULL_RTX. */
3981 if (! want_value)
3982 return NULL_RTX;
3983
3984 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3985 ??? The latter test doesn't seem to make sense. */
3986 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 3987 return temp;
766f36c7
RK
3988
3989 /* Return TARGET itself if it is a hard register. */
3990 else if (want_value && GET_MODE (target) != BLKmode
3991 && ! (GET_CODE (target) == REG
3992 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 3993 return copy_to_reg (target);
766f36c7
RK
3994
3995 else
709f5be1 3996 return target;
bbf6f052
RK
3997}
3998\f
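/* An illustrative sketch, not part of expr.c, of the semantics
   store_expr above implements for a string constant stored into a
   longer array (e.g. `char buf[16] = "hi";'): copy the string's bytes,
   then clear whatever is left of the target.  The helper name is
   hypothetical.  */

#include <string.h>

static void
store_string_cst_example (char *target, size_t target_size,
                          const char *str, size_t str_len)
{
  size_t copy = str_len < target_size ? str_len : target_size;

  memcpy (target, str, copy);	/* emit_block_move of copy_size */
  if (copy < target_size)
    memset (target + copy, 0, target_size - copy);	/* clear the rest */
}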
9de08200
RK
3999/* Return 1 if EXP just contains zeros. */
4000
4001static int
4002is_zeros_p (exp)
4003 tree exp;
4004{
4005 tree elt;
4006
4007 switch (TREE_CODE (exp))
4008 {
4009 case CONVERT_EXPR:
4010 case NOP_EXPR:
4011 case NON_LVALUE_EXPR:
4012 return is_zeros_p (TREE_OPERAND (exp, 0));
4013
4014 case INTEGER_CST:
05bccae2 4015 return integer_zerop (exp);
9de08200
RK
4016
4017 case COMPLEX_CST:
4018 return
4019 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
4020
4021 case REAL_CST:
41c9120b 4022 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200
RK
4023
4024 case CONSTRUCTOR:
e1a43f73
PB
4025 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4026 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
4027 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4028 if (! is_zeros_p (TREE_VALUE (elt)))
4029 return 0;
4030
4031 return 1;
e9a25f70
JL
4032
4033 default:
4034 return 0;
9de08200 4035 }
9de08200
RK
4036}
4037
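/* Illustrative initializers, not part of expr.c, showing what
   is_zeros_p above accepts.  REAL_VALUES_IDENTICAL against dconst0
   means a negative floating zero does not count as "just zeros".  */

struct zp_example { int a; double b; };

static struct zp_example zp_yes = { 0, 0.0 };	/* all zeros */
static struct zp_example zp_no1 = { 1, 0.0 };	/* nonzero element */
static struct zp_example zp_no2 = { 0, -0.0 };	/* minus zero */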
4038/* Return 1 if EXP contains mostly (3/4) zeros. */
4039
4040static int
4041mostly_zeros_p (exp)
4042 tree exp;
4043{
9de08200
RK
4044 if (TREE_CODE (exp) == CONSTRUCTOR)
4045 {
e1a43f73
PB
4046 int elts = 0, zeros = 0;
4047 tree elt = CONSTRUCTOR_ELTS (exp);
4048 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
4049 {
4050 /* If there are no ranges of true bits, it is all zero. */
4051 return elt == NULL_TREE;
4052 }
4053 for (; elt; elt = TREE_CHAIN (elt))
4054 {
4055 /* We do not handle the case where the index is a RANGE_EXPR,
4056 so the statistic will be somewhat inaccurate.
4057 We do make a more accurate count in store_constructor itself,
 4058 and since this function is only used for nested array elements,
0f41302f 4059 this should be close enough. */
e1a43f73
PB
4060 if (mostly_zeros_p (TREE_VALUE (elt)))
4061 zeros++;
4062 elts++;
4063 }
9de08200
RK
4064
4065 return 4 * zeros >= 3 * elts;
4066 }
4067
4068 return is_zeros_p (exp);
4069}
4070\f
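/* An illustrative sketch, not part of expr.c, of the strategy
   mostly_zeros_p enables: when at least 3/4 of the elements are zero,
   clearing the whole object and then storing only the nonzero elements
   is cheaper than storing every element.  */

#include <string.h>

static void
mostly_zero_init_example (int *v)	/* as for `int v[8]' with one nonzero */
{
  memset (v, 0, 8 * sizeof *v);	/* clear_storage for the whole object */
  v[3] = 5;			/* store only the nonzero element */
}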
e1a43f73
PB
4071/* Helper function for store_constructor.
4072 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
4073 TYPE is the type of the CONSTRUCTOR, not the element type.
c5c76735 4074 ALIGN and CLEARED are as for store_constructor.
23ccec44
JW
4075
4076 This provides a recursive shortcut back to store_constructor when it isn't
4077 necessary to go through store_field. This is so that we can pass through
4078 the cleared field to let store_constructor know that we may not have to
4079 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
4080
4081static void
4082store_constructor_field (target, bitsize, bitpos,
c5c76735 4083 mode, exp, type, align, cleared)
e1a43f73 4084 rtx target;
770ae6cc
RK
4085 unsigned HOST_WIDE_INT bitsize;
4086 HOST_WIDE_INT bitpos;
e1a43f73
PB
4087 enum machine_mode mode;
4088 tree exp, type;
729a2125 4089 unsigned int align;
e1a43f73
PB
4090 int cleared;
4091{
4092 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44
JW
4093 && bitpos % BITS_PER_UNIT == 0
4094 /* If we have a non-zero bitpos for a register target, then we just
4095 let store_field do the bitfield handling. This is unlikely to
 4096 generate unnecessary clear instructions anyway. */
4097 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 4098 {
126e5b0d 4099 if (bitpos != 0)
ce64861e
RK
4100 target
4101 = change_address (target,
4102 GET_MODE (target) == BLKmode
4103 || 0 != (bitpos
4104 % GET_MODE_ALIGNMENT (GET_MODE (target)))
4105 ? BLKmode : VOIDmode,
4106 plus_constant (XEXP (target, 0),
4107 bitpos / BITS_PER_UNIT));
b7010412 4108 store_constructor (exp, target, align, cleared, bitsize / BITS_PER_UNIT);
e1a43f73
PB
4109 }
4110 else
19caa751 4111 store_field (target, bitsize, bitpos, mode, exp, VOIDmode, 0, align,
0db5adc6 4112 int_size_in_bytes (type), 0);
e1a43f73
PB
4113}
4114
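/* An illustrative initializer, not part of expr.c, of the shape that
   takes the recursive shortcut above: the inner CONSTRUCTOR starts on a
   byte boundary, so store_constructor is re-entered directly and the
   CLEARED flag is passed through instead of going via store_field.  */

struct sc_inner { int x, y; };
struct sc_outer { struct sc_inner in; int z; };

static struct sc_outer sc_example = { { 1, 2 }, 3 };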
bbf6f052 4115/* Store the value of constructor EXP into the rtx TARGET.
e1a43f73 4116 TARGET is either a REG or a MEM.
19caa751 4117 ALIGN is the maximum known alignment for TARGET.
b7010412
RK
 4118 CLEARED is true if TARGET is known to have been zeroed.
4119 SIZE is the number of bytes of TARGET we are allowed to modify: this
4120 may not be the same as the size of EXP if we are assigning to a field
4121 which has been packed to exclude padding bits. */
bbf6f052
RK
4122
4123static void
b7010412 4124store_constructor (exp, target, align, cleared, size)
bbf6f052
RK
4125 tree exp;
4126 rtx target;
729a2125 4127 unsigned int align;
e1a43f73 4128 int cleared;
13eb1f7f 4129 HOST_WIDE_INT size;
bbf6f052 4130{
4af3895e 4131 tree type = TREE_TYPE (exp);
a5efcd63 4132#ifdef WORD_REGISTER_OPERATIONS
13eb1f7f 4133 HOST_WIDE_INT exp_size = int_size_in_bytes (type);
a5efcd63 4134#endif
4af3895e 4135
bbf6f052
RK
4136 /* We know our target cannot conflict, since safe_from_p has been called. */
4137#if 0
4138 /* Don't try copying piece by piece into a hard register
4139 since that is vulnerable to being clobbered by EXP.
4140 Instead, construct in a pseudo register and then copy it all. */
4141 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
4142 {
4143 rtx temp = gen_reg_rtx (GET_MODE (target));
7205485e 4144 store_constructor (exp, temp, align, cleared, size);
bbf6f052
RK
4145 emit_move_insn (target, temp);
4146 return;
4147 }
4148#endif
4149
e44842fe
RK
4150 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
4151 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
4152 {
4153 register tree elt;
4154
4af3895e 4155 /* Inform later passes that the whole union value is dead. */
dd1db5ec
RK
4156 if ((TREE_CODE (type) == UNION_TYPE
4157 || TREE_CODE (type) == QUAL_UNION_TYPE)
4158 && ! cleared)
a59f8640
R
4159 {
4160 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4161
4162 /* If the constructor is empty, clear the union. */
4163 if (! CONSTRUCTOR_ELTS (exp) && ! cleared)
19caa751 4164 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
a59f8640 4165 }
4af3895e
JVA
4166
4167 /* If we are building a static constructor into a register,
4168 set the initial value as zero so we can fold the value into
67225c15
RK
4169 a constant. But if more than one register is involved,
4170 this probably loses. */
4171 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
4172 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200
RK
4173 {
4174 if (! cleared)
e9a25f70 4175 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4af3895e 4176
9de08200
RK
4177 cleared = 1;
4178 }
4179
4180 /* If the constructor has fewer fields than the structure
4181 or if we are initializing the structure to mostly zeros,
bbf6f052 4182 clear the whole structure first. */
9376fcd6
RK
4183 else if (size > 0
4184 && ((list_length (CONSTRUCTOR_ELTS (exp))
c3b247b4 4185 != fields_length (type))
9376fcd6 4186 || mostly_zeros_p (exp)))
9de08200
RK
4187 {
4188 if (! cleared)
19caa751 4189 clear_storage (target, GEN_INT (size), align);
9de08200
RK
4190
4191 cleared = 1;
4192 }
dd1db5ec 4193 else if (! cleared)
bbf6f052 4194 /* Inform later passes that the old value is dead. */
38a448ca 4195 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4196
4197 /* Store each element of the constructor into
4198 the corresponding field of TARGET. */
4199
4200 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
4201 {
4202 register tree field = TREE_PURPOSE (elt);
c5c76735 4203#ifdef WORD_REGISTER_OPERATIONS
34c73909 4204 tree value = TREE_VALUE (elt);
c5c76735 4205#endif
bbf6f052 4206 register enum machine_mode mode;
770ae6cc
RK
4207 HOST_WIDE_INT bitsize;
4208 HOST_WIDE_INT bitpos = 0;
bbf6f052 4209 int unsignedp;
770ae6cc 4210 tree offset;
b50d17a1 4211 rtx to_rtx = target;
bbf6f052 4212
f32fd778
RS
4213 /* Just ignore missing fields.
4214 We cleared the whole structure, above,
4215 if any fields are missing. */
4216 if (field == 0)
4217 continue;
4218
e1a43f73
PB
4219 if (cleared && is_zeros_p (TREE_VALUE (elt)))
4220 continue;
9de08200 4221
770ae6cc
RK
4222 if (host_integerp (DECL_SIZE (field), 1))
4223 bitsize = tree_low_cst (DECL_SIZE (field), 1);
14a774a9
RK
4224 else
4225 bitsize = -1;
4226
bbf6f052
RK
4227 unsignedp = TREE_UNSIGNED (field);
4228 mode = DECL_MODE (field);
4229 if (DECL_BIT_FIELD (field))
4230 mode = VOIDmode;
4231
770ae6cc
RK
4232 offset = DECL_FIELD_OFFSET (field);
4233 if (host_integerp (offset, 0)
4234 && host_integerp (bit_position (field), 0))
4235 {
4236 bitpos = int_bit_position (field);
4237 offset = 0;
4238 }
b50d17a1 4239 else
770ae6cc
RK
4240 bitpos = tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 0);
4241
b50d17a1
RK
4242 if (offset)
4243 {
4244 rtx offset_rtx;
4245
4246 if (contains_placeholder_p (offset))
7fa96708 4247 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 4248 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 4249
b50d17a1
RK
4250 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4251 if (GET_CODE (to_rtx) != MEM)
4252 abort ();
4253
bd070e1a
RH
4254 if (GET_MODE (offset_rtx) != ptr_mode)
4255 {
4256#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 4257 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
4258#else
4259 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4260#endif
4261 }
4262
b50d17a1
RK
4263 to_rtx
4264 = change_address (to_rtx, VOIDmode,
38a448ca 4265 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
c5c76735
JL
4266 force_reg (ptr_mode,
4267 offset_rtx)));
7fa96708 4268 align = DECL_OFFSET_ALIGN (field);
b50d17a1 4269 }
c5c76735 4270
cf04eb80
RK
4271 if (TREE_READONLY (field))
4272 {
9151b3bf 4273 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
4274 to_rtx = copy_rtx (to_rtx);
4275
cf04eb80
RK
4276 RTX_UNCHANGING_P (to_rtx) = 1;
4277 }
4278
34c73909
R
4279#ifdef WORD_REGISTER_OPERATIONS
4280 /* If this initializes a field that is smaller than a word, at the
4281 start of a word, try to widen it to a full word.
4282 This special case allows us to output C++ member function
4283 initializations in a form that the optimizers can understand. */
770ae6cc 4284 if (GET_CODE (target) == REG
34c73909
R
4285 && bitsize < BITS_PER_WORD
4286 && bitpos % BITS_PER_WORD == 0
4287 && GET_MODE_CLASS (mode) == MODE_INT
4288 && TREE_CODE (value) == INTEGER_CST
13eb1f7f
RK
4289 && exp_size >= 0
4290 && bitpos + BITS_PER_WORD <= exp_size * BITS_PER_UNIT)
34c73909
R
4291 {
4292 tree type = TREE_TYPE (value);
4293 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4294 {
4295 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4296 value = convert (type, value);
4297 }
4298 if (BYTES_BIG_ENDIAN)
4299 value
4300 = fold (build (LSHIFT_EXPR, type, value,
4301 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4302 bitsize = BITS_PER_WORD;
4303 mode = word_mode;
4304 }
4305#endif
c5c76735 4306 store_constructor_field (to_rtx, bitsize, bitpos, mode,
7fa96708 4307 TREE_VALUE (elt), type, align, cleared);
bbf6f052
RK
4308 }
4309 }
4af3895e 4310 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
4311 {
4312 register tree elt;
4313 register int i;
e1a43f73 4314 int need_to_clear;
4af3895e 4315 tree domain = TYPE_DOMAIN (type);
906c4e36
RK
4316 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4317 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 4318 tree elttype = TREE_TYPE (type);
bbf6f052 4319
e1a43f73 4320 /* If the constructor has fewer elements than the array,
38e01259 4321 clear the whole array first. Similarly if this is
e1a43f73
PB
 4322 a static constructor of a non-BLKmode object. */
4323 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4324 need_to_clear = 1;
4325 else
4326 {
4327 HOST_WIDE_INT count = 0, zero_count = 0;
4328 need_to_clear = 0;
4329 /* This loop is a more accurate version of the loop in
4330 mostly_zeros_p (it handles RANGE_EXPR in an index).
4331 It is also needed to check for missing elements. */
4332 for (elt = CONSTRUCTOR_ELTS (exp);
4333 elt != NULL_TREE;
df0faff1 4334 elt = TREE_CHAIN (elt))
e1a43f73
PB
4335 {
4336 tree index = TREE_PURPOSE (elt);
4337 HOST_WIDE_INT this_node_count;
19caa751 4338
e1a43f73
PB
4339 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4340 {
4341 tree lo_index = TREE_OPERAND (index, 0);
4342 tree hi_index = TREE_OPERAND (index, 1);
05bccae2 4343
19caa751
RK
4344 if (! host_integerp (lo_index, 1)
4345 || ! host_integerp (hi_index, 1))
e1a43f73
PB
4346 {
4347 need_to_clear = 1;
4348 break;
4349 }
19caa751
RK
4350
4351 this_node_count = (tree_low_cst (hi_index, 1)
4352 - tree_low_cst (lo_index, 1) + 1);
e1a43f73
PB
4353 }
4354 else
4355 this_node_count = 1;
4356 count += this_node_count;
4357 if (mostly_zeros_p (TREE_VALUE (elt)))
4358 zero_count += this_node_count;
4359 }
8e958f70 4360 /* Clear the entire array first if there are any missing elements,
0f41302f 4361 or if the incidence of zero elements is >= 75%. */
8e958f70
PB
4362 if (count < maxelt - minelt + 1
4363 || 4 * zero_count >= 3 * count)
e1a43f73
PB
4364 need_to_clear = 1;
4365 }
9376fcd6 4366 if (need_to_clear && size > 0)
9de08200
RK
4367 {
4368 if (! cleared)
19caa751 4369 clear_storage (target, GEN_INT (size), align);
9de08200
RK
4370 cleared = 1;
4371 }
bbf6f052
RK
4372 else
4373 /* Inform later passes that the old value is dead. */
38a448ca 4374 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4375
4376 /* Store each element of the constructor into
4377 the corresponding element of TARGET, determined
4378 by counting the elements. */
4379 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4380 elt;
4381 elt = TREE_CHAIN (elt), i++)
4382 {
4383 register enum machine_mode mode;
19caa751
RK
4384 HOST_WIDE_INT bitsize;
4385 HOST_WIDE_INT bitpos;
bbf6f052 4386 int unsignedp;
e1a43f73 4387 tree value = TREE_VALUE (elt);
729a2125 4388 unsigned int align = TYPE_ALIGN (TREE_TYPE (value));
03dc44a6
RS
4389 tree index = TREE_PURPOSE (elt);
4390 rtx xtarget = target;
bbf6f052 4391
e1a43f73
PB
4392 if (cleared && is_zeros_p (value))
4393 continue;
9de08200 4394
bbf6f052 4395 unsignedp = TREE_UNSIGNED (elttype);
14a774a9
RK
4396 mode = TYPE_MODE (elttype);
4397 if (mode == BLKmode)
19caa751
RK
4398 bitsize = (host_integerp (TYPE_SIZE (elttype), 1)
4399 ? tree_low_cst (TYPE_SIZE (elttype), 1)
4400 : -1);
14a774a9
RK
4401 else
4402 bitsize = GET_MODE_BITSIZE (mode);
bbf6f052 4403
e1a43f73
PB
4404 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4405 {
4406 tree lo_index = TREE_OPERAND (index, 0);
4407 tree hi_index = TREE_OPERAND (index, 1);
4408 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4409 struct nesting *loop;
05c0b405
PB
4410 HOST_WIDE_INT lo, hi, count;
4411 tree position;
e1a43f73 4412
0f41302f 4413 /* If the range is constant and "small", unroll the loop. */
19caa751
RK
4414 if (host_integerp (lo_index, 0)
4415 && host_integerp (hi_index, 0)
4416 && (lo = tree_low_cst (lo_index, 0),
4417 hi = tree_low_cst (hi_index, 0),
05c0b405
PB
4418 count = hi - lo + 1,
4419 (GET_CODE (target) != MEM
4420 || count <= 2
19caa751
RK
4421 || (host_integerp (TYPE_SIZE (elttype), 1)
4422 && (tree_low_cst (TYPE_SIZE (elttype), 1) * count
4423 <= 40 * 8)))))
e1a43f73 4424 {
05c0b405
PB
4425 lo -= minelt; hi -= minelt;
4426 for (; lo <= hi; lo++)
e1a43f73 4427 {
19caa751 4428 bitpos = lo * tree_low_cst (TYPE_SIZE (elttype), 0);
c5c76735
JL
4429 store_constructor_field (target, bitsize, bitpos, mode,
4430 value, type, align, cleared);
e1a43f73
PB
4431 }
4432 }
4433 else
4434 {
4435 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4436 loop_top = gen_label_rtx ();
4437 loop_end = gen_label_rtx ();
4438
4439 unsignedp = TREE_UNSIGNED (domain);
4440
4441 index = build_decl (VAR_DECL, NULL_TREE, domain);
4442
4443 DECL_RTL (index) = index_r
4444 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4445 &unsignedp, 0));
4446
4447 if (TREE_CODE (value) == SAVE_EXPR
4448 && SAVE_EXPR_RTL (value) == 0)
4449 {
0f41302f
MS
4450 /* Make sure value gets expanded once before the
4451 loop. */
e1a43f73
PB
4452 expand_expr (value, const0_rtx, VOIDmode, 0);
4453 emit_queue ();
4454 }
4455 store_expr (lo_index, index_r, 0);
4456 loop = expand_start_loop (0);
4457
0f41302f 4458 /* Assign value to element index. */
fed3cef0
RK
4459 position
4460 = convert (ssizetype,
4461 fold (build (MINUS_EXPR, TREE_TYPE (index),
4462 index, TYPE_MIN_VALUE (domain))));
4463 position = size_binop (MULT_EXPR, position,
4464 convert (ssizetype,
4465 TYPE_SIZE_UNIT (elttype)));
4466
e1a43f73 4467 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4468 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
e1a43f73
PB
4469 xtarget = change_address (target, mode, addr);
4470 if (TREE_CODE (value) == CONSTRUCTOR)
b7010412
RK
4471 store_constructor (value, xtarget, align, cleared,
4472 bitsize / BITS_PER_UNIT);
e1a43f73
PB
4473 else
4474 store_expr (value, xtarget, 0);
4475
4476 expand_exit_loop_if_false (loop,
4477 build (LT_EXPR, integer_type_node,
4478 index, hi_index));
4479
4480 expand_increment (build (PREINCREMENT_EXPR,
4481 TREE_TYPE (index),
7b8b9722 4482 index, integer_one_node), 0, 0);
e1a43f73
PB
4483 expand_end_loop ();
4484 emit_label (loop_end);
e1a43f73
PB
4485 }
4486 }
19caa751
RK
4487 else if ((index != 0 && ! host_integerp (index, 0))
4488 || ! host_integerp (TYPE_SIZE (elttype), 1))
03dc44a6 4489 {
e1a43f73 4490 rtx pos_rtx, addr;
03dc44a6
RS
4491 tree position;
4492
5b6c44ff 4493 if (index == 0)
fed3cef0 4494 index = ssize_int (1);
5b6c44ff 4495
e1a43f73 4496 if (minelt)
fed3cef0
RK
4497 index = convert (ssizetype,
4498 fold (build (MINUS_EXPR, index,
4499 TYPE_MIN_VALUE (domain))));
19caa751 4500
fed3cef0
RK
4501 position = size_binop (MULT_EXPR, index,
4502 convert (ssizetype,
4503 TYPE_SIZE_UNIT (elttype)));
03dc44a6 4504 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4505 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
03dc44a6 4506 xtarget = change_address (target, mode, addr);
e1a43f73 4507 store_expr (value, xtarget, 0);
03dc44a6
RS
4508 }
4509 else
4510 {
4511 if (index != 0)
19caa751
RK
4512 bitpos = ((tree_low_cst (index, 0) - minelt)
4513 * tree_low_cst (TYPE_SIZE (elttype), 1));
03dc44a6 4514 else
19caa751
RK
4515 bitpos = (i * tree_low_cst (TYPE_SIZE (elttype), 1));
4516
c5c76735
JL
4517 store_constructor_field (target, bitsize, bitpos, mode, value,
4518 type, align, cleared);
03dc44a6 4519 }
bbf6f052
RK
4520 }
4521 }
19caa751
RK
4522
 4523 /* Set constructor assignments. */
071a6595
PB
4524 else if (TREE_CODE (type) == SET_TYPE)
4525 {
e1a43f73 4526 tree elt = CONSTRUCTOR_ELTS (exp);
19caa751 4527 unsigned HOST_WIDE_INT nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
4528 tree domain = TYPE_DOMAIN (type);
4529 tree domain_min, domain_max, bitlength;
4530
9faa82d8 4531 /* The default implementation strategy is to extract the constant
071a6595
PB
4532 parts of the constructor, use that to initialize the target,
4533 and then "or" in whatever non-constant ranges we need in addition.
4534
4535 If a large set is all zero or all ones, it is
4536 probably better to set it using memset (if available) or bzero.
4537 Also, if a large set has just a single range, it may also be
 4538 better to first clear the whole set (using
0f41302f 4539 bzero/memset), and then set the bits we want. */
071a6595 4540
0f41302f 4541 /* Check for all zeros. */
9376fcd6 4542 if (elt == NULL_TREE && size > 0)
071a6595 4543 {
e1a43f73 4544 if (!cleared)
19caa751 4545 clear_storage (target, GEN_INT (size), TYPE_ALIGN (type));
071a6595
PB
4546 return;
4547 }
4548
071a6595
PB
4549 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4550 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4551 bitlength = size_binop (PLUS_EXPR,
fed3cef0
RK
4552 size_diffop (domain_max, domain_min),
4553 ssize_int (1));
071a6595 4554
19caa751 4555 nbits = tree_low_cst (bitlength, 1);
e1a43f73
PB
4556
4557 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4558 are "complicated" (more than one range), initialize (the
4559 constant parts) by copying from a constant. */
4560 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4561 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 4562 {
19caa751 4563 unsigned int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
b4ee5a72 4564 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 4565 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72 4566 HOST_WIDE_INT word = 0;
19caa751
RK
4567 unsigned int bit_pos = 0;
4568 unsigned int ibit = 0;
4569 unsigned int offset = 0; /* In bytes from beginning of set. */
4570
e1a43f73 4571 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 4572 for (;;)
071a6595 4573 {
b4ee5a72
PB
4574 if (bit_buffer[ibit])
4575 {
b09f3348 4576 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
4577 word |= (1 << (set_word_size - 1 - bit_pos));
4578 else
4579 word |= 1 << bit_pos;
4580 }
19caa751 4581
b4ee5a72
PB
4582 bit_pos++; ibit++;
4583 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 4584 {
e1a43f73
PB
4585 if (word != 0 || ! cleared)
4586 {
4587 rtx datum = GEN_INT (word);
4588 rtx to_rtx;
19caa751 4589
0f41302f
MS
4590 /* The assumption here is that it is safe to use
4591 XEXP if the set is multi-word, but not if
4592 it's single-word. */
e1a43f73
PB
4593 if (GET_CODE (target) == MEM)
4594 {
4595 to_rtx = plus_constant (XEXP (target, 0), offset);
4596 to_rtx = change_address (target, mode, to_rtx);
4597 }
4598 else if (offset == 0)
4599 to_rtx = target;
4600 else
4601 abort ();
4602 emit_move_insn (to_rtx, datum);
4603 }
19caa751 4604
b4ee5a72
PB
4605 if (ibit == nbits)
4606 break;
4607 word = 0;
4608 bit_pos = 0;
4609 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
4610 }
4611 }
071a6595 4612 }
e1a43f73 4613 else if (!cleared)
19caa751
RK
4614 /* Don't bother clearing storage if the set is all ones. */
4615 if (TREE_CHAIN (elt) != NULL_TREE
4616 || (TREE_PURPOSE (elt) == NULL_TREE
4617 ? nbits != 1
4618 : ( ! host_integerp (TREE_VALUE (elt), 0)
4619 || ! host_integerp (TREE_PURPOSE (elt), 0)
4620 || (tree_low_cst (TREE_VALUE (elt), 0)
4621 - tree_low_cst (TREE_PURPOSE (elt), 0) + 1
4622 != (HOST_WIDE_INT) nbits))))
4623 clear_storage (target, expr_size (exp), TYPE_ALIGN (type));
e1a43f73
PB
4624
4625 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595
PB
4626 {
 4627 /* Start of range of element, or NULL. */
4628 tree startbit = TREE_PURPOSE (elt);
 4629 /* End of range of element, or element value. */
4630 tree endbit = TREE_VALUE (elt);
381127e8 4631#ifdef TARGET_MEM_FUNCTIONS
071a6595 4632 HOST_WIDE_INT startb, endb;
381127e8 4633#endif
19caa751 4634 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
071a6595
PB
4635
4636 bitlength_rtx = expand_expr (bitlength,
19caa751 4637 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
071a6595
PB
4638
 4639 /* Handle a non-range tuple element like [ expr ]. */
4640 if (startbit == NULL_TREE)
4641 {
4642 startbit = save_expr (endbit);
4643 endbit = startbit;
4644 }
19caa751 4645
071a6595
PB
4646 startbit = convert (sizetype, startbit);
4647 endbit = convert (sizetype, endbit);
4648 if (! integer_zerop (domain_min))
4649 {
4650 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4651 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4652 }
4653 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4654 EXPAND_CONST_ADDRESS);
4655 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4656 EXPAND_CONST_ADDRESS);
4657
4658 if (REG_P (target))
4659 {
4660 targetx = assign_stack_temp (GET_MODE (target),
4661 GET_MODE_SIZE (GET_MODE (target)),
4662 0);
4663 emit_move_insn (targetx, target);
4664 }
19caa751 4665
071a6595
PB
4666 else if (GET_CODE (target) == MEM)
4667 targetx = target;
4668 else
4669 abort ();
4670
4671#ifdef TARGET_MEM_FUNCTIONS
4672 /* Optimization: If startbit and endbit are
9faa82d8 4673 constants divisible by BITS_PER_UNIT,
0f41302f 4674 call memset instead. */
071a6595
PB
4675 if (TREE_CODE (startbit) == INTEGER_CST
4676 && TREE_CODE (endbit) == INTEGER_CST
4677 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 4678 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 4679 {
071a6595
PB
4680 emit_library_call (memset_libfunc, 0,
4681 VOIDmode, 3,
e1a43f73
PB
4682 plus_constant (XEXP (targetx, 0),
4683 startb / BITS_PER_UNIT),
071a6595 4684 Pmode,
3b6f75e2 4685 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 4686 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 4687 TYPE_MODE (sizetype));
071a6595
PB
4688 }
4689 else
4690#endif
19caa751
RK
4691 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
4692 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4693 bitlength_rtx, TYPE_MODE (sizetype),
4694 startbit_rtx, TYPE_MODE (sizetype),
4695 endbit_rtx, TYPE_MODE (sizetype));
4696
071a6595
PB
4697 if (REG_P (target))
4698 emit_move_insn (target, targetx);
4699 }
4700 }
bbf6f052
RK
4701
4702 else
4703 abort ();
4704}
4705
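/* An illustrative sketch, not part of expr.c, of the word-packing loop
   in the SET_TYPE case above: constant bits accumulate into a host word
   that is flushed whenever it fills up or the bits run out.  This shows
   the little-endian arm only; BYTES_BIG_ENDIAN mirrors the shift.  The
   output array is hypothetical.  */

static void
pack_set_bits_example (const char *bit_buffer, unsigned int nbits,
                       unsigned long *words)
{
  unsigned long word = 0;
  unsigned int bit_pos = 0, ibit = 0, w = 0;

  for (;;)
    {
      if (bit_buffer[ibit])
        word |= 1UL << bit_pos;
      bit_pos++, ibit++;
      if (bit_pos >= 8 * sizeof word || ibit == nbits)
        {
          words[w++] = word;	/* one emit_move_insn per filled word */
          if (ibit == nbits)
            break;
          word = 0;
          bit_pos = 0;
        }
    }
}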
4706/* Store the value of EXP (an expression tree)
4707 into a subfield of TARGET which has mode MODE and occupies
4708 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4709 If MODE is VOIDmode, it means that we are storing into a bit-field.
4710
4711 If VALUE_MODE is VOIDmode, return nothing in particular.
4712 UNSIGNEDP is not used in this case.
4713
4714 Otherwise, return an rtx for the value stored. This rtx
4715 has mode VALUE_MODE if that is convenient to do.
4716 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4717
19caa751 4718 ALIGN is the alignment that TARGET is known to have.
ece32014
MM
4719 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4720
4721 ALIAS_SET is the alias set for the destination. This value will
4722 (in general) be different from that for TARGET, since TARGET is a
4723 reference to the containing structure. */
bbf6f052
RK
4724
4725static rtx
4726store_field (target, bitsize, bitpos, mode, exp, value_mode,
ece32014 4727 unsignedp, align, total_size, alias_set)
bbf6f052 4728 rtx target;
770ae6cc
RK
4729 HOST_WIDE_INT bitsize;
4730 HOST_WIDE_INT bitpos;
bbf6f052
RK
4731 enum machine_mode mode;
4732 tree exp;
4733 enum machine_mode value_mode;
4734 int unsignedp;
729a2125 4735 unsigned int align;
770ae6cc 4736 HOST_WIDE_INT total_size;
ece32014 4737 int alias_set;
bbf6f052 4738{
906c4e36 4739 HOST_WIDE_INT width_mask = 0;
bbf6f052 4740
e9a25f70
JL
4741 if (TREE_CODE (exp) == ERROR_MARK)
4742 return const0_rtx;
4743
906c4e36
RK
4744 if (bitsize < HOST_BITS_PER_WIDE_INT)
4745 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
4746
4747 /* If we are storing into an unaligned field of an aligned union that is
4748 in a register, we may have the mode of TARGET being an integer mode but
4749 MODE == BLKmode. In that case, get an aligned object whose size and
4750 alignment are the same as TARGET and store TARGET into it (we can avoid
4751 the store if the field being stored is the entire width of TARGET). Then
4752 call ourselves recursively to store the field into a BLKmode version of
4753 that object. Finally, load from the object into TARGET. This is not
4754 very efficient in general, but should only be slightly more expensive
4755 than the otherwise-required unaligned accesses. Perhaps this can be
4756 cleaned up later. */
4757
4758 if (mode == BLKmode
4759 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4760 {
4761 rtx object = assign_stack_temp (GET_MODE (target),
4762 GET_MODE_SIZE (GET_MODE (target)), 0);
4763 rtx blk_object = copy_rtx (object);
4764
c6df88cb
MM
4765 MEM_SET_IN_STRUCT_P (object, 1);
4766 MEM_SET_IN_STRUCT_P (blk_object, 1);
bbf6f052
RK
4767 PUT_MODE (blk_object, BLKmode);
4768
4769 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4770 emit_move_insn (object, target);
4771
4772 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
ece32014 4773 align, total_size, alias_set);
bbf6f052 4774
46093b97
RS
4775 /* Even though we aren't returning target, we need to
4776 give it the updated value. */
bbf6f052
RK
4777 emit_move_insn (target, object);
4778
46093b97 4779 return blk_object;
bbf6f052 4780 }
c3b247b4
JM
4781
4782 if (GET_CODE (target) == CONCAT)
4783 {
4784 /* We're storing into a struct containing a single __complex. */
4785
4786 if (bitpos != 0)
4787 abort ();
4788 return store_expr (exp, target, 0);
4789 }
bbf6f052
RK
4790
4791 /* If the structure is in a register or if the component
4792 is a bit field, we cannot use addressing to access it.
4793 Use bit-field techniques or SUBREG to store in it. */
4794
4fa52007 4795 if (mode == VOIDmode
6ab06cbb
JW
4796 || (mode != BLKmode && ! direct_store[(int) mode]
4797 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
4798 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
4fa52007 4799 || GET_CODE (target) == REG
c980ac49 4800 || GET_CODE (target) == SUBREG
ccc98036
RS
4801 /* If the field isn't aligned enough to store as an ordinary memref,
4802 store it as a bit field. */
e1565e65 4803 || (mode != BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
19caa751 4804 && (align < GET_MODE_ALIGNMENT (mode)
14a774a9 4805 || bitpos % GET_MODE_ALIGNMENT (mode)))
e1565e65 4806 || (mode == BLKmode && SLOW_UNALIGNED_ACCESS (mode, align)
19caa751 4807 && (TYPE_ALIGN (TREE_TYPE (exp)) > align
14a774a9
RK
4808 || bitpos % TYPE_ALIGN (TREE_TYPE (exp)) != 0))
4809 /* If the RHS and field are a constant size and the size of the
4810 RHS isn't the same size as the bitfield, we must use bitfield
4811 operations. */
05bccae2
RK
4812 || (bitsize >= 0
4813 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
4814 && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
bbf6f052 4815 {
906c4e36 4816 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 4817
ef19912d
RK
4818 /* If BITSIZE is narrower than the size of the type of EXP
4819 we will be narrowing TEMP. Normally, what's wanted are the
4820 low-order bits. However, if EXP's type is a record and this is
 4821 a big-endian machine, we want the upper BITSIZE bits. */
4822 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4823 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4824 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4825 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4826 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4827 - bitsize),
4828 temp, 1);
4829
bbd6cf73
RK
4830 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4831 MODE. */
4832 if (mode != VOIDmode && mode != BLKmode
4833 && mode != TYPE_MODE (TREE_TYPE (exp)))
4834 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4835
a281e72d
RK
4836 /* If the modes of TARGET and TEMP are both BLKmode, both
4837 must be in memory and BITPOS must be aligned on a byte
4838 boundary. If so, we simply do a block copy. */
4839 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4840 {
19caa751 4841 unsigned int exp_align = expr_align (exp);
729a2125 4842
a281e72d
RK
4843 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4844 || bitpos % BITS_PER_UNIT != 0)
4845 abort ();
4846
0086427c
RK
4847 target = change_address (target, VOIDmode,
4848 plus_constant (XEXP (target, 0),
a281e72d
RK
4849 bitpos / BITS_PER_UNIT));
4850
729a2125
RK
4851 /* Make sure that ALIGN is no stricter than the alignment of EXP. */
4852 align = MIN (exp_align, align);
c297a34e 4853
14a774a9 4854 /* Find an alignment that is consistent with the bit position. */
19caa751 4855 while ((bitpos % align) != 0)
14a774a9
RK
4856 align >>= 1;
4857
a281e72d
RK
4858 emit_block_move (target, temp,
4859 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4860 / BITS_PER_UNIT),
14a774a9 4861 align);
a281e72d
RK
4862
4863 return value_mode == VOIDmode ? const0_rtx : target;
4864 }
4865
bbf6f052
RK
4866 /* Store the value in the bitfield. */
4867 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4868 if (value_mode != VOIDmode)
4869 {
4870 /* The caller wants an rtx for the value. */
4871 /* If possible, avoid refetching from the bitfield itself. */
4872 if (width_mask != 0
4873 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 4874 {
9074de27 4875 tree count;
5c4d7cfb 4876 enum machine_mode tmode;
86a2c12a 4877
5c4d7cfb
RS
4878 if (unsignedp)
4879 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4880 tmode = GET_MODE (temp);
86a2c12a
RS
4881 if (tmode == VOIDmode)
4882 tmode = value_mode;
5c4d7cfb
RS
4883 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4884 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4885 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4886 }
bbf6f052 4887 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
4888 NULL_RTX, value_mode, 0, align,
4889 total_size);
bbf6f052
RK
4890 }
4891 return const0_rtx;
4892 }
4893 else
4894 {
4895 rtx addr = XEXP (target, 0);
4896 rtx to_rtx;
4897
4898 /* If a value is wanted, it must be the lhs;
4899 so make the address stable for multiple use. */
4900
4901 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4902 && ! CONSTANT_ADDRESS_P (addr)
4903 /* A frame-pointer reference is already stable. */
4904 && ! (GET_CODE (addr) == PLUS
4905 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4906 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4907 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4908 addr = copy_to_reg (addr);
4909
4910 /* Now build a reference to just the desired component. */
4911
effbcc6a
RK
4912 to_rtx = copy_rtx (change_address (target, mode,
4913 plus_constant (addr,
4914 (bitpos
4915 / BITS_PER_UNIT))));
c6df88cb 4916 MEM_SET_IN_STRUCT_P (to_rtx, 1);
ece32014 4917 MEM_ALIAS_SET (to_rtx) = alias_set;
bbf6f052
RK
4918
4919 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4920 }
4921}
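/* Illustrative note, not part of the original source: the bit-field
   branch above is the path an ordinary C store such as

       struct flags { unsigned ready : 1; unsigned count : 7; };

       void
       set_count (struct flags *p, unsigned v)
       {
         p->count = v;
       }

   compiles through.  The bit-field `count' has VOIDmode and bitsize 7,
   so TEMP is expanded, narrowed or mode-converted as needed, and handed
   to store_bit_field rather than stored through an ordinary memref.  */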
4922\f
4923/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4924 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 4925 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
4926
4927 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4928 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
4929 If the position of the field is variable, we store a tree
4930 giving the variable offset (in units) in *POFFSET.
4931 This offset is in addition to the bit position.
4932 If the position is not variable, we store 0 in *POFFSET.
19caa751 4933 We set *PALIGNMENT to the alignment of the address that will be
839c4796
RK
4934 computed. This is the alignment of the thing we return if *POFFSET
4935    is zero, but can be less strictly aligned if *POFFSET is nonzero.
bbf6f052
RK
4936
4937 If any of the extraction expressions is volatile,
4938 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4939
4940 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4941 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
4942 is redundant.
4943
4944 If the field describes a variable-sized object, *PMODE is set to
4945 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
839c4796 4946 this case, but the address of the object can be found. */
bbf6f052
RK
4947
4948tree
4969d05d 4949get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
839c4796 4950 punsignedp, pvolatilep, palignment)
bbf6f052 4951 tree exp;
770ae6cc
RK
4952 HOST_WIDE_INT *pbitsize;
4953 HOST_WIDE_INT *pbitpos;
7bb0943f 4954 tree *poffset;
bbf6f052
RK
4955 enum machine_mode *pmode;
4956 int *punsignedp;
4957 int *pvolatilep;
729a2125 4958 unsigned int *palignment;
bbf6f052
RK
4959{
4960 tree size_tree = 0;
4961 enum machine_mode mode = VOIDmode;
fed3cef0 4962 tree offset = size_zero_node;
770ae6cc 4963 tree bit_offset = bitsize_zero_node;
c84e2712 4964 unsigned int alignment = BIGGEST_ALIGNMENT;
770ae6cc 4965 tree tem;
bbf6f052 4966
770ae6cc
RK
4967 /* First get the mode, signedness, and size. We do this from just the
4968 outermost expression. */
bbf6f052
RK
4969 if (TREE_CODE (exp) == COMPONENT_REF)
4970 {
4971 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4972 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4973 mode = DECL_MODE (TREE_OPERAND (exp, 1));
770ae6cc 4974
bbf6f052
RK
4975 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4976 }
4977 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4978 {
4979 size_tree = TREE_OPERAND (exp, 1);
4980 *punsignedp = TREE_UNSIGNED (exp);
4981 }
4982 else
4983 {
4984 mode = TYPE_MODE (TREE_TYPE (exp));
770ae6cc
RK
4985 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4986
ab87f8c8
JL
4987 if (mode == BLKmode)
4988 size_tree = TYPE_SIZE (TREE_TYPE (exp));
770ae6cc
RK
4989 else
4990 *pbitsize = GET_MODE_BITSIZE (mode);
bbf6f052
RK
4991 }
4992
770ae6cc 4993 if (size_tree != 0)
bbf6f052 4994 {
770ae6cc 4995 if (! host_integerp (size_tree, 1))
e7c33f54
RK
4996 mode = BLKmode, *pbitsize = -1;
4997 else
770ae6cc 4998 *pbitsize = tree_low_cst (size_tree, 1);
bbf6f052
RK
4999 }
5000
5001 /* Compute cumulative bit-offset for nested component-refs and array-refs,
5002 and find the ultimate containing object. */
bbf6f052
RK
5003 while (1)
5004 {
770ae6cc
RK
5005 if (TREE_CODE (exp) == BIT_FIELD_REF)
5006 bit_offset = size_binop (PLUS_EXPR, bit_offset, TREE_OPERAND (exp, 2));
5007 else if (TREE_CODE (exp) == COMPONENT_REF)
bbf6f052 5008 {
770ae6cc
RK
5009 tree field = TREE_OPERAND (exp, 1);
5010 tree this_offset = DECL_FIELD_OFFSET (field);
bbf6f052 5011
e7f3c83f
RK
5012 /* If this field hasn't been filled in yet, don't go
5013 past it. This should only happen when folding expressions
5014 made during type construction. */
770ae6cc 5015 if (this_offset == 0)
e7f3c83f 5016 break;
770ae6cc
RK
5017 else if (! TREE_CONSTANT (this_offset)
5018 && contains_placeholder_p (this_offset))
5019 this_offset = build (WITH_RECORD_EXPR, sizetype, this_offset, exp);
e7f3c83f 5020
770ae6cc
RK
5021 offset = size_binop (PLUS_EXPR, offset, DECL_FIELD_OFFSET (field));
5022 bit_offset = size_binop (PLUS_EXPR, bit_offset,
5023 DECL_FIELD_BIT_OFFSET (field));
e6d8c385 5024
770ae6cc
RK
5025 if (! host_integerp (offset, 0))
5026 alignment = MIN (alignment, DECL_OFFSET_ALIGN (field));
bbf6f052 5027 }
742920c7 5028 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 5029 {
742920c7
RK
5030 tree index = TREE_OPERAND (exp, 1);
5031 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
770ae6cc 5032 tree low_bound = (domain ? TYPE_MIN_VALUE (domain) : 0);
742920c7 5033
770ae6cc
RK
5034 /* We assume all arrays have sizes that are a multiple of a byte.
5035 First subtract the lower bound, if any, in the type of the
5036 index, then convert to sizetype and multiply by the size of the
5037 array element. */
5038 if (low_bound != 0 && ! integer_zerop (low_bound))
5039 index = fold (build (MINUS_EXPR, TREE_TYPE (index),
5040 index, low_bound));
f8dac6eb 5041
770ae6cc
RK
5042 if (! TREE_CONSTANT (index)
5043 && contains_placeholder_p (index))
5044 index = build (WITH_RECORD_EXPR, TREE_TYPE (index), index, exp);
742920c7 5045
770ae6cc
RK
5046 offset = size_binop (PLUS_EXPR, offset,
5047 size_binop (MULT_EXPR,
5048 convert (sizetype, index),
5049 TYPE_SIZE_UNIT (TREE_TYPE (exp))));
bbf6f052
RK
5050 }
5051 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
5052 && ! ((TREE_CODE (exp) == NOP_EXPR
5053 || TREE_CODE (exp) == CONVERT_EXPR)
5054 && (TYPE_MODE (TREE_TYPE (exp))
5055 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
5056 break;
7bb0943f
RS
5057
5058 /* If any reference in the chain is volatile, the effect is volatile. */
5059 if (TREE_THIS_VOLATILE (exp))
5060 *pvolatilep = 1;
839c4796
RK
5061
5062 /* If the offset is non-constant already, then we can't assume any
5063 alignment more than the alignment here. */
770ae6cc 5064 if (! TREE_CONSTANT (offset))
839c4796
RK
5065 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5066
bbf6f052
RK
5067 exp = TREE_OPERAND (exp, 0);
5068 }
5069
2f939d94 5070 if (DECL_P (exp))
839c4796 5071 alignment = MIN (alignment, DECL_ALIGN (exp));
9293498f 5072 else if (TREE_TYPE (exp) != 0)
839c4796
RK
5073 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
5074
770ae6cc
RK
5075 /* If OFFSET is constant, see if we can return the whole thing as a
5076 constant bit position. Otherwise, split it up. */
5077 if (host_integerp (offset, 0)
5078 && 0 != (tem = size_binop (MULT_EXPR, convert (bitsizetype, offset),
5079 bitsize_unit_node))
5080 && 0 != (tem = size_binop (PLUS_EXPR, tem, bit_offset))
5081 && host_integerp (tem, 0))
5082 *pbitpos = tree_low_cst (tem, 0), *poffset = 0;
5083 else
5084 *pbitpos = tree_low_cst (bit_offset, 0), *poffset = offset;
b50d17a1 5085
bbf6f052 5086 *pmode = mode;
19caa751 5087 *palignment = alignment;
bbf6f052
RK
5088 return exp;
5089}
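/* Illustrative sketch, not part of the original source: on a typical
   32-bit target, given

       struct S { int pad; short f[10]; };

   a reference `s->f[i]' with variable `i' makes get_inner_reference
   return the INDIRECT_REF of `s' and set *PBITSIZE to 16, *PMODE to
   HImode, *PBITPOS to 0, and *POFFSET to a tree for `4 + i*2' (field
   offset plus scaled index, in bytes).  Were `i' constant, the whole
   displacement would fold into *PBITPOS and *POFFSET would be 0.  */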
921b3427
RK
5090
5091 /* Subroutine of expand_expr: compute memory_usage from modifier.  */
770ae6cc 5092
921b3427
RK
5093static enum memory_use_mode
5094get_memory_usage_from_modifier (modifier)
5095 enum expand_modifier modifier;
5096{
5097 switch (modifier)
5098 {
5099 case EXPAND_NORMAL:
e5e809f4 5100 case EXPAND_SUM:
921b3427
RK
5101 return MEMORY_USE_RO;
5102 break;
5103 case EXPAND_MEMORY_USE_WO:
5104 return MEMORY_USE_WO;
5105 break;
5106 case EXPAND_MEMORY_USE_RW:
5107 return MEMORY_USE_RW;
5108 break;
921b3427 5109 case EXPAND_MEMORY_USE_DONT:
e5e809f4
JL
5110 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
5111 MEMORY_USE_DONT, because they are modifiers to a call of
5112 expand_expr in the ADDR_EXPR case of expand_expr. */
921b3427 5113 case EXPAND_CONST_ADDRESS:
e5e809f4 5114 case EXPAND_INITIALIZER:
921b3427
RK
5115 return MEMORY_USE_DONT;
5116 case EXPAND_MEMORY_USE_BAD:
5117 default:
5118 abort ();
5119 }
5120}
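/* Example, added for illustration: under -fcheck-memory-usage an
   assignment `x = y' expands the store with EXPAND_MEMORY_USE_WO and
   the read with EXPAND_NORMAL, so the VAR_DECL and INDIRECT_REF cases
   below emit, roughly,

       chkr_check_addr (&x, sizeof (x), MEMORY_USE_WO);
       chkr_check_addr (&y, sizeof (y), MEMORY_USE_RO);

   before the actual move, via chkr_check_addr_libfunc.  */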
bbf6f052
RK
5121\f
5122/* Given an rtx VALUE that may contain additions and multiplications,
5123 return an equivalent value that just refers to a register or memory.
5124 This is done by generating instructions to perform the arithmetic
c45a13a6
RK
5125 and returning a pseudo-register containing the value.
5126
5127 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
5128
5129rtx
5130force_operand (value, target)
5131 rtx value, target;
5132{
5133 register optab binoptab = 0;
5134 /* Use a temporary to force order of execution of calls to
5135 `force_operand'. */
5136 rtx tmp;
5137 register rtx op2;
5138 /* Use subtarget as the target for operand 0 of a binary operation. */
5139 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5140
8b015896
RH
5141 /* Check for a PIC address load. */
5142 if (flag_pic
5143 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
5144 && XEXP (value, 0) == pic_offset_table_rtx
5145 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
5146 || GET_CODE (XEXP (value, 1)) == LABEL_REF
5147 || GET_CODE (XEXP (value, 1)) == CONST))
5148 {
5149 if (!subtarget)
5150 subtarget = gen_reg_rtx (GET_MODE (value));
5151 emit_move_insn (subtarget, value);
5152 return subtarget;
5153 }
5154
bbf6f052
RK
5155 if (GET_CODE (value) == PLUS)
5156 binoptab = add_optab;
5157 else if (GET_CODE (value) == MINUS)
5158 binoptab = sub_optab;
5159 else if (GET_CODE (value) == MULT)
5160 {
5161 op2 = XEXP (value, 1);
5162 if (!CONSTANT_P (op2)
5163 && !(GET_CODE (op2) == REG && op2 != subtarget))
5164 subtarget = 0;
5165 tmp = force_operand (XEXP (value, 0), subtarget);
5166 return expand_mult (GET_MODE (value), tmp,
906c4e36 5167 force_operand (op2, NULL_RTX),
bbf6f052
RK
5168 target, 0);
5169 }
5170
5171 if (binoptab)
5172 {
5173 op2 = XEXP (value, 1);
5174 if (!CONSTANT_P (op2)
5175 && !(GET_CODE (op2) == REG && op2 != subtarget))
5176 subtarget = 0;
5177 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
5178 {
5179 binoptab = add_optab;
5180 op2 = negate_rtx (GET_MODE (value), op2);
5181 }
5182
5183 /* Check for an addition with OP2 a constant integer and our first
5184 operand a PLUS of a virtual register and something else. In that
5185 case, we want to emit the sum of the virtual register and the
5186 constant first and then add the other value. This allows virtual
5187 register instantiation to simply modify the constant rather than
5188 creating another one around this addition. */
5189 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
5190 && GET_CODE (XEXP (value, 0)) == PLUS
5191 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
5192 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
5193 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
5194 {
5195 rtx temp = expand_binop (GET_MODE (value), binoptab,
5196 XEXP (XEXP (value, 0), 0), op2,
5197 subtarget, 0, OPTAB_LIB_WIDEN);
5198 return expand_binop (GET_MODE (value), binoptab, temp,
5199 force_operand (XEXP (XEXP (value, 0), 1), 0),
5200 target, 0, OPTAB_LIB_WIDEN);
5201 }
5202
5203 tmp = force_operand (XEXP (value, 0), subtarget);
5204 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 5205 force_operand (op2, NULL_RTX),
bbf6f052 5206 target, 0, OPTAB_LIB_WIDEN);
8008b228 5207 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
5208 because the only operations we are expanding here are signed ones. */
5209 }
5210 return value;
5211}
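/* Illustrative sketch, not part of the original source: handed the
   address arithmetic

       (plus (mult (reg 42) (const_int 4)) (reg 43))

   force_operand recursively forces the MULT through expand_mult, then
   emits the addition with expand_binop, returning a fresh pseudo that
   holds the sum.  A value that is already a REG, MEM or constant comes
   back unchanged.  */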
5212\f
5213/* Subroutine of expand_expr:
5214 save the non-copied parts (LIST) of an expr (LHS), and return a list
5215 which can restore these values to their previous values,
5216 should something modify their storage. */
5217
5218static tree
5219save_noncopied_parts (lhs, list)
5220 tree lhs;
5221 tree list;
5222{
5223 tree tail;
5224 tree parts = 0;
5225
5226 for (tail = list; tail; tail = TREE_CHAIN (tail))
5227 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5228 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
5229 else
5230 {
5231 tree part = TREE_VALUE (tail);
5232 tree part_type = TREE_TYPE (part);
906c4e36 5233 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
06089a8b 5234 rtx target = assign_temp (part_type, 0, 1, 1);
bbf6f052 5235 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 5236 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 5237 parts = tree_cons (to_be_saved,
906c4e36
RK
5238 build (RTL_EXPR, part_type, NULL_TREE,
5239 (tree) target),
bbf6f052
RK
5240 parts);
5241 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5242 }
5243 return parts;
5244}
5245
5246/* Subroutine of expand_expr:
5247 record the non-copied parts (LIST) of an expr (LHS), and return a list
5248 which specifies the initial values of these parts. */
5249
5250static tree
5251init_noncopied_parts (lhs, list)
5252 tree lhs;
5253 tree list;
5254{
5255 tree tail;
5256 tree parts = 0;
5257
5258 for (tail = list; tail; tail = TREE_CHAIN (tail))
5259 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5260 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
c15398de 5261 else if (TREE_PURPOSE (tail))
bbf6f052
RK
5262 {
5263 tree part = TREE_VALUE (tail);
5264 tree part_type = TREE_TYPE (part);
906c4e36 5265 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
5266 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5267 }
5268 return parts;
5269}
5270
5271/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
5272 EXP can reference X, which is being modified. TOP_P is nonzero if this
5273 call is going to be used to determine whether we need a temporary
ff439b5f
CB
5274 for EXP, as opposed to a recursive call to this function.
5275
5276 It is always safe for this routine to return zero since it merely
5277 searches for optimization opportunities. */
bbf6f052
RK
5278
5279static int
e5e809f4 5280safe_from_p (x, exp, top_p)
bbf6f052
RK
5281 rtx x;
5282 tree exp;
e5e809f4 5283 int top_p;
bbf6f052
RK
5284{
5285 rtx exp_rtl = 0;
5286 int i, nops;
ff439b5f
CB
5287 static int save_expr_count;
5288 static int save_expr_size = 0;
5289 static tree *save_expr_rewritten;
5290 static tree save_expr_trees[256];
bbf6f052 5291
6676e72f
RK
5292 if (x == 0
5293 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
5294 have no way of allocating temporaries of variable size
5295 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5296 So we assume here that something at a higher level has prevented a
f4510f37 5297 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4 5298 do this when X is BLKmode and when we are at the top level. */
d0f062fb 5299 || (top_p && TREE_TYPE (exp) != 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
f4510f37 5300 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
5301 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5302 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5303 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5304 != INTEGER_CST)
f4510f37 5305 && GET_MODE (x) == BLKmode))
bbf6f052
RK
5306 return 1;
5307
ff439b5f
CB
5308 if (top_p && save_expr_size == 0)
5309 {
5310 int rtn;
5311
5312 save_expr_count = 0;
5313 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5314 save_expr_rewritten = &save_expr_trees[0];
5315
5316 rtn = safe_from_p (x, exp, 1);
5317
5318 for (i = 0; i < save_expr_count; ++i)
5319 {
5320 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5321 abort ();
5322 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5323 }
5324
5325 save_expr_size = 0;
5326
5327 return rtn;
5328 }
5329
bbf6f052
RK
5330 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5331 find the underlying pseudo. */
5332 if (GET_CODE (x) == SUBREG)
5333 {
5334 x = SUBREG_REG (x);
5335 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5336 return 0;
5337 }
5338
5339 /* If X is a location in the outgoing argument area, it is always safe. */
5340 if (GET_CODE (x) == MEM
5341 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5342 || (GET_CODE (XEXP (x, 0)) == PLUS
5343 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5344 return 1;
5345
5346 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5347 {
5348 case 'd':
5349 exp_rtl = DECL_RTL (exp);
5350 break;
5351
5352 case 'c':
5353 return 1;
5354
5355 case 'x':
5356 if (TREE_CODE (exp) == TREE_LIST)
f32fd778 5357 return ((TREE_VALUE (exp) == 0
e5e809f4 5358 || safe_from_p (x, TREE_VALUE (exp), 0))
bbf6f052 5359 && (TREE_CHAIN (exp) == 0
e5e809f4 5360 || safe_from_p (x, TREE_CHAIN (exp), 0)));
ff439b5f
CB
5361 else if (TREE_CODE (exp) == ERROR_MARK)
5362 return 1; /* An already-visited SAVE_EXPR? */
bbf6f052
RK
5363 else
5364 return 0;
5365
5366 case '1':
e5e809f4 5367 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
bbf6f052
RK
5368
5369 case '2':
5370 case '<':
e5e809f4
JL
5371 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5372 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
bbf6f052
RK
5373
5374 case 'e':
5375 case 'r':
5376 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5377 the expression. If it is set, we conflict iff we are that rtx or
5378 both are in memory. Otherwise, we check all operands of the
5379 expression recursively. */
5380
5381 switch (TREE_CODE (exp))
5382 {
5383 case ADDR_EXPR:
e44842fe 5384 return (staticp (TREE_OPERAND (exp, 0))
e5e809f4
JL
5385 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5386 || TREE_STATIC (exp));
bbf6f052
RK
5387
5388 case INDIRECT_REF:
5389 if (GET_CODE (x) == MEM)
5390 return 0;
5391 break;
5392
5393 case CALL_EXPR:
5394 exp_rtl = CALL_EXPR_RTL (exp);
5395 if (exp_rtl == 0)
5396 {
5397 /* Assume that the call will clobber all hard registers and
5398 all of memory. */
5399 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5400 || GET_CODE (x) == MEM)
5401 return 0;
5402 }
5403
5404 break;
5405
5406 case RTL_EXPR:
3bb5826a
RK
5407 /* If a sequence exists, we would have to scan every instruction
5408 in the sequence to see if it was safe. This is probably not
5409 worthwhile. */
5410 if (RTL_EXPR_SEQUENCE (exp))
bbf6f052
RK
5411 return 0;
5412
3bb5826a 5413 exp_rtl = RTL_EXPR_RTL (exp);
bbf6f052
RK
5414 break;
5415
5416 case WITH_CLEANUP_EXPR:
5417 exp_rtl = RTL_EXPR_RTL (exp);
5418 break;
5419
5dab5552 5420 case CLEANUP_POINT_EXPR:
e5e809f4 5421 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 5422
bbf6f052
RK
5423 case SAVE_EXPR:
5424 exp_rtl = SAVE_EXPR_RTL (exp);
ff439b5f
CB
5425 if (exp_rtl)
5426 break;
5427
5428 /* This SAVE_EXPR might appear many times in the top-level
5429 safe_from_p() expression, and if it has a complex
5430 subexpression, examining it multiple times could result
5431 in a combinatorial explosion. E.g. on an Alpha
5432 running at least 200MHz, a Fortran test case compiled with
5433 optimization took about 28 minutes to compile -- even though
5434 it was only a few lines long, and the complicated line causing
5435 so much time to be spent in the earlier version of safe_from_p()
5436 had only 293 or so unique nodes.
5437
5438 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5439 where it is so we can turn it back in the top-level safe_from_p()
5440 when we're done. */
5441
5442 /* For now, don't bother re-sizing the array. */
5443 if (save_expr_count >= save_expr_size)
5444 return 0;
5445 save_expr_rewritten[save_expr_count++] = exp;
ff439b5f
CB
5446
5447 nops = tree_code_length[(int) SAVE_EXPR];
5448 for (i = 0; i < nops; i++)
ff59bfe6
JM
5449 {
5450 tree operand = TREE_OPERAND (exp, i);
5451 if (operand == NULL_TREE)
5452 continue;
5453 TREE_SET_CODE (exp, ERROR_MARK);
5454 if (!safe_from_p (x, operand, 0))
5455 return 0;
5456 TREE_SET_CODE (exp, SAVE_EXPR);
5457 }
5458 TREE_SET_CODE (exp, ERROR_MARK);
ff439b5f 5459 return 1;
bbf6f052 5460
8129842c
RS
5461 case BIND_EXPR:
5462 /* The only operand we look at is operand 1. The rest aren't
5463 part of the expression. */
e5e809f4 5464 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 5465
bbf6f052 5466 case METHOD_CALL_EXPR:
0f41302f 5467 /* This takes a rtx argument, but shouldn't appear here. */
bbf6f052 5468 abort ();
e9a25f70
JL
5469
5470 default:
5471 break;
bbf6f052
RK
5472 }
5473
5474 /* If we have an rtx, we do not need to scan our operands. */
5475 if (exp_rtl)
5476 break;
5477
5478 nops = tree_code_length[(int) TREE_CODE (exp)];
5479 for (i = 0; i < nops; i++)
5480 if (TREE_OPERAND (exp, i) != 0
e5e809f4 5481 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
bbf6f052
RK
5482 return 0;
5483 }
5484
5485 /* If we have an rtl, find any enclosed object. Then see if we conflict
5486 with it. */
5487 if (exp_rtl)
5488 {
5489 if (GET_CODE (exp_rtl) == SUBREG)
5490 {
5491 exp_rtl = SUBREG_REG (exp_rtl);
5492 if (GET_CODE (exp_rtl) == REG
5493 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5494 return 0;
5495 }
5496
5497 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5498 are memory and EXP is not readonly. */
5499 return ! (rtx_equal_p (x, exp_rtl)
5500 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5501 && ! TREE_READONLY (exp)));
5502 }
5503
5504 /* If we reach here, it is safe. */
5505 return 1;
5506}
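/* Example, added for illustration: safe_from_p is how expand_expr
   decides whether a subexpression may be computed directly into
   TARGET.  For `x = f () + x' the CALL_EXPR is assumed to clobber all
   of memory, so a TARGET in memory is rejected and a temporary used
   instead; for `x = y + 1' with `x' in a pseudo register, the routine
   returns nonzero and the sum is built straight into `x'.  Returning
   zero is always conservative: it costs an optimization, never
   correctness.  */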
5507
5508/* Subroutine of expand_expr: return nonzero iff EXP is an
5509 expression whose type is statically determinable. */
5510
5511static int
5512fixed_type_p (exp)
5513 tree exp;
5514{
5515 if (TREE_CODE (exp) == PARM_DECL
5516 || TREE_CODE (exp) == VAR_DECL
5517 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5518 || TREE_CODE (exp) == COMPONENT_REF
5519 || TREE_CODE (exp) == ARRAY_REF)
5520 return 1;
5521 return 0;
5522}
01c8a7c8
RK
5523
5524/* Subroutine of expand_expr: return rtx if EXP is a
5525 variable or parameter; else return 0. */
5526
5527static rtx
5528var_rtx (exp)
5529 tree exp;
5530{
5531 STRIP_NOPS (exp);
5532 switch (TREE_CODE (exp))
5533 {
5534 case PARM_DECL:
5535 case VAR_DECL:
5536 return DECL_RTL (exp);
5537 default:
5538 return 0;
5539 }
5540}
dbecbbe4
JL
5541
5542#ifdef MAX_INTEGER_COMPUTATION_MODE
5543void
5544check_max_integer_computation_mode (exp)
5545 tree exp;
5546{
5f652c07 5547 enum tree_code code;
dbecbbe4
JL
5548 enum machine_mode mode;
5549
5f652c07
JM
5550 /* Strip any NOPs that don't change the mode. */
5551 STRIP_NOPS (exp);
5552 code = TREE_CODE (exp);
5553
71bca506
JL
5554 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5555 if (code == NOP_EXPR
5556 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5557 return;
5558
dbecbbe4
JL
5559 /* First check the type of the overall operation. We need only look at
5560 unary, binary and relational operations. */
5561 if (TREE_CODE_CLASS (code) == '1'
5562 || TREE_CODE_CLASS (code) == '2'
5563 || TREE_CODE_CLASS (code) == '<')
5564 {
5565 mode = TYPE_MODE (TREE_TYPE (exp));
5566 if (GET_MODE_CLASS (mode) == MODE_INT
5567 && mode > MAX_INTEGER_COMPUTATION_MODE)
5568 fatal ("unsupported wide integer operation");
5569 }
5570
5571 /* Check operand of a unary op. */
5572 if (TREE_CODE_CLASS (code) == '1')
5573 {
5574 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5575 if (GET_MODE_CLASS (mode) == MODE_INT
5576 && mode > MAX_INTEGER_COMPUTATION_MODE)
5577 fatal ("unsupported wide integer operation");
5578 }
5579
5580 /* Check operands of a binary/comparison op. */
5581 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5582 {
5583 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5584 if (GET_MODE_CLASS (mode) == MODE_INT
5585 && mode > MAX_INTEGER_COMPUTATION_MODE)
5586 fatal ("unsupported wide integer operation");
5587
5588 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5589 if (GET_MODE_CLASS (mode) == MODE_INT
5590 && mode > MAX_INTEGER_COMPUTATION_MODE)
5591 fatal ("unsupported wide integer operation");
5592 }
5593}
5594#endif
5595
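/* Illustrative note, not part of the original source: a target whose
   widest supported integer arithmetic is DImode might define

       #define MAX_INTEGER_COMPUTATION_MODE DImode

   in its target header, after which any wider MODE_INT operation that
   reaches the expander stops with `unsupported wide integer operation'
   instead of silently producing wrong code.  */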
14a774a9
RK
5596\f
5597/* Utility function used by expand_expr to see if TYPE, a RECORD_TYPE,
5598 has any readonly fields. If any of the fields have types that
5599 contain readonly fields, return true as well. */
5600
5601static int
5602readonly_fields_p (type)
5603 tree type;
5604{
5605 tree field;
5606
5607 for (field = TYPE_FIELDS (type); field != 0; field = TREE_CHAIN (field))
77fd6d10
MM
5608 if (TREE_CODE (field) == FIELD_DECL
5609 && (TREE_READONLY (field)
5610 || (TREE_CODE (TREE_TYPE (field)) == RECORD_TYPE
5611 && readonly_fields_p (TREE_TYPE (field)))))
14a774a9
RK
5612 return 1;
5613
5614 return 0;
5615}
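/* Example, added for illustration: given

       struct A { const int id; int n; };
       struct B { struct A a; };

   readonly_fields_p returns 1 for both record types: `struct A'
   directly contains a readonly FIELD_DECL, and `struct B' contains a
   field whose record type does.  expand_expr uses this to mark stores
   through such types RTX_UNCHANGING_P so they conflict with readonly
   references to the fields.  */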
bbf6f052
RK
5616\f
5617/* expand_expr: generate code for computing expression EXP.
5618 An rtx for the computed value is returned. The value is never null.
5619 In the case of a void EXP, const0_rtx is returned.
5620
5621 The value may be stored in TARGET if TARGET is nonzero.
5622 TARGET is just a suggestion; callers must assume that
5623 the rtx returned may not be the same as TARGET.
5624
5625 If TARGET is CONST0_RTX, it means that the value will be ignored.
5626
5627 If TMODE is not VOIDmode, it suggests generating the
5628 result in mode TMODE. But this is done only when convenient.
5629    Otherwise, TMODE is ignored and the value is generated in its natural mode.
5630 TMODE is just a suggestion; callers must assume that
5631 the rtx returned may not have mode TMODE.
5632
d6a5ac33
RK
5633 Note that TARGET may have neither TMODE nor MODE. In that case, it
5634 probably will not be used.
bbf6f052
RK
5635
5636 If MODIFIER is EXPAND_SUM then when EXP is an addition
5637 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5638 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5639 products as above, or REG or MEM, or constant.
5640 Ordinarily in such cases we would output mul or add instructions
5641 and then return a pseudo reg containing the sum.
5642
5643 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5644 it also marks a label as absolutely required (it can't be dead).
26fcb35a 5645 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
d6a5ac33
RK
5646 This is used for outputting expressions used in initializers.
5647
5648 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5649 with a constant address even if that address is not normally legitimate.
5650 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
bbf6f052
RK
5651
5652rtx
5653expand_expr (exp, target, tmode, modifier)
5654 register tree exp;
5655 rtx target;
5656 enum machine_mode tmode;
5657 enum expand_modifier modifier;
5658{
5659 register rtx op0, op1, temp;
5660 tree type = TREE_TYPE (exp);
5661 int unsignedp = TREE_UNSIGNED (type);
68557e14 5662 register enum machine_mode mode;
bbf6f052
RK
5663 register enum tree_code code = TREE_CODE (exp);
5664 optab this_optab;
68557e14
ML
5665 rtx subtarget, original_target;
5666 int ignore;
bbf6f052 5667 tree context;
921b3427
RK
5668 /* Used by check-memory-usage to make modifier read only. */
5669 enum expand_modifier ro_modifier;
bbf6f052 5670
68557e14
ML
5671 /* Handle ERROR_MARK before anybody tries to access its type. */
5672 if (TREE_CODE (exp) == ERROR_MARK)
5673 {
5674 op0 = CONST0_RTX (tmode);
5675 if (op0 != 0)
5676 return op0;
5677 return const0_rtx;
5678 }
5679
5680 mode = TYPE_MODE (type);
5681 /* Use subtarget as the target for operand 0 of a binary operation. */
5682 subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5683 original_target = target;
5684 ignore = (target == const0_rtx
5685 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5686 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5687 || code == COND_EXPR)
5688 && TREE_CODE (type) == VOID_TYPE));
5689
921b3427
RK
5690 /* Make a read-only version of the modifier. */
5691 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5692 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5693 ro_modifier = modifier;
5694 else
5695 ro_modifier = EXPAND_NORMAL;
ca695ac9 5696
bbf6f052
RK
5697 /* Don't use hard regs as subtargets, because the combiner
5698 can only handle pseudo regs. */
5699 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5700 subtarget = 0;
5701 /* Avoid subtargets inside loops,
5702 since they hide some invariant expressions. */
5703 if (preserve_subexpressions_p ())
5704 subtarget = 0;
5705
dd27116b
RK
5706 /* If we are going to ignore this result, we need only do something
5707 if there is a side-effect somewhere in the expression. If there
b50d17a1
RK
5708 is, short-circuit the most common cases here. Note that we must
5709 not call expand_expr with anything but const0_rtx in case this
5710 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 5711
dd27116b
RK
5712 if (ignore)
5713 {
5714 if (! TREE_SIDE_EFFECTS (exp))
5715 return const0_rtx;
5716
14a774a9
RK
5717 /* Ensure we reference a volatile object even if value is ignored, but
5718 don't do this if all we are doing is taking its address. */
dd27116b
RK
5719 if (TREE_THIS_VOLATILE (exp)
5720 && TREE_CODE (exp) != FUNCTION_DECL
14a774a9
RK
5721 && mode != VOIDmode && mode != BLKmode
5722 && modifier != EXPAND_CONST_ADDRESS)
dd27116b 5723 {
921b3427 5724 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
dd27116b
RK
5725 if (GET_CODE (temp) == MEM)
5726 temp = copy_to_reg (temp);
5727 return const0_rtx;
5728 }
5729
14a774a9
RK
5730 if (TREE_CODE_CLASS (code) == '1' || code == COMPONENT_REF
5731 || code == INDIRECT_REF || code == BUFFER_REF)
dd27116b 5732 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 5733 VOIDmode, ro_modifier);
14a774a9
RK
5734 else if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<'
5735 || code == ARRAY_REF)
dd27116b 5736 {
921b3427
RK
5737 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5738 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
dd27116b
RK
5739 return const0_rtx;
5740 }
5741 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5742 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5743 /* If the second operand has no side effects, just evaluate
0f41302f 5744 the first. */
dd27116b 5745 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 5746 VOIDmode, ro_modifier);
14a774a9
RK
5747 else if (code == BIT_FIELD_REF)
5748 {
5749 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5750 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5751 expand_expr (TREE_OPERAND (exp, 2), const0_rtx, VOIDmode, ro_modifier);
5752 return const0_rtx;
5753 }
5754
90764a87 5755 target = 0;
dd27116b 5756 }
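  /* Example, added for illustration: these short-circuits serve C
     statements whose value is discarded.  `x + y;' with no side
     effects emits no code at all and yields const0_rtx; `x + f ();'
     expands both operands with const0_rtx as the target, so only the
     call is actually emitted; and for a volatile `v', plain `v;'
     still performs the load before the value is dropped.  */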
bbf6f052 5757
dbecbbe4 5758#ifdef MAX_INTEGER_COMPUTATION_MODE
5f652c07
JM
5759 /* Only check stuff here if the mode we want is different from the mode
5760    of the expression; if it's the same, check_max_integer_computation_mode
5761 will handle it. Do we really need to check this stuff at all? */
5762
ce3c0b53 5763 if (target
5f652c07 5764 && GET_MODE (target) != mode
ce3c0b53
JL
5765 && TREE_CODE (exp) != INTEGER_CST
5766 && TREE_CODE (exp) != PARM_DECL
ee06cc21
JL
5767 && TREE_CODE (exp) != ARRAY_REF
5768 && TREE_CODE (exp) != COMPONENT_REF
5769 && TREE_CODE (exp) != BIT_FIELD_REF
5770 && TREE_CODE (exp) != INDIRECT_REF
6bcd94ae 5771 && TREE_CODE (exp) != CALL_EXPR
6ab46dff
GRK
5772 && TREE_CODE (exp) != VAR_DECL
5773 && TREE_CODE (exp) != RTL_EXPR)
dbecbbe4
JL
5774 {
5775 enum machine_mode mode = GET_MODE (target);
5776
5777 if (GET_MODE_CLASS (mode) == MODE_INT
5778 && mode > MAX_INTEGER_COMPUTATION_MODE)
5779 fatal ("unsupported wide integer operation");
5780 }
5781
5f652c07
JM
5782 if (tmode != mode
5783 && TREE_CODE (exp) != INTEGER_CST
ce3c0b53 5784 && TREE_CODE (exp) != PARM_DECL
ee06cc21
JL
5785 && TREE_CODE (exp) != ARRAY_REF
5786 && TREE_CODE (exp) != COMPONENT_REF
5787 && TREE_CODE (exp) != BIT_FIELD_REF
5788 && TREE_CODE (exp) != INDIRECT_REF
ce3c0b53 5789 && TREE_CODE (exp) != VAR_DECL
6bcd94ae 5790 && TREE_CODE (exp) != CALL_EXPR
6ab46dff 5791 && TREE_CODE (exp) != RTL_EXPR
71bca506 5792 && GET_MODE_CLASS (tmode) == MODE_INT
dbecbbe4
JL
5793 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5794 fatal ("unsupported wide integer operation");
5795
5796 check_max_integer_computation_mode (exp);
5797#endif
5798
e44842fe
RK
5799   /* If we will do cse, generate all results into pseudo registers
5800 since 1) that allows cse to find more things
5801 and 2) otherwise cse could produce an insn the machine
5802 cannot support. */
5803
bbf6f052
RK
5804 if (! cse_not_expected && mode != BLKmode && target
5805 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5806 target = subtarget;
5807
bbf6f052
RK
5808 switch (code)
5809 {
5810 case LABEL_DECL:
b552441b
RS
5811 {
5812 tree function = decl_function_context (exp);
5813 /* Handle using a label in a containing function. */
d0977240
RK
5814 if (function != current_function_decl
5815 && function != inline_function_decl && function != 0)
b552441b
RS
5816 {
5817 struct function *p = find_function_data (function);
5818 /* Allocate in the memory associated with the function
5819 that the label is in. */
5820 push_obstacks (p->function_obstack,
5821 p->function_maybepermanent_obstack);
5822
49ad7cfa
BS
5823 p->expr->x_forced_labels
5824 = gen_rtx_EXPR_LIST (VOIDmode, label_rtx (exp),
5825 p->expr->x_forced_labels);
b552441b
RS
5826 pop_obstacks ();
5827 }
ab87f8c8
JL
5828 else
5829 {
ab87f8c8
JL
5830 if (modifier == EXPAND_INITIALIZER)
5831 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5832 label_rtx (exp),
5833 forced_labels);
5834 }
c5c76735 5835
38a448ca
RH
5836 temp = gen_rtx_MEM (FUNCTION_MODE,
5837 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
d0977240
RK
5838 if (function != current_function_decl
5839 && function != inline_function_decl && function != 0)
26fcb35a
RS
5840 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5841 return temp;
b552441b 5842 }
bbf6f052
RK
5843
5844 case PARM_DECL:
5845 if (DECL_RTL (exp) == 0)
5846 {
5847 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 5848 return CONST0_RTX (mode);
bbf6f052
RK
5849 }
5850
0f41302f 5851 /* ... fall through ... */
d6a5ac33 5852
bbf6f052 5853 case VAR_DECL:
2dca20cd
RS
5854 /* If a static var's type was incomplete when the decl was written,
5855 but the type is complete now, lay out the decl now. */
d0f062fb 5856 if (DECL_SIZE (exp) == 0 && COMPLETE_TYPE_P (TREE_TYPE (exp))
2dca20cd
RS
5857 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5858 {
5859 push_obstacks_nochange ();
5860 end_temporary_allocation ();
5861 layout_decl (exp, 0);
5862 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5863 pop_obstacks ();
5864 }
d6a5ac33 5865
7d384cc0
KR
5866 /* Although static-storage variables start off initialized, according to
5867 ANSI C, a memcpy could overwrite them with uninitialized values. So
5868 we check them too. This also lets us check for read-only variables
5869 accessed via a non-const declaration, in case it won't be detected
5870 any other way (e.g., in an embedded system or OS kernel without
5871 memory protection).
5872
5873 Aggregates are not checked here; they're handled elsewhere. */
01d939e8 5874 if (cfun && current_function_check_memory_usage
49ad7cfa 5875 && code == VAR_DECL
921b3427 5876 && GET_CODE (DECL_RTL (exp)) == MEM
921b3427
RK
5877 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5878 {
5879 enum memory_use_mode memory_usage;
5880 memory_usage = get_memory_usage_from_modifier (modifier);
5881
5882 if (memory_usage != MEMORY_USE_DONT)
5883 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 5884 XEXP (DECL_RTL (exp), 0), Pmode,
921b3427
RK
5885 GEN_INT (int_size_in_bytes (type)),
5886 TYPE_MODE (sizetype),
956d6950
JL
5887 GEN_INT (memory_usage),
5888 TYPE_MODE (integer_type_node));
921b3427
RK
5889 }
5890
0f41302f 5891 /* ... fall through ... */
d6a5ac33 5892
2dca20cd 5893 case FUNCTION_DECL:
bbf6f052
RK
5894 case RESULT_DECL:
5895 if (DECL_RTL (exp) == 0)
5896 abort ();
d6a5ac33 5897
e44842fe
RK
5898 /* Ensure variable marked as used even if it doesn't go through
5899 a parser. If it hasn't be used yet, write out an external
5900 definition. */
5901 if (! TREE_USED (exp))
5902 {
5903 assemble_external (exp);
5904 TREE_USED (exp) = 1;
5905 }
5906
dc6d66b3
RK
5907 /* Show we haven't gotten RTL for this yet. */
5908 temp = 0;
5909
bbf6f052
RK
5910 /* Handle variables inherited from containing functions. */
5911 context = decl_function_context (exp);
5912
5913 /* We treat inline_function_decl as an alias for the current function
5914 because that is the inline function whose vars, types, etc.
5915 are being merged into the current function.
5916 See expand_inline_function. */
d6a5ac33 5917
bbf6f052
RK
5918 if (context != 0 && context != current_function_decl
5919 && context != inline_function_decl
5920 /* If var is static, we don't need a static chain to access it. */
5921 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5922 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5923 {
5924 rtx addr;
5925
5926 /* Mark as non-local and addressable. */
81feeecb 5927 DECL_NONLOCAL (exp) = 1;
38ee6ed9
JM
5928 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5929 abort ();
bbf6f052
RK
5930 mark_addressable (exp);
5931 if (GET_CODE (DECL_RTL (exp)) != MEM)
5932 abort ();
5933 addr = XEXP (DECL_RTL (exp), 0);
5934 if (GET_CODE (addr) == MEM)
38a448ca
RH
5935 addr = gen_rtx_MEM (Pmode,
5936 fix_lexical_addr (XEXP (addr, 0), exp));
bbf6f052
RK
5937 else
5938 addr = fix_lexical_addr (addr, exp);
dc6d66b3 5939 temp = change_address (DECL_RTL (exp), mode, addr);
bbf6f052 5940 }
4af3895e 5941
bbf6f052
RK
5942 /* This is the case of an array whose size is to be determined
5943 from its initializer, while the initializer is still being parsed.
5944 See expand_decl. */
d6a5ac33 5945
dc6d66b3
RK
5946 else if (GET_CODE (DECL_RTL (exp)) == MEM
5947 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5948 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
bbf6f052 5949 XEXP (DECL_RTL (exp), 0));
d6a5ac33
RK
5950
5951 /* If DECL_RTL is memory, we are in the normal case and either
5952 the address is not valid or it is not a register and -fforce-addr
5953 is specified, get the address into a register. */
5954
dc6d66b3
RK
5955 else if (GET_CODE (DECL_RTL (exp)) == MEM
5956 && modifier != EXPAND_CONST_ADDRESS
5957 && modifier != EXPAND_SUM
5958 && modifier != EXPAND_INITIALIZER
5959 && (! memory_address_p (DECL_MODE (exp),
5960 XEXP (DECL_RTL (exp), 0))
5961 || (flag_force_addr
5962 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5963 temp = change_address (DECL_RTL (exp), VOIDmode,
d6a5ac33 5964 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 5965
dc6d66b3
RK
5966 /* If we got something, return it. But first, set the alignment
5967 	     if the address is a register.  */
5968 if (temp != 0)
5969 {
5970 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
bdb429a5 5971 mark_reg_pointer (XEXP (temp, 0), DECL_ALIGN (exp));
dc6d66b3
RK
5972
5973 return temp;
5974 }
5975
1499e0a8
RK
5976 /* If the mode of DECL_RTL does not match that of the decl, it
5977 must be a promoted value. We return a SUBREG of the wanted mode,
5978 but mark it so that we know that it was already extended. */
5979
5980 if (GET_CODE (DECL_RTL (exp)) == REG
5981 && GET_MODE (DECL_RTL (exp)) != mode)
5982 {
1499e0a8
RK
5983 /* Get the signedness used for this variable. Ensure we get the
5984 same mode we got when the variable was declared. */
78911e8b
RK
5985 if (GET_MODE (DECL_RTL (exp))
5986 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
1499e0a8
RK
5987 abort ();
5988
38a448ca 5989 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
1499e0a8
RK
5990 SUBREG_PROMOTED_VAR_P (temp) = 1;
5991 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5992 return temp;
5993 }
5994
bbf6f052
RK
5995 return DECL_RTL (exp);
5996
5997 case INTEGER_CST:
5998 return immed_double_const (TREE_INT_CST_LOW (exp),
05bccae2 5999 TREE_INT_CST_HIGH (exp), mode);
bbf6f052
RK
6000
6001 case CONST_DECL:
921b3427
RK
6002 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
6003 EXPAND_MEMORY_USE_BAD);
bbf6f052
RK
6004
6005 case REAL_CST:
6006 /* If optimized, generate immediate CONST_DOUBLE
6007 which will be turned into memory by reload if necessary.
6008
6009 We used to force a register so that loop.c could see it. But
6010 this does not allow gen_* patterns to perform optimizations with
6011 the constants. It also produces two insns in cases like "x = 1.0;".
6012 On most machines, floating-point constants are not permitted in
6013 many insns, so we'd end up copying it to a register in any case.
6014
6015 Now, we do the copying in expand_binop, if appropriate. */
6016 return immed_real_const (exp);
6017
6018 case COMPLEX_CST:
6019 case STRING_CST:
6020 if (! TREE_CST_RTL (exp))
6021 output_constant_def (exp);
6022
6023 /* TREE_CST_RTL probably contains a constant address.
6024 On RISC machines where a constant address isn't valid,
6025 make some insns to get that address into a register. */
6026 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
6027 && modifier != EXPAND_CONST_ADDRESS
6028 && modifier != EXPAND_INITIALIZER
6029 && modifier != EXPAND_SUM
d6a5ac33
RK
6030 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
6031 || (flag_force_addr
6032 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
bbf6f052
RK
6033 return change_address (TREE_CST_RTL (exp), VOIDmode,
6034 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
6035 return TREE_CST_RTL (exp);
6036
bf1e5319 6037 case EXPR_WITH_FILE_LOCATION:
b24f65cd
APB
6038 {
6039 rtx to_return;
6040 char *saved_input_filename = input_filename;
6041 int saved_lineno = lineno;
6042 input_filename = EXPR_WFL_FILENAME (exp);
6043 lineno = EXPR_WFL_LINENO (exp);
6044 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
6045 emit_line_note (input_filename, lineno);
6046 	/* Possibly avoid switching back and forth here.  */
6047 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
6048 input_filename = saved_input_filename;
6049 lineno = saved_lineno;
6050 return to_return;
6051 }
bf1e5319 6052
bbf6f052
RK
6053 case SAVE_EXPR:
6054 context = decl_function_context (exp);
d6a5ac33 6055
d0977240
RK
6056 /* If this SAVE_EXPR was at global context, assume we are an
6057 initialization function and move it into our context. */
6058 if (context == 0)
6059 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
6060
bbf6f052
RK
6061 /* We treat inline_function_decl as an alias for the current function
6062 because that is the inline function whose vars, types, etc.
6063 are being merged into the current function.
6064 See expand_inline_function. */
6065 if (context == current_function_decl || context == inline_function_decl)
6066 context = 0;
6067
6068 /* If this is non-local, handle it. */
6069 if (context)
6070 {
d0977240
RK
6071 /* The following call just exists to abort if the context is
6072 not of a containing function. */
6073 find_function_data (context);
6074
bbf6f052
RK
6075 temp = SAVE_EXPR_RTL (exp);
6076 if (temp && GET_CODE (temp) == REG)
6077 {
6078 put_var_into_stack (exp);
6079 temp = SAVE_EXPR_RTL (exp);
6080 }
6081 if (temp == 0 || GET_CODE (temp) != MEM)
6082 abort ();
6083 return change_address (temp, mode,
6084 fix_lexical_addr (XEXP (temp, 0), exp));
6085 }
6086 if (SAVE_EXPR_RTL (exp) == 0)
6087 {
06089a8b
RK
6088 if (mode == VOIDmode)
6089 temp = const0_rtx;
6090 else
e5e809f4 6091 temp = assign_temp (type, 3, 0, 0);
1499e0a8 6092
bbf6f052 6093 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 6094 if (!optimize && GET_CODE (temp) == REG)
38a448ca
RH
6095 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
6096 save_expr_regs);
ff78f773
RK
6097
6098 /* If the mode of TEMP does not match that of the expression, it
6099 must be a promoted value. We pass store_expr a SUBREG of the
6100 wanted mode but mark it so that we know that it was already
6101 extended. Note that `unsignedp' was modified above in
6102 this case. */
6103
6104 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
6105 {
38a448ca 6106 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
ff78f773
RK
6107 SUBREG_PROMOTED_VAR_P (temp) = 1;
6108 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6109 }
6110
4c7a0be9 6111 if (temp == const0_rtx)
921b3427
RK
6112 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
6113 EXPAND_MEMORY_USE_BAD);
4c7a0be9
JW
6114 else
6115 store_expr (TREE_OPERAND (exp, 0), temp, 0);
e5e809f4
JL
6116
6117 TREE_USED (exp) = 1;
bbf6f052 6118 }
1499e0a8
RK
6119
6120 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
6121 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 6122 but mark it so that we know that it was already extended. */
1499e0a8
RK
6123
6124 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
6125 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
6126 {
e70d22c8
RK
6127 /* Compute the signedness and make the proper SUBREG. */
6128 promote_mode (type, mode, &unsignedp, 0);
38a448ca 6129 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
1499e0a8
RK
6130 SUBREG_PROMOTED_VAR_P (temp) = 1;
6131 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
6132 return temp;
6133 }
6134
bbf6f052
RK
6135 return SAVE_EXPR_RTL (exp);
6136
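    /* Example, added for illustration (the front-end detail here is an
       assumption): a SAVE_EXPR guarantees one evaluation however many
       times it is referenced.  For a compound assignment such as

           a[f ()] += 1;

       the C front end stabilizes the index by wrapping `f ()' in a
       SAVE_EXPR, so the first expansion above stores the call's value
       into the temporary in SAVE_EXPR_RTL and the later reference,
       when storing back into the array, reuses that rtx.  */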
679163cf
MS
6137 case UNSAVE_EXPR:
6138 {
6139 rtx temp;
6140 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
6141 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
6142 return temp;
6143 }
6144
b50d17a1 6145 case PLACEHOLDER_EXPR:
e9a25f70
JL
6146 {
6147 tree placeholder_expr;
6148
6149 /* If there is an object on the head of the placeholder list,
e5e809f4 6150 	   see if any object in it is of type TYPE or a pointer to it.  For
e9a25f70
JL
6151 further information, see tree.def. */
6152 for (placeholder_expr = placeholder_list;
6153 placeholder_expr != 0;
6154 placeholder_expr = TREE_CHAIN (placeholder_expr))
6155 {
6156 tree need_type = TYPE_MAIN_VARIANT (type);
6157 tree object = 0;
6158 tree old_list = placeholder_list;
6159 tree elt;
6160
e5e809f4
JL
6161 /* Find the outermost reference that is of the type we want.
6162 If none, see if any object has a type that is a pointer to
6163 the type we want. */
6164 for (elt = TREE_PURPOSE (placeholder_expr);
6165 elt != 0 && object == 0;
6166 elt
6167 = ((TREE_CODE (elt) == COMPOUND_EXPR
6168 || TREE_CODE (elt) == COND_EXPR)
6169 ? TREE_OPERAND (elt, 1)
6170 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6171 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6172 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6173 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6174 ? TREE_OPERAND (elt, 0) : 0))
6175 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
6176 object = elt;
e9a25f70 6177
e9a25f70 6178 for (elt = TREE_PURPOSE (placeholder_expr);
e5e809f4
JL
6179 elt != 0 && object == 0;
6180 elt
6181 = ((TREE_CODE (elt) == COMPOUND_EXPR
6182 || TREE_CODE (elt) == COND_EXPR)
6183 ? TREE_OPERAND (elt, 1)
6184 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
6185 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
6186 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
6187 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
6188 ? TREE_OPERAND (elt, 0) : 0))
6189 if (POINTER_TYPE_P (TREE_TYPE (elt))
6190 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
e9a25f70 6191 == need_type))
e5e809f4 6192 object = build1 (INDIRECT_REF, need_type, elt);
dc500fa1 6193
e9a25f70 6194 if (object != 0)
2cde2255 6195 {
e9a25f70
JL
6196 /* Expand this object skipping the list entries before
6197 it was found in case it is also a PLACEHOLDER_EXPR.
6198 In that case, we want to translate it using subsequent
6199 entries. */
6200 placeholder_list = TREE_CHAIN (placeholder_expr);
6201 temp = expand_expr (object, original_target, tmode,
6202 ro_modifier);
6203 placeholder_list = old_list;
6204 return temp;
2cde2255 6205 }
e9a25f70
JL
6206 }
6207 }
b50d17a1
RK
6208
6209 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
6210 abort ();
6211
6212 case WITH_RECORD_EXPR:
6213 /* Put the object on the placeholder list, expand our first operand,
6214 and pop the list. */
6215 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
6216 placeholder_list);
6217 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
921b3427 6218 tmode, ro_modifier);
b50d17a1
RK
6219 placeholder_list = TREE_CHAIN (placeholder_list);
6220 return target;
6221
70e6ca43
APB
6222 case GOTO_EXPR:
6223 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
6224 expand_goto (TREE_OPERAND (exp, 0));
6225 else
6226 expand_computed_goto (TREE_OPERAND (exp, 0));
6227 return const0_rtx;
6228
bbf6f052 6229 case EXIT_EXPR:
e44842fe
RK
6230 expand_exit_loop_if_false (NULL_PTR,
6231 invert_truthvalue (TREE_OPERAND (exp, 0)));
bbf6f052
RK
6232 return const0_rtx;
6233
f42e28dd
APB
6234 case LABELED_BLOCK_EXPR:
6235 if (LABELED_BLOCK_BODY (exp))
6236 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
6237 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
6238 return const0_rtx;
6239
6240 case EXIT_BLOCK_EXPR:
6241 if (EXIT_BLOCK_RETURN (exp))
ab87f8c8 6242 sorry ("returned value in block_exit_expr");
f42e28dd
APB
6243 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
6244 return const0_rtx;
6245
bbf6f052 6246 case LOOP_EXPR:
0088fcb1 6247 push_temp_slots ();
bbf6f052
RK
6248 expand_start_loop (1);
6249 expand_expr_stmt (TREE_OPERAND (exp, 0));
6250 expand_end_loop ();
0088fcb1 6251 pop_temp_slots ();
bbf6f052
RK
6252
6253 return const0_rtx;
6254
6255 case BIND_EXPR:
6256 {
6257 tree vars = TREE_OPERAND (exp, 0);
6258 int vars_need_expansion = 0;
6259
6260 /* Need to open a binding contour here because
e976b8b2 6261 if there are any cleanups they must be contained here. */
8e91754e 6262 expand_start_bindings (2);
bbf6f052 6263
2df53c0b
RS
6264 /* Mark the corresponding BLOCK for output in its proper place. */
6265 if (TREE_OPERAND (exp, 2) != 0
6266 && ! TREE_USED (TREE_OPERAND (exp, 2)))
6267 insert_block (TREE_OPERAND (exp, 2));
bbf6f052
RK
6268
6269 /* If VARS have not yet been expanded, expand them now. */
6270 while (vars)
6271 {
6272 if (DECL_RTL (vars) == 0)
6273 {
6274 vars_need_expansion = 1;
6275 expand_decl (vars);
6276 }
6277 expand_decl_init (vars);
6278 vars = TREE_CHAIN (vars);
6279 }
6280
921b3427 6281 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
bbf6f052
RK
6282
6283 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
6284
6285 return temp;
6286 }
6287
6288 case RTL_EXPR:
83b853c9
JM
6289 if (RTL_EXPR_SEQUENCE (exp))
6290 {
6291 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
6292 abort ();
6293 emit_insns (RTL_EXPR_SEQUENCE (exp));
6294 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
6295 }
64dc53f3
MM
6296 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
6297 free_temps_for_rtl_expr (exp);
bbf6f052
RK
6298 return RTL_EXPR_RTL (exp);
6299
6300 case CONSTRUCTOR:
dd27116b
RK
6301 /* If we don't need the result, just ensure we evaluate any
6302 subexpressions. */
6303 if (ignore)
6304 {
6305 tree elt;
6306 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
921b3427
RK
6307 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6308 EXPAND_MEMORY_USE_BAD);
dd27116b
RK
6309 return const0_rtx;
6310 }
3207b172 6311
4af3895e
JVA
6312 /* All elts simple constants => refer to a constant in memory. But
6313 if this is a non-BLKmode mode, let it store a field at a time
6314 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 6315 fold. Likewise, if we have a target we can use, it is best to
d720b9d1
RK
6316 store directly into the target unless the type is large enough
6317 that memcpy will be used. If we are making an initializer and
3207b172 6318 all operands are constant, put it in memory as well. */
dd27116b 6319 else if ((TREE_STATIC (exp)
3207b172 6320 && ((mode == BLKmode
e5e809f4 6321 && ! (target != 0 && safe_from_p (target, exp, 1)))
d720b9d1 6322 || TREE_ADDRESSABLE (exp)
19caa751 6323 || (host_integerp (TYPE_SIZE_UNIT (type), 1)
05bccae2 6324 && (! MOVE_BY_PIECES_P
19caa751
RK
6325 (tree_low_cst (TYPE_SIZE_UNIT (type), 1),
6326 TYPE_ALIGN (type)))
9de08200 6327 && ! mostly_zeros_p (exp))))
dd27116b 6328 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
bbf6f052
RK
6329 {
6330 rtx constructor = output_constant_def (exp);
19caa751 6331
b552441b
RS
6332 if (modifier != EXPAND_CONST_ADDRESS
6333 && modifier != EXPAND_INITIALIZER
6334 && modifier != EXPAND_SUM
d6a5ac33
RK
6335 && (! memory_address_p (GET_MODE (constructor),
6336 XEXP (constructor, 0))
6337 || (flag_force_addr
6338 && GET_CODE (XEXP (constructor, 0)) != REG)))
bbf6f052
RK
6339 constructor = change_address (constructor, VOIDmode,
6340 XEXP (constructor, 0));
6341 return constructor;
6342 }
6343
bbf6f052
RK
6344 else
6345 {
e9ac02a6
JW
6346 /* Handle calls that pass values in multiple non-contiguous
6347 locations. The Irix 6 ABI has examples of this. */
e5e809f4 6348 if (target == 0 || ! safe_from_p (target, exp, 1)
e9ac02a6 6349 || GET_CODE (target) == PARALLEL)
06089a8b
RK
6350 {
6351 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6352 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6353 else
6354 target = assign_temp (type, 0, 1, 1);
6355 }
07604beb
RK
6356
6357 if (TREE_READONLY (exp))
6358 {
9151b3bf 6359 if (GET_CODE (target) == MEM)
effbcc6a
RK
6360 target = copy_rtx (target);
6361
07604beb
RK
6362 RTX_UNCHANGING_P (target) = 1;
6363 }
6364
b7010412
RK
6365 store_constructor (exp, target, TYPE_ALIGN (TREE_TYPE (exp)), 0,
6366 int_size_in_bytes (TREE_TYPE (exp)));
bbf6f052
RK
6367 return target;
6368 }
6369
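    /* Illustrative note, not part of the original source: the
       CONSTRUCTOR case above picks between two strategies.  A constant
       initializer being emitted for

           static const int t[4] = { 1, 2, 3, 4 };

       is laid out once in the data section by output_constant_def,
       while an automatic aggregate such as

           struct pt p = { 0, 0, 1 };

       is built at run time, field by field, by store_constructor,
       with mostly_zeros_p biasing the choice toward a block clear
       plus a few explicit stores.  */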
6370 case INDIRECT_REF:
6371 {
6372 tree exp1 = TREE_OPERAND (exp, 0);
6373 tree exp2;
7581a30f
JW
6374 tree index;
6375 tree string = string_constant (exp1, &index);
7581a30f 6376
06eaa86f 6377 /* Try to optimize reads from const strings. */
7581a30f
JW
6378 if (string
6379 && TREE_CODE (string) == STRING_CST
6380 && TREE_CODE (index) == INTEGER_CST
05bccae2 6381 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
7581a30f 6382 && GET_MODE_CLASS (mode) == MODE_INT
06eaa86f
JW
6383 && GET_MODE_SIZE (mode) == 1
6384 && modifier != EXPAND_MEMORY_USE_WO)
05bccae2
RK
6385 return
6386 GEN_INT (TREE_STRING_POINTER (string)[TREE_INT_CST_LOW (index)]);
bbf6f052 6387

	op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
	op0 = memory_address (mode, op0);

	if (cfun && current_function_check_memory_usage
	    && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		in_check_memory_usage = 1;
		emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				   op0, Pmode,
				   GEN_INT (int_size_in_bytes (type)),
				   TYPE_MODE (sizetype),
				   GEN_INT (memory_usage),
				   TYPE_MODE (integer_type_node));
		in_check_memory_usage = 0;
	      }
	  }

	temp = gen_rtx_MEM (mode, op0);
	/* If the address was computed by addition,
	   mark this as an element of an aggregate.  */
	if (TREE_CODE (exp1) == PLUS_EXPR
	    || (TREE_CODE (exp1) == SAVE_EXPR
		&& TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
	    || AGGREGATE_TYPE_P (TREE_TYPE (exp))
	    || (TREE_CODE (exp1) == ADDR_EXPR
		&& (exp2 = TREE_OPERAND (exp1, 0))
		&& AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
	  MEM_SET_IN_STRUCT_P (temp, 1);

	MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
	MEM_ALIAS_SET (temp) = get_alias_set (exp);

	/* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
	   here, because, in C and C++, the fact that a location is accessed
	   through a pointer to const does not mean that the value there can
	   never change.  Languages where it can never change should
	   also set TREE_STATIC.  */
	RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);

	/* If we are writing to this object and its type is a record with
	   readonly fields, we must mark it as readonly so it will
	   conflict with readonly references to those fields.  */
	if (modifier == EXPAND_MEMORY_USE_WO
	    && TREE_CODE (type) == RECORD_TYPE && readonly_fields_p (type))
	  RTX_UNCHANGING_P (temp) = 1;

	return temp;
      }

    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
	abort ();

      {
	tree array = TREE_OPERAND (exp, 0);
	tree domain = TYPE_DOMAIN (TREE_TYPE (array));
	tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
	tree index = convert (sizetype, TREE_OPERAND (exp, 1));
	HOST_WIDE_INT i;

	/* Optimize the special case of a zero lower bound.

	   We convert the low_bound to sizetype to avoid some problems
	   with constant folding.  (E.g. suppose the lower bound is 1
	   and its mode is QI.  Without the conversion, (ARRAY
	   + (INDEX - (unsigned char) 1)) becomes ((ARRAY + (- (unsigned char) 1))
	   + INDEX), which becomes (ARRAY + 255 + INDEX).  Oops!)  */

	if (! integer_zerop (low_bound))
	  index = size_diffop (index, convert (sizetype, low_bound));

	/* Fold an expression like "foo"[2].
	   This is not done in fold so it won't happen inside &.
	   Don't fold if this is for wide characters, since it's too
	   difficult to do correctly and this is a very rare case.  */

	if (TREE_CODE (array) == STRING_CST
	    && TREE_CODE (index) == INTEGER_CST
	    && compare_tree_int (index, TREE_STRING_LENGTH (array)) < 0
	    && GET_MODE_CLASS (mode) == MODE_INT
	    && GET_MODE_SIZE (mode) == 1)
	  return
	    GEN_INT (TREE_STRING_POINTER (array)[TREE_INT_CST_LOW (index)]);

	/* If this is a constant index into a constant array,
	   just get the value from the array.  Handle both the cases when
	   we have an explicit constructor and when our operand is a variable
	   that was declared const.  */

	if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
	    && TREE_CODE (index) == INTEGER_CST
	    && 0 > compare_tree_int (index,
				     list_length (CONSTRUCTOR_ELTS
						  (TREE_OPERAND (exp, 0)))))
	  {
	    tree elem;

	    for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
		 i = TREE_INT_CST_LOW (index);
		 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
	      ;

	    if (elem)
	      return expand_expr (fold (TREE_VALUE (elem)), target,
				  tmode, ro_modifier);
	  }

	else if (optimize >= 1
		 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
		 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
		 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
	  {
	    if (TREE_CODE (index) == INTEGER_CST)
	      {
		tree init = DECL_INITIAL (array);

		if (TREE_CODE (init) == CONSTRUCTOR)
		  {
		    tree elem;

		    for (elem = CONSTRUCTOR_ELTS (init);
			 (elem
			  && !tree_int_cst_equal (TREE_PURPOSE (elem), index));
			 elem = TREE_CHAIN (elem))
		      ;

		    if (elem)
		      return expand_expr (fold (TREE_VALUE (elem)), target,
					  tmode, ro_modifier);
		  }
		else if (TREE_CODE (init) == STRING_CST
			 && 0 > compare_tree_int (index,
						  TREE_STRING_LENGTH (init)))
		  return (GEN_INT
			  (TREE_STRING_POINTER
			   (init)[TREE_INT_CST_LOW (index)]));
	      }
	  }
      }
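
      /* Illustration (not part of expr.c, names hypothetical): a read
	 that reaches the DECL_INITIAL path above.  With optimize >= 1
	 the constant index is looked up in the initializer and the
	 reference folds to the constant 20 with no load emitted.  */
#if 0
      static const int table_example[3] = { 10, 20, 30 };
      int get_example (void) { return table_example[1]; }
#endif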

      /* ... fall through ...  */

    case COMPONENT_REF:
    case BIT_FIELD_REF:
      /* If the operand is a CONSTRUCTOR, we can just extract the
	 appropriate field if it is present.  Don't do this if we have
	 already written the data since we want to refer to that copy
	 and varasm.c assumes that's what we'll do.  */
      if (code != ARRAY_REF
	  && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
	  && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
	{
	  tree elt;

	  for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
	       elt = TREE_CHAIN (elt))
	    if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
		/* We can normally use the value of the field in the
		   CONSTRUCTOR.  However, if this is a bitfield in
		   an integral mode that we can fit in a HOST_WIDE_INT,
		   we must mask only the number of bits in the bitfield,
		   since this is done implicitly by the constructor.  If
		   the bitfield does not meet either of those conditions,
		   we can't do this optimization.  */
		&& (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
		    || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
			 == MODE_INT)
			&& (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
			    <= HOST_BITS_PER_WIDE_INT))))
	      {
		op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
		if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
		  {
		    HOST_WIDE_INT bitsize
		      = TREE_INT_CST_LOW (DECL_SIZE (TREE_PURPOSE (elt)));

		    if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
		      {
			op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
			op0 = expand_and (op0, op1, target);
		      }
		    else
		      {
			enum machine_mode imode
			  = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
			tree count
			  = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
					 0);

			op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
					    target, 0);
			op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
					    target, 0);
		      }
		  }

		return op0;
	      }
	}
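
      /* Worked illustration (not from expr.c) of the masking above for a
	 3-bit field whose value is produced in a 32-bit integral mode;
	 word_example is hypothetical.  Unsigned fields keep the low
	 BITSIZE bits; signed fields are sign-extended with a shift pair,
	 exactly as the expand_and and expand_shift calls do.  */
#if 0
      unsigned int u_example = word_example & ((1 << 3) - 1);
      int s_example = ((int) (word_example << (32 - 3))) >> (32 - 3);
#endif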

      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	unsigned int alignment;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep,
					&alignment);

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();

	/* If TEM's type is a union of variable size, pass TARGET to the
	   inner computation, since it will need a temporary and TARGET is
	   known to suffice.  This occurs in unchecked conversion in Ada.  */

	op0 = expand_expr (tem,
			   (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			    && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
				!= INTEGER_CST)
			    ? target : NULL_RTX),
			   VOIDmode,
			   (modifier == EXPAND_INITIALIZER
			    || modifier == EXPAND_CONST_ADDRESS)
			   ? modifier : EXPAND_NORMAL);

	/* If this is a constant, put it into a register if it is a
	   legitimate constant and OFFSET is 0 and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
		&& offset == 0)
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	    /* If this object is in memory, put it into a register.
	       This case can't occur in C, but can in Ada if we have
	       unchecked conversion of an expression from a scalar type to
	       an array or record type.  */
	    if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
		|| GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
	      {
		rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);

		mark_temp_addr_taken (memloc);
		emit_move_insn (memloc, op0);
		op0 = memloc;
	      }

	    if (GET_CODE (op0) != MEM)
	      abort ();

	    if (GET_MODE (offset_rtx) != ptr_mode)
	      {
#ifdef POINTERS_EXTEND_UNSIGNED
		offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
	      }

	    /* A constant address in OP0 can have VOIDmode; we must not try
	       to call force_reg for that case.  Avoid that case.  */
	    if (GET_CODE (op0) == MEM
		&& GET_MODE (op0) == BLKmode
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize != 0
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& alignment == GET_MODE_ALIGNMENT (mode1))
	      {
		rtx temp = change_address (op0, mode1,
					   plus_constant (XEXP (op0, 0),
							  (bitpos /
							   BITS_PER_UNIT)));
		if (GET_CODE (XEXP (temp, 0)) == REG)
		  op0 = temp;
		else
		  op0 = change_address (op0, mode1,
					force_reg (GET_MODE (XEXP (temp, 0)),
						   XEXP (temp, 0)));
		bitpos = 0;
	      }

	    op0 = change_address (op0, VOIDmode,
				  gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
						force_reg (ptr_mode,
							   offset_rtx)));
	  }

	/* Don't forget about volatility even if this is a bitfield.  */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    op0 = copy_rtx (op0);
	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* Check the access.  */
	if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		rtx to;
		int size;

		to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
		size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;

		/* Check the access right of the pointer.  */
		if (size > BITS_PER_UNIT)
		  emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				     to, Pmode,
				     GEN_INT (size / BITS_PER_UNIT),
				     TYPE_MODE (sizetype),
				     GEN_INT (memory_usage),
				     TYPE_MODE (integer_type_node));
	      }
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.
	   If we ultimately want the address (EXPAND_CONST_ADDRESS or
	   EXPAND_INITIALIZER), then we must not copy to a temporary.  */
	if (mode1 == VOIDmode
	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
	    || (modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER
		&& ((mode1 != BLKmode && ! direct_load[(int) mode1]
		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
		    /* If the field isn't aligned enough to fetch as a memref,
		       fetch it as a bit field.  */
		    || (mode1 != BLKmode
			&& SLOW_UNALIGNED_ACCESS (mode1, alignment)
			&& ((TYPE_ALIGN (TREE_TYPE (tem))
			     < GET_MODE_ALIGNMENT (mode))
			    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0)))
		    /* If the type and the field are a constant size and the
		       size of the type isn't the same size as the bitfield,
		       we must use bitfield operations.  */
		    || ((bitsize >= 0
			 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (exp)))
			     == INTEGER_CST)
			 && 0 != compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)),
						   bitsize)))))
	    || (modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER
		&& mode == BLKmode
		&& SLOW_UNALIGNED_ACCESS (mode, alignment)
		&& (TYPE_ALIGN (type) > alignment
		    || bitpos % TYPE_ALIGN (type) != 0)))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode
		&& ! (target != 0 && GET_CODE (op0) == MEM
		      && GET_CODE (target) == MEM
		      && bitpos % BITS_PER_UNIT == 0))
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		if (GET_CODE (op0) != MEM
		    || (target != 0 && GET_CODE (target) != MEM)
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		op0 = change_address (op0, VOIDmode,
				      plus_constant (XEXP (op0, 0),
						     bitpos / BITS_PER_UNIT));
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		emit_block_move (target, op0,
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 BITS_PER_UNIT);

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
	      mark_reg_pointer (XEXP (op0, 0), alignment);

	    op0 = extract_bit_field (op0, bitsize, bitpos,
				     unsignedp, target, ext_mode, ext_mode,
				     alignment,
				     int_size_in_bytes (TREE_TYPE (tem)));

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    if (mode == BLKmode)
	      {
		rtx new = assign_stack_temp (ext_mode,
					     bitsize / BITS_PER_UNIT, 0);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		MEM_SET_IN_STRUCT_P (op0, 1);
	      }

	    return op0;
	  }

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
						   (bitpos / BITS_PER_UNIT)));
	else
	  op0 = change_address (op0, mode1,
				plus_constant (XEXP (op0, 0),
					       (bitpos / BITS_PER_UNIT)));

	if (GET_CODE (op0) == MEM)
	  MEM_ALIAS_SET (op0) = get_alias_set (exp);

	if (GET_CODE (XEXP (op0, 0)) == REG)
	  mark_reg_pointer (XEXP (op0, 0), alignment);

	MEM_SET_IN_STRUCT_P (op0, 1);
	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }
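
      /* Illustration (hypothetical type): get_inner_reference, used at
	 the top of the block above, decomposes a COMPONENT_REF such as
	 s_example.c into the containing object plus a bit position and
	 size; on a typical 32-bit target the reference below yields
	 bitpos == 32, bitsize == 8, mode1 == QImode, and the code then
	 fetches it as a memref or as a bit field depending on alignment.  */
#if 0
      struct s_example_type { int a; char c; } s_example;
      char read_c_example (void) { return s_example.c; }
#endif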
6863
bbf6f052
RK
6864 /* Intended for a reference to a buffer of a file-object in Pascal.
6865 But it's not certain that a special tree code will really be
6866 necessary for these. INDIRECT_REF might work for them. */
6867 case BUFFER_REF:
6868 abort ();
6869
7308a047 6870 case IN_EXPR:
7308a047 6871 {
d6a5ac33
RK
6872 /* Pascal set IN expression.
6873
6874 Algorithm:
6875 rlo = set_low - (set_low%bits_per_word);
6876 the_word = set [ (index - rlo)/bits_per_word ];
6877 bit_index = index % bits_per_word;
6878 bitmask = 1 << bit_index;
6879 return !!(the_word & bitmask); */
6880
7308a047
RS
6881 tree set = TREE_OPERAND (exp, 0);
6882 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 6883 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 6884 tree set_type = TREE_TYPE (set);
7308a047
RS
6885 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6886 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
d6a5ac33
RK
6887 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6888 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6889 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6890 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6891 rtx setaddr = XEXP (setval, 0);
6892 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
7308a047
RS
6893 rtx rlow;
6894 rtx diff, quo, rem, addr, bit, result;
7308a047 6895
d6a5ac33
RK
6896 preexpand_calls (exp);
6897
6898 /* If domain is empty, answer is no. Likewise if index is constant
6899 and out of bounds. */
51723711 6900 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 6901 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 6902 && tree_int_cst_lt (set_high_bound, set_low_bound))
d6a5ac33
RK
6903 || (TREE_CODE (index) == INTEGER_CST
6904 && TREE_CODE (set_low_bound) == INTEGER_CST
6905 && tree_int_cst_lt (index, set_low_bound))
6906 || (TREE_CODE (set_high_bound) == INTEGER_CST
6907 && TREE_CODE (index) == INTEGER_CST
6908 && tree_int_cst_lt (set_high_bound, index))))
7308a047
RS
6909 return const0_rtx;
6910
d6a5ac33
RK
6911 if (target == 0)
6912 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7308a047
RS
6913
6914 /* If we get here, we have to generate the code for both cases
6915 (in range and out of range). */
6916
6917 op0 = gen_label_rtx ();
6918 op1 = gen_label_rtx ();
6919
6920 if (! (GET_CODE (index_val) == CONST_INT
6921 && GET_CODE (lo_r) == CONST_INT))
6922 {
c5d5d461
JL
6923 emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
6924 GET_MODE (index_val), iunsignedp, 0, op1);
7308a047
RS
6925 }
6926
6927 if (! (GET_CODE (index_val) == CONST_INT
6928 && GET_CODE (hi_r) == CONST_INT))
6929 {
c5d5d461
JL
6930 emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
6931 GET_MODE (index_val), iunsignedp, 0, op1);
7308a047
RS
6932 }
6933
6934 /* Calculate the element number of bit zero in the first word
6935 of the set. */
6936 if (GET_CODE (lo_r) == CONST_INT)
17938e57
RK
6937 rlow = GEN_INT (INTVAL (lo_r)
6938 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 6939 else
17938e57
RK
6940 rlow = expand_binop (index_mode, and_optab, lo_r,
6941 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 6942 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 6943
d6a5ac33
RK
6944 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6945 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047
RS
6946
6947 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 6948 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 6949 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
d6a5ac33
RK
6950 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6951
7308a047 6952 addr = memory_address (byte_mode,
d6a5ac33
RK
6953 expand_binop (index_mode, add_optab, diff,
6954 setaddr, NULL_RTX, iunsignedp,
17938e57 6955 OPTAB_LIB_WIDEN));
d6a5ac33 6956
7308a047
RS
6957 /* Extract the bit we want to examine */
6958 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 6959 gen_rtx_MEM (byte_mode, addr),
17938e57
RK
6960 make_tree (TREE_TYPE (index), rem),
6961 NULL_RTX, 1);
6962 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6963 GET_MODE (target) == byte_mode ? target : 0,
7308a047 6964 1, OPTAB_LIB_WIDEN);
17938e57
RK
6965
6966 if (result != target)
6967 convert_move (target, result, 1);
7308a047
RS
6968
6969 /* Output the code to handle the out-of-range case. */
6970 emit_jump (op0);
6971 emit_label (op1);
6972 emit_move_insn (target, const0_rtx);
6973 emit_label (op0);
6974 return target;
6975 }
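
      /* A minimal C sketch (helper and names hypothetical, not part of
	 expr.c) of the set-membership algorithm in the comment above,
	 with an 8-bit storage unit standing in for bits_per_word as in
	 the byte_mode code actually emitted.  */
#if 0
      static int
      set_in_example (const unsigned char *set, int set_low, int index)
      {
	int rlo = set_low - (set_low % 8);
	unsigned char the_word = set[(index - rlo) / 8];
	int bit_index = index % 8;
	return (the_word >> bit_index) & 1;	/* !!(the_word & bitmask) */
      }
#endif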

    case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
	{
	  RTL_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
	  expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));

	  /* That's it for this cleanup.  */
	  TREE_OPERAND (exp, 2) = 0;
	}
      return RTL_EXPR_RTL (exp);

    case CLEANUP_POINT_EXPR:
      {
	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (2);

	target_temp_slot_level = temp_slot_level;

	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
	/* If we're going to use this value, load it up now.  */
	if (! ignore)
	  op0 = force_not_mem (op0);
	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	return expand_builtin (exp, target, subtarget, tmode, ignore);

      /* If this call was expanded already by preexpand_calls,
	 just return the result we got.  */
      if (CALL_EXPR_RTL (exp) != 0)
	return CALL_EXPR_RTL (exp);

      return expand_call (exp, target, ignore);

    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));

	  /* If both input and output are BLKmode, this conversion
	     isn't actually doing anything unless we need to make the
	     alignment stricter.  */
	  if (mode == BLKmode && TYPE_MODE (valtype) == BLKmode
	      && (TYPE_ALIGN (type) <= TYPE_ALIGN (valtype)
		  || TYPE_ALIGN (type) >= BIGGEST_ALIGNMENT))
	    return expand_expr (TREE_OPERAND (exp, 0), target, tmode,
				modifier);

	  if (target == 0)
	    {
	      if (mode != BLKmode)
		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (GET_CODE (target) == MEM)
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			change_address (target, TYPE_MODE (valtype), 0), 0);

	  else if (GET_CODE (target) == REG)
	    /* Store this field into a union of the proper type.  */
	    store_field (target,
			 MIN ((int_size_in_bytes (TREE_TYPE
						  (TREE_OPERAND (exp, 0)))
			       * BITS_PER_UNIT),
			      GET_MODE_BITSIZE (mode)),
			 0, TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			 VOIDmode, 0, 1, int_size_in_bytes (type), 0);
	  else
	    abort ();

	  /* Return the entire union.  */
	  return target;
	}
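
      /* Illustration (GNU C cast-to-union, hypothetical type): a
	 NOP_EXPR whose result type is a UNION_TYPE takes the branch
	 above, storing the operand at the beginning of a temporary of
	 the union type and returning the whole union.  */
#if 0
      union u_example { int i; float f; };
      union u_example make_example (int v) { return (union u_example) v; }
#endif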

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     ro_modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
	return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
	return
	  convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
			 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

      if (modifier == EXPAND_INITIALIZER)
	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */
    plus_expr:
      this_optab = add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || mode == ptr_mode)
	{
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      rtx constant_part;

	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))));
	      op1 = plus_constant (op1, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      rtx constant_part;

	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 EXPAND_SUM);
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Don't go to both_summands if the modifier
		     says it's not right to return a PLUS.  */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		    goto binop2;
		  goto both_summands;
		}
	      /* Use immed_double_const to ensure that the constant is
		 truncated according to the mode of OP1, then sign extended
		 to a HOST_WIDE_INT.  Using the constant directly can result
		 in non-canonical RTL in a 64x32 cross compile.  */
	      constant_part
		= immed_double_const (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)),
				      (HOST_WIDE_INT) 0,
				      TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))));
	      op0 = plus_constant (op0, INTVAL (constant_part));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);

    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}
      /* If adding to a sum including a constant,
	 associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
	  rtx constant_term = const0_rtx;

	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
	  if (temp != 0)
	    op0 = temp;
	  /* Ensure that MULT comes first if there is one.  */
	  else if (GET_CODE (op0) == MULT)
	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

	  /* Let's also eliminate constants from op0 if possible.  */
	  op0 = eliminate_constant_term (op0, &constant_term);

	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
	     their sum should be a constant.  Form it into OP1, since the
	     result we want will then be OP0 + OP1.  */

	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  if (temp != 0)
	    op1 = temp;
	  else
	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
	}

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
	temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
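
      /* Illustration (hypothetical names): under EXPAND_SUM the address
	 arithmetic for &arr_example[10] comes back from the code above as
	 (plus (symbol_ref arr_example) (const_int 40)), letting the whole
	 sum fold into a single address even on machines where a bare
	 SYMBOL_REF is not a valid address.  */
#if 0
      static int arr_example[32];
      int *p_example = &arr_example[10];
#endif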

    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants; here we handle all cases when both operands
	 are constant.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
				 VOIDmode, ro_modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				 VOIDmode, ro_modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}
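
      /* Illustration (GNU C computed labels, hypothetical names): a
	 label-difference initializer is one source of a MINUS of two
	 symbolic constants that the EXPAND_INITIALIZER branch above
	 emits as a relocatable expression instead of run-time code.  */
#if 0
      void threaded_example (void)
      {
	static const long off_example = &&l2_example - &&l1_example;
       l1_example: ;
       l2_example: ;
      }
#endif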
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	{
	  tree negated = fold (build1 (NEGATE_EXPR, type,
				       TREE_OPERAND (exp, 1)));

	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
	    /* If we can't negate the constant in TYPE, leave it alone and
	       expand_binop will negate it for us.  We used to try to do it
	       here in the signed version of TYPE, but that doesn't work
	       on POINTER_TYPEs.  */;
	  else
	    {
	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
	      goto plus_expr;
	    }
	}
      this_optab = sub_optab;
      goto binop;

    case MULT_EXPR:
      preexpand_calls (exp);
      /* If the first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  register tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  /* Apply distributive law if OP0 is x+c.  */
	  if (GET_CODE (op0) == PLUS
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
	    return
	      gen_rtx_PLUS
		(mode,
		 gen_rtx_MULT
		   (mode, XEXP (op0, 0),
		    GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
		 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
			  * INTVAL (XEXP (op0, 1))));

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return
	    gen_rtx_MULT (mode, op0,
			  GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
	}

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			       ? smul_widen_optab : umul_widen_optab);
	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			? umul_widen_optab : smul_widen_optab);
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  goto binop2;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem;
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  htem = expand_mult_highpart_adjust (innermode,
						      gen_highpart (innermode, temp),
						      op0, op1,
						      gen_highpart (innermode, temp),
						      unsignedp);
		  emit_move_insn (gen_highpart (innermode, temp), htem);
		  return temp;
		}
	    }
	}
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
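
      /* Illustration: in C the operands below are HImode values promoted
	 to SImode, which is exactly the NOP_EXPR pattern tested above;
	 on a target with a mulhisi-style widening-multiply pattern this
	 emits one widening multiply instead of two extensions plus a
	 full SImode multiply.  Function name is hypothetical.  */
#if 0
      int widen_mult_example (short a, short b) { return a * b; }
#endif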

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      /* Possible optimization: compute the dividend with EXPAND_SUM;
	 then if the divisor is constant we can optimize the case
	 where some terms of the dividend have coeffs divisible by it.  */
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (0, code, mode, op0, op1, target, unsignedp);

    case RDIV_EXPR:
      this_optab = flodiv_optab;
      goto binop;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR:
    case CEIL_MOD_EXPR:
    case ROUND_MOD_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_divmod (1, code, mode, op0, op1, target, unsignedp);

    case FIX_ROUND_EXPR:
    case FIX_FLOOR_EXPR:
    case FIX_CEIL_EXPR:
      abort ();			/* Not used for C.  */

    case FIX_TRUNC_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      expand_fix (target, op0, unsignedp);
      return target;

    case FLOAT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
      if (target == 0)
	target = gen_reg_rtx (mode);
      /* expand_float can't figure out what to do if FROM has VOIDmode.
	 So give it the correct mode.  With -O, cse will optimize this.  */
      if (GET_MODE (op0) == VOIDmode)
	op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
				op0);
      expand_float (target, op0,
		    TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));

    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (GET_CODE (target) == REG
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = (TREE_UNSIGNED (type)
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (GET_CODE (target) == MEM)
	target = gen_reg_rtx (mode);

      if (target != op0)
	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
	  && ! can_compare_p (GE, mode, ccp_jump))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  target, op1, NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  op1, target, NULL_RTX, op0);
	}
      else
	{
	  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)));
	  do_compare_rtx_and_jump (target, op1, code == MAX_EXPR ? GE : LE,
				   unsignedp, mode, NULL_RTX, 0, NULL_RTX,
				   op0);
	}
      emit_move_insn (target, op1);
      emit_label (op0);
      return target;

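      /* Sketch (hypothetical names) of the fallback above when no min/max
	 instruction exists: MAX_EXPR becomes "keep op0, branch around the
	 move when op0 >= op1", with do_jump_by_parts_greater_rtx doing the
	 comparison word by word for modes wider than the hardware compare.  */
#if 0
      a_example = x_example;			/* target = op0 */
      if (! (x_example >= y_example))		/* compare-and-jump */
	a_example = y_example;			/* target = op1 */
#endif
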
    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc),
	 but that is probably not worthwhile.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);

      /* Could determine the answer when only additive constants differ.  Also,
	 the addition of one can be handled by changing the condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
    case UNORDERED_EXPR:
    case ORDERED_EXPR:
    case UNLT_EXPR:
    case UNLE_EXPR:
    case UNGT_EXPR:
    case UNGE_EXPR:
    case UNEQ_EXPR:
      preexpand_calls (exp);
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  if (temp != original_target)
	    temp = copy_to_reg (temp);

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, 0, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}
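
      /* Illustration: for r = (foo != 0) with r and foo in the same mode,
	 the path above loads foo into the result register, jumps past the
	 next move if it is already zero, and otherwise overwrites it with
	 1 -- avoiding a separate store-flag sequence.  Name hypothetical.  */
#if 0
      int ne_example (int foo) { return foo != 0; }
#endif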

      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0 || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's return
		 value) live across basic blocks, if not optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;

    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, 0);

    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
	 conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	{
	  tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	  tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	  if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
	       && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
		  && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
	    return expand_expr (build1 (NOP_EXPR, type,
					build (COND_EXPR, TREE_TYPE (true),
					       TREE_OPERAND (exp, 0),
					       true, false)),
				target, tmode, modifier);
	}

      {
	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	/* If an arm of the branch requires a cleanup,
	   only that cleanup is performed.  */

	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     ro_modifier);
		return const0_rtx;
	      }

	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
	    if (GET_MODE (op0) == mode)
	      return op0;

	    if (target == 0)
	      target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	    return target;
	  }

	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */

	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (ignore)
	  temp = 0;
	else if (original_target
		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
		     || (singleton && GET_CODE (original_target) == REG
			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			 && original_target == var_rtx (singleton)))
		 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
		 && (! can_conditionally_move_p (mode)
		     || GET_CODE (original_target) == REG
		     || TREE_ADDRESSABLE (type))
#endif
		 && ! (GET_CODE (original_target) == MEM
		       && MEM_VOLATILE_P (original_target)))
	  temp = original_target;
	else if (TREE_ADDRESSABLE (type))
	  abort ();
	else
	  temp = assign_temp (type, 0, 0, 1);

	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
	if (temp && singleton && binary_op
	    && (TREE_CODE (binary_op) == PLUS_EXPR
		|| TREE_CODE (binary_op) == MINUS_EXPR
		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		: integer_onep (TREE_OPERAND (binary_op, 1)))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    rtx result;
	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
			    : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
			    : xor_optab);

	    /* If we had X ? A : A + 1, do this as A + (X == 0).

	       We have to invert the truth value here and then put it
	       back later if do_store_flag fails.  We cannot simply copy
	       TREE_OPERAND (exp, 0) to another variable and modify that
	       because invert_truthvalue can modify the tree pointed to
	       by its argument.  */
	    if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));

	    result = do_store_flag (TREE_OPERAND (exp, 0),
				    (safe_from_p (temp, singleton, 1)
				     ? temp : NULL_RTX),
				    mode, BRANCH_COST <= 1);

	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
	      result = expand_shift (LSHIFT_EXPR, mode, result,
				     build_int_2 (tree_log2
						  (TREE_OPERAND
						   (binary_op, 1)),
						  0),
				     (safe_from_p (temp, singleton, 1)
				      ? temp : NULL_RTX), 0);

	    if (result)
	      {
		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		return expand_binop (mode, boptab, op1, result, temp,
				     unsignedp, OPTAB_LIB_WIDEN);
	      }
	    else if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));
	  }
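
	/* Illustration (hypothetical function): with BRANCH_COST >= 3 the
	   store-flag path above rewrites the conditional add branch-free,
	   as r = a + ((x != 0) << 2) for the source below.  */
#if 0
	int cond_add_example (int x, int a) { return x ? a + 4 : a; }
#endif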
7862
dabf8373 7863 do_pending_stack_adjust ();
bbf6f052
RK
7864 NO_DEFER_POP;
7865 op0 = gen_label_rtx ();
7866
7867 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7868 {
7869 if (temp != 0)
7870 {
7871 /* If the target conflicts with the other operand of the
7872 binary op, we can't use it. Also, we can't use the target
7873 if it is a hard register, because evaluating the condition
7874 might clobber it. */
7875 if ((binary_op
e5e809f4 7876 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7877 || (GET_CODE (temp) == REG
7878 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7879 temp = gen_reg_rtx (mode);
7880 store_expr (singleton, temp, 0);
7881 }
7882 else
906c4e36 7883 expand_expr (singleton,
2937cf87 7884 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7885 if (singleton == TREE_OPERAND (exp, 1))
7886 jumpif (TREE_OPERAND (exp, 0), op0);
7887 else
7888 jumpifnot (TREE_OPERAND (exp, 0), op0);
7889
956d6950 7890 start_cleanup_deferral ();
7891 if (binary_op && temp == 0)
7892 /* Just touch the other operand. */
7893 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 7894 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7895 else if (binary_op)
7896 store_expr (build (TREE_CODE (binary_op), type,
7897 make_tree (type, temp),
7898 TREE_OPERAND (binary_op, 1)),
7899 temp, 0);
7900 else
7901 store_expr (build1 (TREE_CODE (unary_op), type,
7902 make_tree (type, temp)),
7903 temp, 0);
7904 op1 = op0;
bbf6f052 7905 }
7906 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7907 comparison operator. If we have one of these cases, set the
7908 output to A, branch on A (cse will merge these two references),
7909 then set the output to FOO. */
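/* For instance,  A >= 0 ? A : -A  matches the first form: A is
 stored into the target, the branch is taken on A >= 0 (skipping
 the second store), and only when the comparison fails is the
 target overwritten with FOO == -A. */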
7910 else if (temp
7911 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7912 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7913 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7914 TREE_OPERAND (exp, 1), 0)
7915 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7916 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 7917 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7918 {
7919 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7920 temp = gen_reg_rtx (mode);
7921 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7922 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 7923
956d6950 7924 start_cleanup_deferral ();
7925 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7926 op1 = op0;
7927 }
7928 else if (temp
7929 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7930 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7931 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7932 TREE_OPERAND (exp, 2), 0)
7933 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7934 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 7935 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7936 {
7937 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7938 temp = gen_reg_rtx (mode);
7939 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7940 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 7941
956d6950 7942 start_cleanup_deferral ();
7943 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7944 op1 = op0;
7945 }
7946 else
7947 {
7948 op1 = gen_label_rtx ();
7949 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 7950
956d6950 7951 start_cleanup_deferral ();
7952
7953 /* One branch of the cond can be void if it never returns. For
7954 example: A ? throw : E. */
7955 if (temp != 0
7956 && TREE_TYPE (TREE_OPERAND (exp, 1)) != void_type_node)
7957 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7958 else
7959 expand_expr (TREE_OPERAND (exp, 1),
7960 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 7961 end_cleanup_deferral ();
7962 emit_queue ();
7963 emit_jump_insn (gen_jump (op1));
7964 emit_barrier ();
7965 emit_label (op0);
956d6950 7966 start_cleanup_deferral ();
7967 if (temp != 0
7968 && TREE_TYPE (TREE_OPERAND (exp, 2)) != void_type_node)
7969 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7970 else
7971 expand_expr (TREE_OPERAND (exp, 2),
7972 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7973 }
7974
956d6950 7975 end_cleanup_deferral ();
7976
7977 emit_queue ();
7978 emit_label (op1);
7979 OK_DEFER_POP;
5dab5552 7980
7981 return temp;
7982 }
7983
7984 case TARGET_EXPR:
7985 {
7986 /* Something needs to be initialized, but we didn't know
7987 where that thing was when building the tree. For example,
7988 it could be the return value of a function, or a parameter
7989 to a function which is laid down on the stack, or a temporary
7990 variable which must be passed by reference.
7991
7992 We guarantee that the expression will either be constructed
7993 or copied into our original target. */
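/* E.g., in C++, given
       struct S { S (); };
       void g (S);
   the call  g (S ())  wraps its argument in a TARGET_EXPR: the
   temporary must be constructed directly in the stack slot that
   G will receive. */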
7994
7995 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 7996 tree cleanups = NULL_TREE;
5c062816 7997 tree exp1;
7998
7999 if (TREE_CODE (slot) != VAR_DECL)
8000 abort ();
8001
8002 if (! ignore)
8003 target = original_target;
8004
8005 /* Set this here so that if we get a target that refers to a
8006 register variable that's already been used, put_reg_into_stack
8007 knows that it should fix up those uses. */
8008 TREE_USED (slot) = 1;
8009
8010 if (target == 0)
8011 {
8012 if (DECL_RTL (slot) != 0)
8013 {
8014 target = DECL_RTL (slot);
5c062816 8015 /* If we have already expanded the slot, don't do
ac993f4f 8016 it again. (mrs) */
8017 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8018 return target;
ac993f4f 8019 }
8020 else
8021 {
e9a25f70 8022 target = assign_temp (type, 2, 0, 1);
8023 /* All temp slots at this level must not conflict. */
8024 preserve_temp_slots (target);
8025 DECL_RTL (slot) = target;
8026 if (TREE_ADDRESSABLE (slot))
8027 {
8028 TREE_ADDRESSABLE (slot) = 0;
8029 mark_addressable (slot);
8030 }
bbf6f052 8031
8032 /* Since SLOT is not known to the called function
8033 to belong to its stack frame, we must build an explicit
8034 cleanup. This case occurs when we must build up a reference
8035 to pass the reference as an argument. In this case,
8036 it is very likely that such a reference need not be
8037 built here. */
8038
8039 if (TREE_OPERAND (exp, 2) == 0)
8040 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
2a888d4c 8041 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 8042 }
8043 }
8044 else
8045 {
8046 /* This case does occur when expanding a parameter which
8047 needs to be constructed on the stack. The target
8048 is the actual stack address that we want to initialize.
8049 The function we call will perform the cleanup in this case. */
8050
8051 /* If we have already assigned it space, use that space,
8052 not the target that we were passed in, as our target
8053 parameter is only a hint. */
8054 if (DECL_RTL (slot) != 0)
8055 {
8056 target = DECL_RTL (slot);
8057 /* If we have already expanded the slot, don't do
8058 it again. (mrs) */
8059 if (TREE_OPERAND (exp, 1) == NULL_TREE)
8060 return target;
8061 }
8062 else
8063 {
8064 DECL_RTL (slot) = target;
8065 /* If we must have an addressable slot, then make sure that
8066 the RTL that we just stored in slot is OK. */
8067 if (TREE_ADDRESSABLE (slot))
8068 {
8069 TREE_ADDRESSABLE (slot) = 0;
8070 mark_addressable (slot);
8071 }
8072 }
8073 }
8074
4847c938 8075 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
8076 /* Mark it as expanded. */
8077 TREE_OPERAND (exp, 1) = NULL_TREE;
8078
41531e5b 8079 store_expr (exp1, target, 0);
61d6b1cc 8080
e976b8b2 8081 expand_decl_cleanup (NULL_TREE, cleanups);
61d6b1cc 8082
41531e5b 8083 return target;
8084 }
8085
8086 case INIT_EXPR:
8087 {
8088 tree lhs = TREE_OPERAND (exp, 0);
8089 tree rhs = TREE_OPERAND (exp, 1);
8090 tree noncopied_parts = 0;
8091 tree lhs_type = TREE_TYPE (lhs);
8092
8093 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8094 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
8095 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
8096 TYPE_NONCOPIED_PARTS (lhs_type));
8097 while (noncopied_parts != 0)
8098 {
8099 expand_assignment (TREE_VALUE (noncopied_parts),
8100 TREE_PURPOSE (noncopied_parts), 0, 0);
8101 noncopied_parts = TREE_CHAIN (noncopied_parts);
8102 }
8103 return temp;
8104 }
8105
8106 case MODIFY_EXPR:
8107 {
8108 /* If lhs is complex, expand calls in rhs before computing it.
8109 That's so we don't compute a pointer and save it over a call.
8110 If lhs is simple, compute it first so we can give it as a
8111 target if the rhs is just a call. This avoids an extra temp and copy
8112 and that prevents a partial-subsumption which makes bad code.
8113 Actually we could treat component_ref's of vars like vars. */
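/* For instance, for  x = f ()  with X a simple variable, X itself
   can be offered to expand_call as the return target, saving a
   temporary and a copy; for  *p = f ()  the call is pre-expanded
   so that the pointer P need not be kept live across it. */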
8114
8115 tree lhs = TREE_OPERAND (exp, 0);
8116 tree rhs = TREE_OPERAND (exp, 1);
8117 tree noncopied_parts = 0;
8118 tree lhs_type = TREE_TYPE (lhs);
8119
8120 temp = 0;
8121
8122 if (TREE_CODE (lhs) != VAR_DECL
8123 && TREE_CODE (lhs) != RESULT_DECL
8124 && TREE_CODE (lhs) != PARM_DECL
8125 && ! (TREE_CODE (lhs) == INDIRECT_REF
8126 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
8127 preexpand_calls (exp);
8128
8129 /* Check for |= or &= of a bitfield of size one into another bitfield
8130 of size 1. In this case, (unless we need the result of the
8131 assignment) we can do this more efficiently with a
8132 test followed by an assignment, if necessary.
8133
8134 ??? At this point, we can't get a BIT_FIELD_REF here. But if
8135 things change so we do, this code should be enhanced to
8136 support it. */
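/* For instance, given  struct { unsigned a : 1, b : 1; } s;  the
 statement  s.a |= s.b;  comes out as
     if (! s.b) goto L;  s.a = 1;  L: ;
 and  s.a &= s.b;  as
     if (s.b) goto L;  s.a = 0;  L: ;  */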
8137 if (ignore
8138 && TREE_CODE (lhs) == COMPONENT_REF
8139 && (TREE_CODE (rhs) == BIT_IOR_EXPR
8140 || TREE_CODE (rhs) == BIT_AND_EXPR)
8141 && TREE_OPERAND (rhs, 0) == lhs
8142 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
8143 && integer_onep (DECL_SIZE (TREE_OPERAND (lhs, 1)))
8144 && integer_onep (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))))
8145 {
8146 rtx label = gen_label_rtx ();
8147
8148 do_jump (TREE_OPERAND (rhs, 1),
8149 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
8150 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
8151 expand_assignment (lhs, convert (TREE_TYPE (rhs),
8152 (TREE_CODE (rhs) == BIT_IOR_EXPR
8153 ? integer_one_node
8154 : integer_zero_node)),
8155 0, 0);
e7c33f54 8156 do_pending_stack_adjust ();
8157 emit_label (label);
8158 return const0_rtx;
8159 }
8160
8161 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
8162 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
8163 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
8164 TYPE_NONCOPIED_PARTS (lhs_type));
8165
8166 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
8167 while (noncopied_parts != 0)
8168 {
8169 expand_assignment (TREE_PURPOSE (noncopied_parts),
8170 TREE_VALUE (noncopied_parts), 0, 0);
8171 noncopied_parts = TREE_CHAIN (noncopied_parts);
8172 }
8173 return temp;
8174 }
8175
8176 case RETURN_EXPR:
8177 if (!TREE_OPERAND (exp, 0))
8178 expand_null_return ();
8179 else
8180 expand_return (TREE_OPERAND (exp, 0));
8181 return const0_rtx;
8182
8183 case PREINCREMENT_EXPR:
8184 case PREDECREMENT_EXPR:
7b8b9722 8185 return expand_increment (exp, 0, ignore);
8186
8187 case POSTINCREMENT_EXPR:
8188 case POSTDECREMENT_EXPR:
8189 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 8190 return expand_increment (exp, ! ignore, ignore);
8191
8192 case ADDR_EXPR:
987c71d9 8193 /* If nonzero, TEMP will be set to the address of something that might
0f41302f 8194 be a MEM corresponding to a stack slot. */
8195 temp = 0;
8196
8197 /* Are we taking the address of a nested function? */
8198 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 8199 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
8200 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8201 && ! TREE_STATIC (exp))
8202 {
8203 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8204 op0 = force_operand (op0, target);
8205 }
8206 /* If we are taking the address of something erroneous, just
8207 return a zero. */
8208 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8209 return const0_rtx;
8210 else
8211 {
8212 /* We make sure to pass const0_rtx down if we came in with
8213 ignore set, to avoid doing the cleanups twice for something. */
8214 op0 = expand_expr (TREE_OPERAND (exp, 0),
8215 ignore ? const0_rtx : NULL_RTX, VOIDmode,
8216 (modifier == EXPAND_INITIALIZER
8217 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 8218
8219 /* If we are going to ignore the result, OP0 will have been set
8220 to const0_rtx, so just return it. Don't get confused and
8221 think we are taking the address of the constant. */
8222 if (ignore)
8223 return op0;
8224
8225 op0 = protect_from_queue (op0, 0);
8226
8227 /* We would like the object in memory. If it is a constant, we can
8228 have it be statically allocated into memory. For a non-constant,
8229 we need to allocate some memory and store the value into it. */
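/* For instance, when a front end takes the address of a value that
 was computed into a pseudo register (e.g. when binding a reference
 to a temporary), a stack temporary is allocated below, the value
 is stored into it, and the temporary's address is returned. */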
8230
8231 if (CONSTANT_P (op0))
8232 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8233 op0);
987c71d9 8234 else if (GET_CODE (op0) == MEM)
8235 {
8236 mark_temp_addr_taken (op0);
8237 temp = XEXP (op0, 0);
8238 }
896102d0 8239
682ba3a6 8240 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6c8538cc 8241 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8242 {
8243 /* If this object is in a register, it must not
0f41302f 8244 be BLKmode. */
896102d0 8245 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
06089a8b 8246 rtx memloc = assign_temp (inner_type, 1, 1, 1);
896102d0 8247
7a0b7b9a 8248 mark_temp_addr_taken (memloc);
8249 emit_move_insn (memloc, op0);
8250 op0 = memloc;
8251 }
8252
8253 if (GET_CODE (op0) != MEM)
8254 abort ();
8255
8256 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
8257 {
8258 temp = XEXP (op0, 0);
8259#ifdef POINTERS_EXTEND_UNSIGNED
8260 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8261 && mode == ptr_mode)
9fcfcce7 8262 temp = convert_memory_address (ptr_mode, temp);
8263#endif
8264 return temp;
8265 }
987c71d9 8266
8267 op0 = force_operand (XEXP (op0, 0), target);
8268 }
987c71d9 8269
bbf6f052 8270 if (flag_force_addr && GET_CODE (op0) != REG)
8271 op0 = force_reg (Pmode, op0);
8272
8273 if (GET_CODE (op0) == REG
8274 && ! REG_USERVAR_P (op0))
bdb429a5 8275 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)));
8276
8277 /* If we might have had a temp slot, add an equivalent address
8278 for it. */
8279 if (temp != 0)
8280 update_temp_slot_address (temp, op0);
8281
8282#ifdef POINTERS_EXTEND_UNSIGNED
8283 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8284 && mode == ptr_mode)
9fcfcce7 8285 op0 = convert_memory_address (ptr_mode, op0);
8286#endif
8287
8288 return op0;
8289
8290 case ENTRY_VALUE_EXPR:
8291 abort ();
8292
8293 /* COMPLEX type for Extended Pascal & Fortran */
8294 case COMPLEX_EXPR:
8295 {
8296 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 8297 rtx insns;
8298
8299 /* Get the rtx code of the operands. */
8300 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8301 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8302
8303 if (! target)
8304 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8305
6551fa4d 8306 start_sequence ();
7308a047
RS
8307
8308 /* Move the real (op0) and imaginary (op1) parts to their location. */
8309 emit_move_insn (gen_realpart (mode, target), op0);
8310 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 8311
8312 insns = get_insns ();
8313 end_sequence ();
8314
7308a047 8315 /* Complex construction should appear as a single unit. */
8316 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8317 each with a separate pseudo as destination.
8318 It's not correct for flow to treat them as a unit. */
6d6e61ce 8319 if (GET_CODE (target) != CONCAT)
8320 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8321 else
8322 emit_insns (insns);
8323
8324 return target;
8325 }
8326
8327 case REALPART_EXPR:
8328 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8329 return gen_realpart (mode, op0);
8330
8331 case IMAGPART_EXPR:
8332 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8333 return gen_imagpart (mode, op0);
8334
8335 case CONJ_EXPR:
8336 {
62acb978 8337 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 8338 rtx imag_t;
6551fa4d 8339 rtx insns;
8340
8341 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8342
8343 if (! target)
d6a5ac33 8344 target = gen_reg_rtx (mode);
7308a047 8345
6551fa4d 8346 start_sequence ();
8347
8348 /* Store the realpart and the negated imagpart to target. */
8349 emit_move_insn (gen_realpart (partmode, target),
8350 gen_realpart (partmode, op0));
7308a047 8351
8352 imag_t = gen_imagpart (partmode, target);
8353 temp = expand_unop (partmode, neg_optab,
8354 gen_imagpart (partmode, op0), imag_t, 0);
8355 if (temp != imag_t)
8356 emit_move_insn (imag_t, temp);
8357
8358 insns = get_insns ();
8359 end_sequence ();
8360
8361 /* Conjugate should appear as a single unit.
8362 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8363 each with a separate pseudo as destination.
8364 It's not correct for flow to treat them as a unit. */
6d6e61ce 8365 if (GET_CODE (target) != CONCAT)
8366 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8367 else
8368 emit_insns (insns);
8369
8370 return target;
8371 }
8372
8373 case TRY_CATCH_EXPR:
8374 {
8375 tree handler = TREE_OPERAND (exp, 1);
8376
8377 expand_eh_region_start ();
8378
8379 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8380
8381 expand_eh_region_end (handler);
8382
8383 return op0;
8384 }
8385
8386 case TRY_FINALLY_EXPR:
8387 {
8388 tree try_block = TREE_OPERAND (exp, 0);
8389 tree finally_block = TREE_OPERAND (exp, 1);
8390 rtx finally_label = gen_label_rtx ();
8391 rtx done_label = gen_label_rtx ();
8392 rtx return_link = gen_reg_rtx (Pmode);
8393 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8394 (tree) finally_label, (tree) return_link);
8395 TREE_SIDE_EFFECTS (cleanup) = 1;
8396
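/* The code emitted below has roughly this shape:
       <try block, with the cleanup above in scope>
       goto done;
     finally:
       <finally block>
       goto *return_link;
     done:
   where each entry into the FINALLY "subroutine" first stores its
   return address in RETURN_LINK; see GOTO_SUBROUTINE_EXPR below. */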
8397 /* Start a new binding layer that will keep track of all cleanup
8398 actions to be performed. */
8e91754e 8399 expand_start_bindings (2);
8400
8401 target_temp_slot_level = temp_slot_level;
8402
8403 expand_decl_cleanup (NULL_TREE, cleanup);
8404 op0 = expand_expr (try_block, target, tmode, modifier);
8405
8406 preserve_temp_slots (op0);
8407 expand_end_bindings (NULL_TREE, 0, 0);
8408 emit_jump (done_label);
8409 emit_label (finally_label);
8410 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8411 emit_indirect_jump (return_link);
8412 emit_label (done_label);
8413 return op0;
8414 }
8415
8416 case GOTO_SUBROUTINE_EXPR:
8417 {
8418 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8419 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8420 rtx return_address = gen_label_rtx ();
8421 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8422 emit_jump (subr);
8423 emit_label (return_address);
8424 return const0_rtx;
8425 }
8426
8427 case POPDCC_EXPR:
8428 {
8429 rtx dcc = get_dynamic_cleanup_chain ();
38a448ca 8430 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
8431 return const0_rtx;
8432 }
8433
8434 case POPDHC_EXPR:
8435 {
8436 rtx dhc = get_dynamic_handler_chain ();
38a448ca 8437 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
8438 return const0_rtx;
8439 }
8440
8441 case VA_ARG_EXPR:
8442 return expand_builtin_va_arg (TREE_OPERAND (exp, 0), type);
8443
bbf6f052 8444 default:
90764a87 8445 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
8446 }
8447
8448 /* Here to do an ordinary binary operator, generating an instruction
8449 from the optab already placed in `this_optab'. */
8450 binop:
8451 preexpand_calls (exp);
e5e809f4 8452 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
8453 subtarget = 0;
8454 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 8455 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
8456 binop2:
8457 temp = expand_binop (mode, this_optab, op0, op1, target,
8458 unsignedp, OPTAB_LIB_WIDEN);
8459 if (temp == 0)
8460 abort ();
8461 return temp;
8462}
b93a436e 8463\f
8464/* Similar to expand_expr, except that we don't specify a target, target
8465 mode, or modifier and we return the alignment of the inner type. This is
8466 used in cases where it is not necessary to align the result to the
8467 alignment of its type as long as we know the alignment of the result, for
8468 example for comparisons of BLKmode values. */
8469
8470static rtx
8471expand_expr_unaligned (exp, palign)
8472 register tree exp;
729a2125 8473 unsigned int *palign;
8474{
8475 register rtx op0;
8476 tree type = TREE_TYPE (exp);
8477 register enum machine_mode mode = TYPE_MODE (type);
8478
8479 /* Default the alignment we return to that of the type. */
8480 *palign = TYPE_ALIGN (type);
8481
8482 /* The only cases in which we do anything special is if the resulting mode
8483 is BLKmode. */
8484 if (mode != BLKmode)
8485 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8486
8487 switch (TREE_CODE (exp))
8488 {
8489 case CONVERT_EXPR:
8490 case NOP_EXPR:
8491 case NON_LVALUE_EXPR:
8492 /* Conversions between BLKmode values don't change the underlying
8493 alignment or value. */
8494 if (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == BLKmode)
8495 return expand_expr_unaligned (TREE_OPERAND (exp, 0), palign);
8496 break;
8497
8498 case ARRAY_REF:
8499 /* Much of the code for this case is copied directly from expand_expr.
8500 We need to duplicate it here because we will do something different
8501 in the fall-through case, so we need to handle the same exceptions
8502 it does. */
8503 {
8504 tree array = TREE_OPERAND (exp, 0);
8505 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
8506 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
fed3cef0 8507 tree index = convert (sizetype, TREE_OPERAND (exp, 1));
8508 HOST_WIDE_INT i;
8509
8510 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
8511 abort ();
8512
8513 /* Optimize the special-case of a zero lower bound.
8514
8515 We convert the low_bound to sizetype to avoid some problems
8516 with constant folding. (E.g. suppose the lower bound is 1,
8517 and its mode is QI. Without the conversion, (ARRAY
8518 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
fed3cef0 8519 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
8520
8521 if (! integer_zerop (low_bound))
fed3cef0 8522 index = size_diffop (index, convert (sizetype, low_bound));
8523
8524 /* If this is a constant index into a constant array,
8525 just get the value from the array. Handle both the cases when
8526 we have an explicit constructor and when our operand is a variable
8527 that was declared const. */
8528
8529 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array)
8530 && 0 > compare_tree_int (index,
8531 list_length (CONSTRUCTOR_ELTS
8532 (TREE_OPERAND (exp, 0)))))
14a774a9 8533 {
8534 tree elem;
8535
8536 for (elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)),
8537 i = TREE_INT_CST_LOW (index);
8538 elem != 0 && i != 0; i--, elem = TREE_CHAIN (elem))
8539 ;
8540
8541 if (elem)
8542 return expand_expr_unaligned (fold (TREE_VALUE (elem)), palign);
8543 }
8544
8545 else if (optimize >= 1
8546 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
8547 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
8548 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
8549 {
8550 if (TREE_CODE (index) == INTEGER_CST)
8551 {
8552 tree init = DECL_INITIAL (array);
8553
8554 if (TREE_CODE (init) == CONSTRUCTOR)
8555 {
8556 tree elem;
8557
8558 for (elem = CONSTRUCTOR_ELTS (init);
8559 ! tree_int_cst_equal (TREE_PURPOSE (elem), index);
8560 elem = TREE_CHAIN (elem))
8561 ;
14a774a9 8562
8563 if (elem)
8564 return expand_expr_unaligned (fold (TREE_VALUE (elem)),
8565 palign);
8566 }
8567 }
8568 }
8569 }
8570
8571 /* ... fall through ... */
8572
8573 case COMPONENT_REF:
8574 case BIT_FIELD_REF:
8575 /* If the operand is a CONSTRUCTOR, we can just extract the
8576 appropriate field if it is present. Don't do this if we have
8577 already written the data since we want to refer to that copy
8578 and varasm.c assumes that's what we'll do. */
8579 if (TREE_CODE (exp) != ARRAY_REF
8580 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
8581 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
8582 {
8583 tree elt;
8584
8585 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
8586 elt = TREE_CHAIN (elt))
8587 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1))
8588 /* Note that unlike the case in expand_expr, we know this is
8589 BLKmode and hence not an integer. */
8590 return expand_expr_unaligned (TREE_VALUE (elt), palign);
8591 }
8592
8593 {
8594 enum machine_mode mode1;
770ae6cc 8595 HOST_WIDE_INT bitsize, bitpos;
8596 tree offset;
8597 int volatilep = 0;
729a2125 8598 unsigned int alignment;
8599 int unsignedp;
8600 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
8601 &mode1, &unsignedp, &volatilep,
8602 &alignment);
8603
8604 /* If we got back the original object, something is wrong. Perhaps
8605 we are evaluating an expression too early. In any event, don't
8606 infinitely recurse. */
8607 if (tem == exp)
8608 abort ();
8609
8610 op0 = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8611
8612 /* If this is a constant, put it into a register if it is a
8613 legitimate constant and OFFSET is 0 and memory if it isn't. */
8614 if (CONSTANT_P (op0))
8615 {
8616 enum machine_mode inner_mode = TYPE_MODE (TREE_TYPE (tem));
8617
8618 if (inner_mode != BLKmode && LEGITIMATE_CONSTANT_P (op0)
8619 && offset == 0)
8620 op0 = force_reg (inner_mode, op0);
8621 else
8622 op0 = validize_mem (force_const_mem (inner_mode, op0));
8623 }
8624
8625 if (offset != 0)
8626 {
8627 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
8628
8629 /* If this object is in a register, put it into memory.
8630 This case can't occur in C, but can in Ada if we have
8631 unchecked conversion of an expression from a scalar type to
8632 an array or record type. */
8633 if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
8634 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
8635 {
8636 rtx memloc = assign_temp (TREE_TYPE (tem), 1, 1, 1);
8637
8638 mark_temp_addr_taken (memloc);
8639 emit_move_insn (memloc, op0);
8640 op0 = memloc;
8641 }
8642
8643 if (GET_CODE (op0) != MEM)
8644 abort ();
8645
8646 if (GET_MODE (offset_rtx) != ptr_mode)
8647 {
8648#ifdef POINTERS_EXTEND_UNSIGNED
8649 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
8650#else
8651 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
8652#endif
8653 }
8654
8655 op0 = change_address (op0, VOIDmode,
8656 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
8657 force_reg (ptr_mode,
8658 offset_rtx)));
8659 }
8660
8661 /* Don't forget about volatility even if this is a bitfield. */
8662 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
8663 {
8664 op0 = copy_rtx (op0);
8665 MEM_VOLATILE_P (op0) = 1;
8666 }
8667
8668 /* Check the access. */
8669 if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
8670 {
8671 rtx to;
8672 int size;
8673
8674 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
8675 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
8676
8677 /* Check the access right of the pointer. */
8678 if (size > BITS_PER_UNIT)
8679 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
8680 to, ptr_mode, GEN_INT (size / BITS_PER_UNIT),
8681 TYPE_MODE (sizetype),
8682 GEN_INT (MEMORY_USE_RO),
8683 TYPE_MODE (integer_type_node));
8684 }
8685
8686 /* In cases where an aligned union has an unaligned object
8687 as a field, we might be extracting a BLKmode value from
8688 an integer-mode (e.g., SImode) object. Handle this case
8689 by doing the extract into an object as wide as the field
8690 (which we know to be the width of a basic mode), then
8691 storing into memory, and changing the mode to BLKmode.
8692 If we ultimately want the address (EXPAND_CONST_ADDRESS or
8693 EXPAND_INITIALIZER), then we must not copy to a temporary. */
8694 if (mode1 == VOIDmode
8695 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
e1565e65 8696 || (SLOW_UNALIGNED_ACCESS (mode1, alignment)
19caa751 8697 && (TYPE_ALIGN (type) > alignment
8698 || bitpos % TYPE_ALIGN (type) != 0)))
8699 {
8700 enum machine_mode ext_mode = mode_for_size (bitsize, MODE_INT, 1);
8701
8702 if (ext_mode == BLKmode)
8703 {
8704 /* In this case, BITPOS must start at a byte boundary. */
8705 if (GET_CODE (op0) != MEM
8706 || bitpos % BITS_PER_UNIT != 0)
8707 abort ();
8708
8709 op0 = change_address (op0, VOIDmode,
8710 plus_constant (XEXP (op0, 0),
8711 bitpos / BITS_PER_UNIT));
8712 }
8713 else
8714 {
8715 rtx new = assign_stack_temp (ext_mode,
8716 bitsize / BITS_PER_UNIT, 0);
8717
8718 op0 = extract_bit_field (validize_mem (op0), bitsize, bitpos,
8719 unsignedp, NULL_RTX, ext_mode,
8720 ext_mode, alignment,
8721 int_size_in_bytes (TREE_TYPE (tem)));
8722
8723 /* If the result is a record type and BITSIZE is narrower than
8724 the mode of OP0, an integral mode, and this is a big endian
8725 machine, we must put the field into the high-order bits. */
8726 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
8727 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
8728 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
8729 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
8730 size_int (GET_MODE_BITSIZE
8731 (GET_MODE (op0))
8732 - bitsize),
8733 op0, 1);
8734
8735
8736 emit_move_insn (new, op0);
8737 op0 = copy_rtx (new);
8738 PUT_MODE (op0, BLKmode);
8739 }
8740 }
8741 else
8742 /* Get a reference to just this component. */
8743 op0 = change_address (op0, mode1,
8744 plus_constant (XEXP (op0, 0),
8745 (bitpos / BITS_PER_UNIT)));
8746
8747 MEM_ALIAS_SET (op0) = get_alias_set (exp);
8748
8749 /* Adjust the alignment in case the bit position is not
8750 a multiple of the alignment of the inner object. */
8751 while (bitpos % alignment != 0)
8752 alignment >>= 1;
8753
8754 if (GET_CODE (XEXP (op0, 0)) == REG)
bdb429a5 8755 mark_reg_pointer (XEXP (op0, 0), alignment);
8756
8757 MEM_IN_STRUCT_P (op0) = 1;
8758 MEM_VOLATILE_P (op0) |= volatilep;
8759
8760 *palign = alignment;
8761 return op0;
8762 }
8763
8764 default:
8765 break;
8766
8767 }
8768
8769 return expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
8770}
8771\f
8772/* Return the tree node if ARG corresponds to a string constant, or zero
8773 if it doesn't. If we return non-zero, set *PTR_OFFSET to the offset
8774 in bytes within the string that ARG is accessing. The type of the
8775 offset will be `sizetype'. */
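/* For instance, for the argument  "hello" + 2  (a PLUS_EXPR of an
   ADDR_EXPR of a STRING_CST and a constant), the STRING_CST for
   "hello" is returned and *PTR_OFFSET is set to (sizetype) 2. */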
b93a436e 8776
28f4ec01 8777tree
8778string_constant (arg, ptr_offset)
8779 tree arg;
8780 tree *ptr_offset;
8781{
8782 STRIP_NOPS (arg);
8783
8784 if (TREE_CODE (arg) == ADDR_EXPR
8785 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8786 {
fed3cef0 8787 *ptr_offset = size_zero_node;
8788 return TREE_OPERAND (arg, 0);
8789 }
8790 else if (TREE_CODE (arg) == PLUS_EXPR)
8791 {
8792 tree arg0 = TREE_OPERAND (arg, 0);
8793 tree arg1 = TREE_OPERAND (arg, 1);
8794
8795 STRIP_NOPS (arg0);
8796 STRIP_NOPS (arg1);
8797
8798 if (TREE_CODE (arg0) == ADDR_EXPR
8799 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 8800 {
fed3cef0 8801 *ptr_offset = convert (sizetype, arg1);
b93a436e 8802 return TREE_OPERAND (arg0, 0);
bbf6f052 8803 }
8804 else if (TREE_CODE (arg1) == ADDR_EXPR
8805 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 8806 {
fed3cef0 8807 *ptr_offset = convert (sizetype, arg0);
b93a436e 8808 return TREE_OPERAND (arg1, 0);
bbf6f052 8809 }
b93a436e 8810 }
ca695ac9 8811
8812 return 0;
8813}
ca695ac9 8814\f
8815/* Expand code for a post- or pre-increment or decrement
8816 and return the RTX for the result.
8817 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
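/* For instance,  y = x++;  is a POSTINCREMENT_EXPR expanded with
 POST == 1: the expression yields the old value of X, so a copy may
 have to be saved before the increment.  A statement  ++x;  whose
 value is ignored can often be done as a single add in place. */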
1499e0a8 8818
8819static rtx
8820expand_increment (exp, post, ignore)
8821 register tree exp;
8822 int post, ignore;
ca695ac9 8823{
8824 register rtx op0, op1;
8825 register rtx temp, value;
8826 register tree incremented = TREE_OPERAND (exp, 0);
8827 optab this_optab = add_optab;
8828 int icode;
8829 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
8830 int op0_is_copy = 0;
8831 int single_insn = 0;
8832 /* 1 means we can't store into OP0 directly,
8833 because it is a subreg narrower than a word,
8834 and we don't dare clobber the rest of the word. */
8835 int bad_subreg = 0;
1499e0a8 8836
8837 /* Stabilize any component ref that might need to be
8838 evaluated more than once below. */
8839 if (!post
8840 || TREE_CODE (incremented) == BIT_FIELD_REF
8841 || (TREE_CODE (incremented) == COMPONENT_REF
8842 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
8843 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
8844 incremented = stabilize_reference (incremented);
8845 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
8846 ones into save exprs so that they don't accidentally get evaluated
8847 more than once by the code below. */
8848 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
8849 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
8850 incremented = save_expr (incremented);
e9a25f70 8851
8852 /* Compute the operands as RTX.
8853 Note whether OP0 is the actual lvalue or a copy of it:
8854 I believe it is a copy iff it is a register or subreg
8855 and insns were generated in computing it. */
e9a25f70 8856
8857 temp = get_last_insn ();
8858 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
e9a25f70 8859
8860 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
8861 in place but instead must do sign- or zero-extension during assignment,
8862 so we copy it into a new register and let the code below use it as
8863 a copy.
e9a25f70 8864
8865 Note that we can safely modify this SUBREG since it is known not to be
8866 shared (it was made by the expand_expr call above). */
8867
8868 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
8869 {
8870 if (post)
8871 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
8872 else
8873 bad_subreg = 1;
8874 }
8875 else if (GET_CODE (op0) == SUBREG
8876 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
8877 {
8878 /* We cannot increment this SUBREG in place. If we are
8879 post-incrementing, get a copy of the old value. Otherwise,
8880 just mark that we cannot increment in place. */
8881 if (post)
8882 op0 = copy_to_reg (op0);
8883 else
8884 bad_subreg = 1;
8885 }
8886
8887 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
8888 && temp != get_last_insn ());
8889 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
8890 EXPAND_MEMORY_USE_BAD);
1499e0a8 8891
8892 /* Decide whether incrementing or decrementing. */
8893 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
8894 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8895 this_optab = sub_optab;
8896
8897 /* Convert decrement by a constant into a negative increment. */
8898 if (this_optab == sub_optab
8899 && GET_CODE (op1) == CONST_INT)
ca695ac9 8900 {
8901 op1 = GEN_INT (- INTVAL (op1));
8902 this_optab = add_optab;
ca695ac9 8903 }
1499e0a8 8904
8905 /* For a preincrement, see if we can do this with a single instruction. */
8906 if (!post)
8907 {
8908 icode = (int) this_optab->handlers[(int) mode].insn_code;
8909 if (icode != (int) CODE_FOR_nothing
8910 /* Make sure that OP0 is valid for operands 0 and 1
8911 of the insn we want to queue. */
8912 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8913 && (*insn_data[icode].operand[1].predicate) (op0, mode)
8914 && (*insn_data[icode].operand[2].predicate) (op1, mode))
8915 single_insn = 1;
8916 }
bbf6f052 8917
8918 /* If OP0 is not the actual lvalue, but rather a copy in a register,
8919 then we cannot just increment OP0. We must therefore contrive to
8920 increment the original value. Then, for postincrement, we can return
8921 OP0 since it is a copy of the old value. For preincrement, expand here
8922 unless we can do it with a single insn.
bbf6f052 8923
8924 Likewise if storing directly into OP0 would clobber high bits
8925 we need to preserve (bad_subreg). */
8926 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 8927 {
8928 /* This is the easiest way to increment the value wherever it is.
8929 Problems with multiple evaluation of INCREMENTED are prevented
8930 because either (1) it is a component_ref or preincrement,
8931 in which case it was stabilized above, or (2) it is an array_ref
8932 with constant index in an array in a register, which is
8933 safe to reevaluate. */
8934 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
8935 || TREE_CODE (exp) == PREDECREMENT_EXPR)
8936 ? MINUS_EXPR : PLUS_EXPR),
8937 TREE_TYPE (exp),
8938 incremented,
8939 TREE_OPERAND (exp, 1));
a358cee0 8940
8941 while (TREE_CODE (incremented) == NOP_EXPR
8942 || TREE_CODE (incremented) == CONVERT_EXPR)
8943 {
8944 newexp = convert (TREE_TYPE (incremented), newexp);
8945 incremented = TREE_OPERAND (incremented, 0);
8946 }
bbf6f052 8947
8948 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
8949 return post ? op0 : temp;
8950 }
bbf6f052 8951
8952 if (post)
8953 {
8954 /* We have a true reference to the value in OP0.
8955 If there is an insn to add or subtract in this mode, queue it.
8956 Queueing the increment insn avoids the register shuffling
8957 that often results if we must increment now and first save
8958 the old value for subsequent use. */
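/* For instance, in  a[i++]  the address computation can use the old
 I directly; the queued  I = I + 1  is only emitted when the queue
 is flushed, instead of incrementing immediately and saving the old
 value in a scratch register. */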
bbf6f052 8959
8960#if 0 /* Turned off to avoid making extra insn for indexed memref. */
8961 op0 = stabilize (op0);
8962#endif
41dfd40c 8963
8964 icode = (int) this_optab->handlers[(int) mode].insn_code;
8965 if (icode != (int) CODE_FOR_nothing
8966 /* Make sure that OP0 is valid for operands 0 and 1
8967 of the insn we want to queue. */
8968 && (*insn_data[icode].operand[0].predicate) (op0, mode)
8969 && (*insn_data[icode].operand[1].predicate) (op0, mode))
b93a436e 8970 {
a995e389 8971 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 8972 op1 = force_reg (mode, op1);
bbf6f052 8973
8974 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
8975 }
8976 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
8977 {
8978 rtx addr = (general_operand (XEXP (op0, 0), mode)
8979 ? force_reg (Pmode, XEXP (op0, 0))
8980 : copy_to_reg (XEXP (op0, 0)));
8981 rtx temp, result;
ca695ac9 8982
8983 op0 = change_address (op0, VOIDmode, addr);
8984 temp = force_reg (GET_MODE (op0), op0);
a995e389 8985 if (! (*insn_data[icode].operand[2].predicate) (op1, mode))
b93a436e 8986 op1 = force_reg (mode, op1);
ca695ac9 8987
8988 /* The increment queue is LIFO, thus we have to `queue'
8989 the instructions in reverse order. */
8990 enqueue_insn (op0, gen_move_insn (op0, temp));
8991 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
8992 return result;
8993 }
8994 }
ca695ac9 8995
8996 /* Preincrement, or we can't increment with one simple insn. */
8997 if (post)
8998 /* Save a copy of the value before inc or dec, to return it later. */
8999 temp = value = copy_to_reg (op0);
9000 else
9001 /* Arrange to return the incremented value. */
9002 /* Copy the rtx because expand_binop will protect from the queue,
9003 and the results of that would be invalid for us to return
9004 if our caller does emit_queue before using our result. */
9005 temp = copy_rtx (value = op0);
bbf6f052 9006
9007 /* Increment however we can. */
9008 op1 = expand_binop (mode, this_optab, value, op1,
7d384cc0 9009 current_function_check_memory_usage ? NULL_RTX : op0,
9010 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
9011 /* Make sure the value is stored into OP0. */
9012 if (op1 != op0)
9013 emit_move_insn (op0, op1);
5718612f 9014
9015 return temp;
9016}
9017\f
9018/* Expand all function calls contained within EXP, innermost ones first.
9019 But don't look within expressions that have sequence points.
9020 For each CALL_EXPR, record the rtx for its value
9021 in the CALL_EXPR_RTL field. */
5718612f 9022
9023static void
9024preexpand_calls (exp)
9025 tree exp;
9026{
9027 register int nops, i;
9028 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5718612f 9029
9030 if (! do_preexpand_calls)
9031 return;
5718612f 9032
b93a436e 9033 /* Only expressions and references can contain calls. */
bbf6f052 9034
9035 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
9036 return;
bbf6f052 9037
9038 switch (TREE_CODE (exp))
9039 {
9040 case CALL_EXPR:
9041 /* Do nothing if already expanded. */
9042 if (CALL_EXPR_RTL (exp) != 0
9043 /* Do nothing if the call returns a variable-sized object. */
9044 || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
9045 /* Do nothing to built-in functions. */
9046 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
9047 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
9048 == FUNCTION_DECL)
9049 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
9050 return;
bbf6f052 9051
9052 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
9053 return;
bbf6f052 9054
9055 case COMPOUND_EXPR:
9056 case COND_EXPR:
9057 case TRUTH_ANDIF_EXPR:
9058 case TRUTH_ORIF_EXPR:
9059 /* If we find one of these, then we can be sure
9060 the adjust will be done for it (since it makes jumps).
9061 Do it now, so that if this is inside an argument
9062 of a function, we don't get the stack adjustment
9063 after some other args have already been pushed. */
9064 do_pending_stack_adjust ();
9065 return;
bbf6f052 9066
9067 case BLOCK:
9068 case RTL_EXPR:
9069 case WITH_CLEANUP_EXPR:
9070 case CLEANUP_POINT_EXPR:
9071 case TRY_CATCH_EXPR:
9072 return;
bbf6f052 9073
9074 case SAVE_EXPR:
9075 if (SAVE_EXPR_RTL (exp) != 0)
9076 return;
9077
9078 default:
9079 break;
ca695ac9 9080 }
bbf6f052 9081
9082 nops = tree_code_length[(int) TREE_CODE (exp)];
9083 for (i = 0; i < nops; i++)
9084 if (TREE_OPERAND (exp, i) != 0)
9085 {
9086 if (TREE_CODE (exp) == TARGET_EXPR && i == 2)
9087 /* We don't need to preexpand the cleanup for a TARGET_EXPR.
9088 It doesn't happen before the call is made. */
9089 ;
9090 else
9091 {
9092 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
9093 if (type == 'e' || type == '<' || type == '1' || type == '2'
9094 || type == 'r')
9095 preexpand_calls (TREE_OPERAND (exp, i));
9096 }
9097 }
9098}
9099\f
9100/* At the start of a function, record that we have no previously-pushed
9101 arguments waiting to be popped. */
bbf6f052 9102
9103void
9104init_pending_stack_adjust ()
9105{
9106 pending_stack_adjust = 0;
9107}
bbf6f052 9108
b93a436e 9109/* When exiting from function, if safe, clear out any pending stack adjust
9110 so the adjustment won't get done.
9111
9112 Note, if the current function calls alloca, then it must have a
9113 frame pointer regardless of the value of flag_omit_frame_pointer. */
bbf6f052 9114
9115void
9116clear_pending_stack_adjust ()
9117{
9118#ifdef EXIT_IGNORE_STACK
9119 if (optimize > 0
9120 && (! flag_omit_frame_pointer || current_function_calls_alloca)
9121 && EXIT_IGNORE_STACK
9122 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
9123 && ! flag_inline_functions)
9124 pending_stack_adjust = 0;
9125#endif
9126}
bbf6f052 9127
9128/* Pop any previously-pushed arguments that have not been popped yet. */
9129
9130void
9131do_pending_stack_adjust ()
9132{
9133 if (inhibit_defer_pop == 0)
ca695ac9 9134 {
9135 if (pending_stack_adjust != 0)
9136 adjust_stack (GEN_INT (pending_stack_adjust));
9137 pending_stack_adjust = 0;
bbf6f052 9138 }
9139}
9140\f
b93a436e 9141/* Expand conditional expressions. */
bbf6f052 9142
9143/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
9144 LABEL is an rtx of code CODE_LABEL, in this function and all the
9145 functions here. */
bbf6f052 9146
9147void
9148jumpifnot (exp, label)
ca695ac9 9149 tree exp;
b93a436e 9150 rtx label;
bbf6f052 9151{
9152 do_jump (exp, label, NULL_RTX);
9153}
bbf6f052 9154
b93a436e 9155/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
ca695ac9 9156
9157void
9158jumpif (exp, label)
9159 tree exp;
9160 rtx label;
9161{
9162 do_jump (exp, NULL_RTX, label);
9163}
ca695ac9 9164
9165/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
9166 the result is zero, or IF_TRUE_LABEL if the result is one.
9167 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
9168 meaning fall through in that case.
ca695ac9 9169
9170 do_jump always does any pending stack adjust except when it does not
9171 actually perform a jump. An example where there is no jump
9172 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
ca695ac9 9173
9174 This function is responsible for optimizing cases such as
9175 &&, || and comparison operators in EXP. */
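/* For instance, for the condition  a && b  (TRUTH_ANDIF_EXPR) this
   jumps to IF_FALSE_LABEL as soon as A is known to be zero, and only
   then evaluates B; no boolean value is ever materialized in a
   register. */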
5718612f 9176
9177void
9178do_jump (exp, if_false_label, if_true_label)
9179 tree exp;
9180 rtx if_false_label, if_true_label;
9181{
9182 register enum tree_code code = TREE_CODE (exp);
9183 /* Some cases need to create a label to jump to
9184 in order to properly fall through.
9185 These cases set DROP_THROUGH_LABEL nonzero. */
9186 rtx drop_through_label = 0;
9187 rtx temp;
9188 int i;
9189 tree type;
9190 enum machine_mode mode;
ca695ac9 9191
9192#ifdef MAX_INTEGER_COMPUTATION_MODE
9193 check_max_integer_computation_mode (exp);
9194#endif
9195
b93a436e 9196 emit_queue ();
ca695ac9 9197
b93a436e 9198 switch (code)
ca695ac9 9199 {
b93a436e 9200 case ERROR_MARK:
ca695ac9 9201 break;
bbf6f052 9202
9203 case INTEGER_CST:
9204 temp = integer_zerop (exp) ? if_false_label : if_true_label;
9205 if (temp)
9206 emit_jump (temp);
9207 break;
bbf6f052 9208
9209#if 0
9210 /* This is not true with #pragma weak */
9211 case ADDR_EXPR:
9212 /* The address of something can never be zero. */
9213 if (if_true_label)
9214 emit_jump (if_true_label);
9215 break;
9216#endif
bbf6f052 9217
9218 case NOP_EXPR:
9219 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
9220 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
9221 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
9222 goto normal;
9223 case CONVERT_EXPR:
9224 /* If we are narrowing the operand, we have to do the compare in the
9225 narrower mode. */
9226 if ((TYPE_PRECISION (TREE_TYPE (exp))
9227 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
9228 goto normal;
9229 case NON_LVALUE_EXPR:
9230 case REFERENCE_EXPR:
9231 case ABS_EXPR:
9232 case NEGATE_EXPR:
9233 case LROTATE_EXPR:
9234 case RROTATE_EXPR:
9235 /* These cannot change zero->non-zero or vice versa. */
9236 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9237 break;
bbf6f052 9238
9239 case WITH_RECORD_EXPR:
9240 /* Put the object on the placeholder list, recurse through our first
9241 operand, and pop the list. */
9242 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
9243 placeholder_list);
9244 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9245 placeholder_list = TREE_CHAIN (placeholder_list);
9246 break;
9247
9248#if 0
9249 /* This is never less insns than evaluating the PLUS_EXPR followed by
9250 a test and can be longer if the test is eliminated. */
9251 case PLUS_EXPR:
9252 /* Reduce to minus. */
9253 exp = build (MINUS_EXPR, TREE_TYPE (exp),
9254 TREE_OPERAND (exp, 0),
9255 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
9256 TREE_OPERAND (exp, 1))));
9257 /* Process as MINUS. */
ca695ac9 9258#endif
bbf6f052 9259
9260 case MINUS_EXPR:
9261 /* Non-zero iff operands of minus differ. */
9262 do_compare_and_jump (build (NE_EXPR, TREE_TYPE (exp),
9263 TREE_OPERAND (exp, 0),
9264 TREE_OPERAND (exp, 1)),
9265 NE, NE, if_false_label, if_true_label);
b93a436e 9266 break;
bbf6f052 9267
9268 case BIT_AND_EXPR:
9269 /* If we are AND'ing with a small constant, do this comparison in the
9270 smallest type that fits. If the machine doesn't have comparisons
9271 that small, it will be converted back to the wider comparison.
9272 This helps if we are testing the sign bit of a narrower object.
9273 combine can't do this for us because it can't know whether a
9274 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
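/* For instance,  x & 0x80  with X a char-sized object can be tested
 as a QImode sign-bit comparison, rather than first widening X to
 full word mode. */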
bbf6f052 9275
9276 if (! SLOW_BYTE_ACCESS
9277 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
9278 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
05bccae2 9279 && (i = tree_floor_log2 (TREE_OPERAND (exp, 1))) >= 0
b93a436e
JL
9280 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
9281 && (type = type_for_mode (mode, 1)) != 0
9282 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9283 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9284 != CODE_FOR_nothing))
9285 {
9286 do_jump (convert (type, exp), if_false_label, if_true_label);
9287 break;
9288 }
9289 goto normal;
bbf6f052 9290
9291 case TRUTH_NOT_EXPR:
9292 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9293 break;
bbf6f052 9294
9295 case TRUTH_ANDIF_EXPR:
9296 if (if_false_label == 0)
9297 if_false_label = drop_through_label = gen_label_rtx ();
9298 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
9299 start_cleanup_deferral ();
9300 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9301 end_cleanup_deferral ();
9302 break;
bbf6f052 9303
9304 case TRUTH_ORIF_EXPR:
9305 if (if_true_label == 0)
9306 if_true_label = drop_through_label = gen_label_rtx ();
9307 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
9308 start_cleanup_deferral ();
9309 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9310 end_cleanup_deferral ();
9311 break;
bbf6f052 9312
9313 case COMPOUND_EXPR:
9314 push_temp_slots ();
9315 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
9316 preserve_temp_slots (NULL_RTX);
9317 free_temp_slots ();
9318 pop_temp_slots ();
9319 emit_queue ();
9320 do_pending_stack_adjust ();
9321 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
9322 break;
bbf6f052 9323
9324 case COMPONENT_REF:
9325 case BIT_FIELD_REF:
9326 case ARRAY_REF:
9327 {
9328 HOST_WIDE_INT bitsize, bitpos;
9329 int unsignedp;
9330 enum machine_mode mode;
9331 tree type;
9332 tree offset;
9333 int volatilep = 0;
729a2125 9334 unsigned int alignment;
bbf6f052 9335
9336 /* Get description of this reference. We don't actually care
9337 about the underlying object here. */
9338 get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
9339 &unsignedp, &volatilep, &alignment);
bbf6f052 9340
9341 type = type_for_size (bitsize, unsignedp);
9342 if (! SLOW_BYTE_ACCESS
9343 && type != 0 && bitsize >= 0
9344 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
9345 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
9346 != CODE_FOR_nothing))
9347 {
9348 do_jump (convert (type, exp), if_false_label, if_true_label);
9349 break;
9350 }
9351 goto normal;
9352 }
bbf6f052 9353
9354 case COND_EXPR:
9355 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
9356 if (integer_onep (TREE_OPERAND (exp, 1))
9357 && integer_zerop (TREE_OPERAND (exp, 2)))
9358 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
bbf6f052 9359
9360 else if (integer_zerop (TREE_OPERAND (exp, 1))
9361 && integer_onep (TREE_OPERAND (exp, 2)))
9362 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
bbf6f052 9363
9364 else
9365 {
9366 register rtx label1 = gen_label_rtx ();
9367 drop_through_label = gen_label_rtx ();
bbf6f052 9368
b93a436e 9369 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
bbf6f052 9370
9371 start_cleanup_deferral ();
9372 /* Now the THEN-expression. */
9373 do_jump (TREE_OPERAND (exp, 1),
9374 if_false_label ? if_false_label : drop_through_label,
9375 if_true_label ? if_true_label : drop_through_label);
9376 /* In case the do_jump just above never jumps. */
9377 do_pending_stack_adjust ();
9378 emit_label (label1);
bbf6f052 9379
9380 /* Now the ELSE-expression. */
9381 do_jump (TREE_OPERAND (exp, 2),
9382 if_false_label ? if_false_label : drop_through_label,
9383 if_true_label ? if_true_label : drop_through_label);
9384 end_cleanup_deferral ();
9385 }
9386 break;
bbf6f052 9387
9388 case EQ_EXPR:
9389 {
9390 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 9391
9392 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9393 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9394 {
9395 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9396 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9397 do_jump
9398 (fold
9399 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
9400 fold (build (EQ_EXPR, TREE_TYPE (exp),
9401 fold (build1 (REALPART_EXPR,
9402 TREE_TYPE (inner_type),
9403 exp0)),
9404 fold (build1 (REALPART_EXPR,
9405 TREE_TYPE (inner_type),
9406 exp1)))),
9407 fold (build (EQ_EXPR, TREE_TYPE (exp),
9408 fold (build1 (IMAGPART_EXPR,
9409 TREE_TYPE (inner_type),
9410 exp0)),
9411 fold (build1 (IMAGPART_EXPR,
9412 TREE_TYPE (inner_type),
9413 exp1)))))),
9414 if_false_label, if_true_label);
9415 }
9416
9417 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9418 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
9419
b93a436e 9420 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
1eb8759b 9421 && !can_compare_p (EQ, TYPE_MODE (inner_type), ccp_jump))
9422 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
9423 else
b30f05db 9424 do_compare_and_jump (exp, EQ, EQ, if_false_label, if_true_label);
9425 break;
9426 }
bbf6f052 9427
9428 case NE_EXPR:
9429 {
9430 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 9431
9432 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
9433 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
9434 {
9435 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
9436 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
9437 do_jump
9438 (fold
9439 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
9440 fold (build (NE_EXPR, TREE_TYPE (exp),
9441 fold (build1 (REALPART_EXPR,
9442 TREE_TYPE (inner_type),
9443 exp0)),
9444 fold (build1 (REALPART_EXPR,
9445 TREE_TYPE (inner_type),
9446 exp1)))),
9447 fold (build (NE_EXPR, TREE_TYPE (exp),
9448 fold (build1 (IMAGPART_EXPR,
9449 TREE_TYPE (inner_type),
9450 exp0)),
9451 fold (build1 (IMAGPART_EXPR,
9452 TREE_TYPE (inner_type),
9453 exp1)))))),
9454 if_false_label, if_true_label);
9455 }
9456
9457 else if (integer_zerop (TREE_OPERAND (exp, 1)))
9458 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
9459
b93a436e 9460 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
1eb8759b 9461 && !can_compare_p (NE, TYPE_MODE (inner_type), ccp_jump))
9462 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
9463 else
b30f05db 9464 do_compare_and_jump (exp, NE, NE, if_false_label, if_true_label);
b93a436e
JL
9465 break;
9466 }

    case LT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, LT, LTU, if_false_label, if_true_label);
      break;

    case LE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (LE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, LE, LEU, if_false_label, if_true_label);
      break;

    case GT_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GT, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        do_compare_and_jump (exp, GT, GTU, if_false_label, if_true_label);
      break;

    case GE_EXPR:
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && ! can_compare_p (GE, mode, ccp_jump))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        do_compare_and_jump (exp, GE, GEU, if_false_label, if_true_label);
      break;

    case UNORDERED_EXPR:
    case ORDERED_EXPR:
      {
        enum rtx_code cmp, rcmp;
        int do_rev;

        if (code == UNORDERED_EXPR)
          cmp = UNORDERED, rcmp = ORDERED;
        else
          cmp = ORDERED, rcmp = UNORDERED;
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));

        do_rev = 0;
        if (! can_compare_p (cmp, mode, ccp_jump)
            && (can_compare_p (rcmp, mode, ccp_jump)
                /* If the target doesn't provide either UNORDERED or ORDERED
                   comparisons, canonicalize on UNORDERED for the library.  */
                || rcmp == UNORDERED))
          do_rev = 1;

        if (! do_rev)
          do_compare_and_jump (exp, cmp, cmp, if_false_label, if_true_label);
        else
          do_compare_and_jump (exp, rcmp, rcmp, if_true_label, if_false_label);
      }
      break;
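      /* Illustrative note (not compiler code): UNORDERED (x, y) is true
         exactly when at least one operand is a NaN, i.e. when none of
         x < y, x == y, and x > y holds; ORDERED is its complement.  */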

    {
      enum rtx_code rcode1;
      enum tree_code tcode2;

      case UNLT_EXPR:
        rcode1 = UNLT;
        tcode2 = LT_EXPR;
        goto unordered_bcc;
      case UNLE_EXPR:
        rcode1 = UNLE;
        tcode2 = LE_EXPR;
        goto unordered_bcc;
      case UNGT_EXPR:
        rcode1 = UNGT;
        tcode2 = GT_EXPR;
        goto unordered_bcc;
      case UNGE_EXPR:
        rcode1 = UNGE;
        tcode2 = GE_EXPR;
        goto unordered_bcc;
      case UNEQ_EXPR:
        rcode1 = UNEQ;
        tcode2 = EQ_EXPR;
        goto unordered_bcc;

      unordered_bcc:
        mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
        if (can_compare_p (rcode1, mode, ccp_jump))
          do_compare_and_jump (exp, rcode1, rcode1, if_false_label,
                               if_true_label);
        else
          {
            tree op0 = save_expr (TREE_OPERAND (exp, 0));
            tree op1 = save_expr (TREE_OPERAND (exp, 1));
            tree cmp0, cmp1;

            /* If the target doesn't support combined unordered
               compares, decompose into UNORDERED + comparison.  */
            cmp0 = fold (build (UNORDERED_EXPR, TREE_TYPE (exp), op0, op1));
            cmp1 = fold (build (tcode2, TREE_TYPE (exp), op0, op1));
            exp = build (TRUTH_ORIF_EXPR, TREE_TYPE (exp), cmp0, cmp1);
            do_jump (exp, if_false_label, if_true_label);
          }
    }
    break;
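    /* Illustrative note (not compiler code): the fallback above rewrites,
       e.g., UNLT (a, b) as "UNORDERED (a, b) || a < b", which in C terms
       is "isnan (a) || isnan (b) || a < b".  */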

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      /* Do any postincrements in the expression that was tested.  */
      emit_queue ();

      if (GET_CODE (temp) == CONST_INT || GET_CODE (temp) == LABEL_REF)
        {
          rtx target = temp == const0_rtx ? if_false_label : if_true_label;
          if (target)
            emit_jump (target);
        }
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && ! can_compare_p (NE, GET_MODE (temp), ccp_jump))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        do_compare_rtx_and_jump (temp, CONST0_RTX (GET_MODE (temp)),
                                 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                 GET_MODE (temp), NULL_RTX, 0,
                                 if_false_label, if_true_label);
      else
        abort ();
    }

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}

/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));

  do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                                if_true_label);
}

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label,
                              if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      do_compare_rtx_and_jump (op0_word, op1_word, GT,
                               (unsignedp || i > 0), word_mode, NULL_RTX, 0,
                               NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      do_compare_rtx_and_jump (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
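
/* Standalone sketch (illustrative only, not part of the compiler): the
   word-at-a-time greater-than test emitted above, written out as plain C
   for a value split into NWORDS words stored high-order word first.
   Only the high-order word honors the signedness of the comparison; all
   lower words are compared unsigned.  */
#if 0
static int
wide_gt_sketch (const unsigned int *op0, const unsigned int *op1,
                int nwords, int unsignedp)
{
  int i;

  for (i = 0; i < nwords; i++)
    {
      /* "Greater" on this word decides the whole comparison.  */
      if (i == 0 && ! unsignedp
          ? (int) op0[i] > (int) op1[i]
          : op0[i] > op1[i])
        return 1;
      /* Consider lower words only if these are equal.  */
      if (op0[i] != op1[i])
        return 0;
    }
  return 0;
}
#endif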

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, mode),
                             operand_subword_force (op1, i, mode),
                             EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                             word_mode, NULL_RTX, 0, if_false_label,
                             NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
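
/* Standalone sketch (illustrative only, not part of the compiler): the
   multiword equality test above, as plain C.  The first mismatching
   word pair sends control to the false label; if none mismatches,
   control reaches the true label.  */
#if 0
static int
wide_eq_sketch (const unsigned int *op0, const unsigned int *op1, int nwords)
{
  int i;

  for (i = 0; i < nwords; i++)
    if (op0[i] != op1[i])
      return 0;         /* jump to if_false_label */
  return 1;             /* fall through or jump to if_true_label */
}
#endif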

/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      do_compare_rtx_and_jump (part, const0_rtx, EQ, 1, word_mode,
                               NULL_RTX, 0, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    do_compare_rtx_and_jump (operand_subword_force (op0, i, GET_MODE (op0)),
                             const0_rtx, EQ, 1, word_mode, NULL_RTX, 0,
                             if_false_label, NULL_RTX);

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}
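
/* Standalone sketch (illustrative only, not part of the compiler): the
   "or all the words" zero test above, as plain C.  One IOR chain plus a
   single compare replaces a compare-and-branch per word.  */
#if 0
static int
wide_zerop_sketch (const unsigned int *op0, int nwords)
{
  unsigned int part = op0[0];
  int i;

  for (i = 1; i < nwords; i++)
    part |= op0[i];
  return part == 0;
}
#endif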

/* Generate code for a comparison of OP0 and OP1 with rtx code CODE,
   and set (CC0) according to the result.
   The decision as to signed or unsigned comparison must be made by the caller.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}

/* Like do_compare_and_jump but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

void
do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode, size, align,
                         if_false_label, if_true_label)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     unsigned int align;
     rtx if_false_label, if_true_label;
{
  rtx tem;
  int dummy_true_label = 0;

  /* Reverse the comparison if that is safe and we want to jump if it is
     false.  */
  if (! if_true_label && ! FLOAT_MODE_P (mode))
    {
      if_true_label = if_false_label;
      if_false_label = 0;
      code = reverse_condition (code);
    }

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    {
      if (tem == const_true_rtx)
        {
          if (if_true_label)
            emit_jump (if_true_label);
        }
      else
        {
          if (if_false_label)
            emit_jump (if_false_label);
        }
      return;
    }

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  if (! if_true_label)
    {
      dummy_true_label = 1;
      if_true_label = gen_label_rtx ();
    }

  emit_cmp_and_jump_insns (op0, op1, code, size, mode, unsignedp, align,
                           if_true_label);

  if (if_false_label)
    emit_jump (if_false_label);
  if (dummy_true_label)
    emit_label (if_true_label);
}
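
/* Standalone sketch (illustrative only, not part of the compiler): the
   label reversal at the top of do_compare_rtx_and_jump.  "Jump to L
   when (a < b) is false" becomes "jump to L when (a >= b) is true".
   The ! FLOAT_MODE_P guard matters because with IEEE NaNs
   "! (a < b)" is not the same as "a >= b".  */
#if 0
static void
reverse_cond_sketch (int a, int b, void (*label) (void))
{
  if (a >= b)           /* reverse_condition (LT) is GE */
    label ();
}
#endif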

/* Generate code for a comparison expression EXP (including code to compute
   the values to be compared) and a conditional jump to IF_FALSE_LABEL and/or
   IF_TRUE_LABEL.  One of the labels can be NULL_RTX, in which case the
   generated code will drop through.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static void
do_compare_and_jump (exp, signed_code, unsigned_code, if_false_label,
                     if_true_label)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
     rtx if_false_label, if_true_label;
{
  unsigned int align0, align1;
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr_unaligned (TREE_OPERAND (exp, 0), &align0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return;

  op1 = expand_expr_unaligned (TREE_OPERAND (exp, 1), &align1);
  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  do_compare_rtx_and_jump (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           MIN (align0, align1),
                           if_false_label, if_true_label);
}

/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;

    case UNORDERED_EXPR:
      code = UNORDERED;
      break;
    case ORDERED_EXPR:
      code = ORDERED;
      break;
    case UNLT_EXPR:
      code = UNLT;
      break;
    case UNLE_EXPR:
      code = UNLE;
      break;
    case UNGT_EXPR:
      code = UNGT;
      break;
    case UNGE_EXPR:
      code = UNGE;
      break;
    case UNEQ_EXPR:
      code = UNEQ;
      break;

    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

d39985fa 10179
b93a436e
JL
10180 if ((code == NE || code == EQ)
10181 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
10182 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10183 {
10184 tree inner = TREE_OPERAND (arg0, 0);
10185 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
10186 int ops_unsignedp;
bbf6f052 10187
b93a436e
JL
10188 /* If INNER is a right shift of a constant and it plus BITNUM does
10189 not overflow, adjust BITNUM and INNER. */
ca695ac9 10190
b93a436e
JL
10191 if (TREE_CODE (inner) == RSHIFT_EXPR
10192 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
10193 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
05bccae2
RK
10194 && bitnum < TYPE_PRECISION (type)
10195 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
10196 bitnum - TYPE_PRECISION (type)))
ca695ac9 10197 {
b93a436e
JL
10198 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
10199 inner = TREE_OPERAND (inner, 0);
ca695ac9 10200 }
ca695ac9 10201
b93a436e
JL
10202 /* If we are going to be able to omit the AND below, we must do our
10203 operations as unsigned. If we must use the AND, we have a choice.
10204 Normally unsigned is faster, but for some machines signed is. */
10205 ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
10206#ifdef LOAD_EXTEND_OP
10207 : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
10208#else
10209 : 1
10210#endif
10211 );
bbf6f052 10212
b93a436e
JL
10213 if (subtarget == 0 || GET_CODE (subtarget) != REG
10214 || GET_MODE (subtarget) != operand_mode
e5e809f4 10215 || ! safe_from_p (subtarget, inner, 1))
b93a436e 10216 subtarget = 0;
bbf6f052 10217
b93a436e 10218 op0 = expand_expr (inner, subtarget, VOIDmode, 0);
bbf6f052 10219
b93a436e
JL
10220 if (bitnum != 0)
10221 op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
10222 size_int (bitnum), subtarget, ops_unsignedp);
bbf6f052 10223
b93a436e
JL
10224 if (GET_MODE (op0) != mode)
10225 op0 = convert_to_mode (mode, op0, ops_unsignedp);
bbf6f052 10226
b93a436e
JL
10227 if ((code == EQ && ! invert) || (code == NE && invert))
10228 op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
10229 ops_unsignedp, OPTAB_LIB_WIDEN);
bbf6f052 10230
b93a436e
JL
10231 /* Put the AND last so it can combine with more things. */
10232 if (bitnum != TYPE_PRECISION (type) - 1)
10233 op0 = expand_and (op0, const1_rtx, subtarget);
bbf6f052 10234
b93a436e
JL
10235 return op0;
10236 }
bbf6f052 10237
b93a436e 10238 /* Now see if we are likely to be able to do this. Return if not. */
1eb8759b 10239 if (! can_compare_p (code, operand_mode, ccp_store_flag))
b93a436e 10240 return 0;
1eb8759b 10241
b93a436e
JL
10242 icode = setcc_gen_code[(int) code];
10243 if (icode == CODE_FOR_nothing
a995e389 10244 || (only_cheap && insn_data[(int) icode].operand[0].mode != mode))
ca695ac9 10245 {
b93a436e
JL
10246 /* We can only do this if it is one of the special cases that
10247 can be handled without an scc insn. */
10248 if ((code == LT && integer_zerop (arg1))
10249 || (! only_cheap && code == GE && integer_zerop (arg1)))
10250 ;
10251 else if (BRANCH_COST >= 0
10252 && ! only_cheap && (code == NE || code == EQ)
10253 && TREE_CODE (type) != REAL_TYPE
10254 && ((abs_optab->handlers[(int) operand_mode].insn_code
10255 != CODE_FOR_nothing)
10256 || (ffs_optab->handlers[(int) operand_mode].insn_code
10257 != CODE_FOR_nothing)))
10258 ;
10259 else
10260 return 0;
ca695ac9 10261 }
b93a436e
JL
10262
10263 preexpand_calls (exp);
10264 if (subtarget == 0 || GET_CODE (subtarget) != REG
10265 || GET_MODE (subtarget) != operand_mode
e5e809f4 10266 || ! safe_from_p (subtarget, arg1, 1))
b93a436e
JL
10267 subtarget = 0;
10268
10269 op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
10270 op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);
10271
10272 if (target == 0)
10273 target = gen_reg_rtx (mode);
10274
10275 /* Pass copies of OP0 and OP1 in case they contain a QUEUED. This is safe
10276 because, if the emit_store_flag does anything it will succeed and
10277 OP0 and OP1 will not be used subsequently. */
ca695ac9 10278
b93a436e
JL
10279 result = emit_store_flag (target, code,
10280 queued_subexp_p (op0) ? copy_rtx (op0) : op0,
10281 queued_subexp_p (op1) ? copy_rtx (op1) : op1,
10282 operand_mode, unsignedp, 1);
ca695ac9 10283
b93a436e
JL
10284 if (result)
10285 {
10286 if (invert)
10287 result = expand_binop (mode, xor_optab, result, const1_rtx,
10288 result, 0, OPTAB_LIB_WIDEN);
10289 return result;
ca695ac9 10290 }
bbf6f052 10291
b93a436e
JL
10292 /* If this failed, we have to do this with set/compare/jump/set code. */
10293 if (GET_CODE (target) != REG
10294 || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
10295 target = gen_reg_rtx (GET_MODE (target));
10296
10297 emit_move_insn (target, invert ? const0_rtx : const1_rtx);
10298 result = compare_from_rtx (op0, op1, code, unsignedp,
10299 operand_mode, NULL_RTX, 0);
10300 if (GET_CODE (result) == CONST_INT)
10301 return (((result == const0_rtx && ! invert)
10302 || (result != const0_rtx && invert))
10303 ? const0_rtx : const1_rtx);
ca695ac9 10304
b93a436e
JL
10305 label = gen_label_rtx ();
10306 if (bcc_gen_fctn[(int) code] == 0)
10307 abort ();
0f41302f 10308
b93a436e
JL
10309 emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
10310 emit_move_insn (target, invert ? const1_rtx : const0_rtx);
10311 emit_label (label);
bbf6f052 10312
b93a436e 10313 return target;
ca695ac9 10314}
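
/* Standalone sketch (illustrative only, not part of the compiler): the
   set/compare/jump/set fallback at the end of do_store_flag, shown for
   a non-inverted EQ test.  The hypothetical names are for exposition.  */
#if 0
static int
store_flag_fallback_sketch (int op0, int op1)
{
  int target = 1;               /* assume the condition will hold */

  if (op0 == op1)               /* branch emitted via bcc_gen_fctn */
    goto label;
  target = 0;                   /* condition failed */
 label:
  return target;
}
#endif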

/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip thru, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */
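
/* Standalone sketch (illustrative only, not part of the compiler): the
   single unsigned bounds check used by do_tablejump.  After the low
   bound has been subtracted, one unsigned comparison tests both ends
   of the case range at once.  */
#if 0
static int
case_in_range_sketch (long index, long low, long high)
{
  /* Unsigned wraparound makes values below LOW look huge, so this is
     equivalent to "index >= low && index <= high".  */
  return (unsigned long) (index - low) <= (unsigned long) (high - low);
}
#endif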