]> gcc.gnu.org Git - gcc.git/blame - gcc/expr.c
Testcase for emit_group_store patch.
[gcc.git] / gcc / expr.c
CommitLineData
bbf6f052 1/* Convert tree expression to rtl instructions, for GNU compiler.
c85f7c16 2 Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.
bbf6f052
RK
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
940d9d63
RK
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
bbf6f052
RK
20
21
22#include "config.h"
670ee920 23#include "system.h"
ca695ac9 24#include "machmode.h"
bbf6f052
RK
25#include "rtl.h"
26#include "tree.h"
ca695ac9 27#include "obstack.h"
bbf6f052 28#include "flags.h"
bf76bb5a 29#include "regs.h"
4ed67205 30#include "hard-reg-set.h"
3d195391 31#include "except.h"
bbf6f052
RK
32#include "function.h"
33#include "insn-flags.h"
34#include "insn-codes.h"
bbf6f052 35#include "insn-config.h"
d6f4ec51
KG
36/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
37#include "expr.h"
bbf6f052
RK
38#include "recog.h"
39#include "output.h"
bbf6f052 40#include "typeclass.h"
ca55abae 41#include "defaults.h"
10f0ad3d 42#include "toplev.h"
bbf6f052
RK
43
44#define CEIL(x,y) (((x) + (y) - 1) / (y))
45
46/* Decide whether a function's arguments should be processed
bbc8a071
RK
47 from first to last or from last to first.
48
49 They should if the stack and args grow in opposite directions, but
50 only if we have push insns. */
bbf6f052 51
bbf6f052 52#ifdef PUSH_ROUNDING
bbc8a071 53
3319a347 54#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
bbf6f052
RK
55#define PUSH_ARGS_REVERSED /* If it's last to first */
56#endif
bbc8a071 57
bbf6f052
RK
58#endif
59
60#ifndef STACK_PUSH_CODE
61#ifdef STACK_GROWS_DOWNWARD
62#define STACK_PUSH_CODE PRE_DEC
63#else
64#define STACK_PUSH_CODE PRE_INC
65#endif
66#endif
67
68/* Like STACK_BOUNDARY but in units of bytes, not bits. */
69#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)
70
18543a22
ILT
71/* Assume that case vectors are not pc-relative. */
72#ifndef CASE_VECTOR_PC_RELATIVE
73#define CASE_VECTOR_PC_RELATIVE 0
74#endif
75
bbf6f052
RK
76/* If this is nonzero, we do not bother generating VOLATILE
77 around volatile memory references, and we are willing to
78 output indirect addresses. If cse is to follow, we reject
79 indirect addresses so a useful potential cse is generated;
80 if it is used only once, instruction combination will produce
81 the same indirect address eventually. */
82int cse_not_expected;
83
84/* Nonzero to generate code for all the subroutines within an
85 expression before generating the upper levels of the expression.
86 Nowadays this is never zero. */
87int do_preexpand_calls = 1;
88
89/* Number of units that we should eventually pop off the stack.
90 These are the arguments to function calls that have already returned. */
91int pending_stack_adjust;
92
93/* Nonzero means stack pops must not be deferred, and deferred stack
94 pops must not be output. It is nonzero inside a function call,
95 inside a conditional expression, inside a statement expression,
96 and in other cases as well. */
97int inhibit_defer_pop;
98
bbf6f052
RK
99/* Nonzero means __builtin_saveregs has already been done in this function.
100 The value is the pseudoreg containing the value __builtin_saveregs
101 returned. */
102static rtx saveregs_value;
103
dcf76fff
TW
104/* Similarly for __builtin_apply_args. */
105static rtx apply_args_value;
106
cff48d8f
RH
107/* Nonzero if the machine description has been fixed to accept
108 CONSTANT_P_RTX patterns. We will emit a warning and continue
109 if we find we must actually use such a beast. */
110static int can_handle_constant_p;
111
956d6950
JL
112/* Don't check memory usage, since code is being emitted to check a memory
113 usage. Used when flag_check_memory_usage is true, to avoid infinite
114 recursion. */
115static int in_check_memory_usage;
116
4969d05d
RK
117/* This structure is used by move_by_pieces to describe the move to
118 be performed. */
4969d05d
RK
119struct move_by_pieces
120{
121 rtx to;
122 rtx to_addr;
123 int autinc_to;
124 int explicit_inc_to;
e9cf6a97 125 int to_struct;
4969d05d
RK
126 rtx from;
127 rtx from_addr;
128 int autinc_from;
129 int explicit_inc_from;
e9cf6a97 130 int from_struct;
4969d05d
RK
131 int len;
132 int offset;
133 int reverse;
134};
135
9de08200
RK
136/* This structure is used by clear_by_pieces to describe the clear to
137 be performed. */
138
139struct clear_by_pieces
140{
141 rtx to;
142 rtx to_addr;
143 int autinc_to;
144 int explicit_inc_to;
145 int to_struct;
146 int len;
147 int offset;
148 int reverse;
149};
150
292b1216 151extern struct obstack permanent_obstack;
4ed67205 152extern rtx arg_pointer_save_area;
c02bd5d9 153
03566575
JW
154static rtx get_push_address PROTO ((int));
155
4969d05d
RK
156static rtx enqueue_insn PROTO((rtx, rtx));
157static int queued_subexp_p PROTO((rtx));
158static void init_queue PROTO((void));
4969d05d 159static int move_by_pieces_ninsns PROTO((unsigned int, int));
eae4b970 160static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
4969d05d 161 struct move_by_pieces *));
9de08200 162static void clear_by_pieces PROTO((rtx, int, int));
eae4b970 163static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
9de08200
RK
164 struct clear_by_pieces *));
165static int is_zeros_p PROTO((tree));
166static int mostly_zeros_p PROTO((tree));
d77fac3b
JL
167static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
168 tree, tree, int));
e1a43f73 169static void store_constructor PROTO((tree, rtx, int));
4969d05d 170static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
ece32014
MM
171 enum machine_mode, int, int,
172 int, int));
e009aaf3
JL
173static enum memory_use_mode
174 get_memory_usage_from_modifier PROTO((enum expand_modifier));
4969d05d
RK
175static tree save_noncopied_parts PROTO((tree, tree));
176static tree init_noncopied_parts PROTO((tree, tree));
e5e809f4 177static int safe_from_p PROTO((rtx, tree, int));
4969d05d 178static int fixed_type_p PROTO((tree));
01c8a7c8 179static rtx var_rtx PROTO((tree));
4969d05d
RK
180static int get_pointer_alignment PROTO((tree, unsigned));
181static tree string_constant PROTO((tree, tree *));
182static tree c_strlen PROTO((tree));
55a6ba9f 183static rtx get_memory_rtx PROTO((tree));
307b821c
RK
184static rtx expand_builtin PROTO((tree, rtx, rtx,
185 enum machine_mode, int));
0006469d
TW
186static int apply_args_size PROTO((void));
187static int apply_result_size PROTO((void));
188static rtx result_vector PROTO((int, rtx));
189static rtx expand_builtin_apply_args PROTO((void));
190static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
191static void expand_builtin_return PROTO((rtx));
7b8b9722 192static rtx expand_increment PROTO((tree, int, int));
4969d05d
RK
193static void preexpand_calls PROTO((tree));
194static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
195static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
4969d05d
RK
196static void do_jump_for_compare PROTO((rtx, rtx, rtx));
197static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
198static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));
bbf6f052 199
4fa52007
RK
200/* Record for each mode whether we can move a register directly to or
201 from an object of that mode in memory. If we can't, we won't try
202 to use that mode directly when accessing a field of that mode. */
203
204static char direct_load[NUM_MACHINE_MODES];
205static char direct_store[NUM_MACHINE_MODES];
206
7e24ffc9
HPN
207/* If a memory-to-memory move would take MOVE_RATIO or more simple
208 move-instruction sequences, we will do a movstr or libcall instead. */
bbf6f052
RK
209
210#ifndef MOVE_RATIO
266007a7 211#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
bbf6f052
RK
212#define MOVE_RATIO 2
213#else
996d9dac
MM
214/* If we are optimizing for space (-Os), cut down the default move ratio */
215#define MOVE_RATIO (optimize_size ? 3 : 15)
bbf6f052
RK
216#endif
217#endif
e87b4f3f 218
266007a7 219/* This array records the insn_code of insns to perform block moves. */
e6677db3 220enum insn_code movstr_optab[NUM_MACHINE_MODES];
266007a7 221
9de08200
RK
222/* This array records the insn_code of insns to perform block clears. */
223enum insn_code clrstr_optab[NUM_MACHINE_MODES];
224
0f41302f 225/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow. */
e87b4f3f
RS
226
227#ifndef SLOW_UNALIGNED_ACCESS
c7a7ac46 228#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
e87b4f3f 229#endif
0006469d
TW
230
231/* Register mappings for target machines without register windows. */
232#ifndef INCOMING_REGNO
233#define INCOMING_REGNO(OUT) (OUT)
234#endif
235#ifndef OUTGOING_REGNO
236#define OUTGOING_REGNO(IN) (IN)
237#endif
bbf6f052 238\f
4fa52007 239/* This is run once per compilation to set up which modes can be used
266007a7 240 directly in memory and to initialize the block move optab. */
4fa52007
RK
241
242void
243init_expr_once ()
244{
245 rtx insn, pat;
246 enum machine_mode mode;
cff48d8f 247 int num_clobbers;
9ec36da5
JL
248 rtx mem, mem1;
249 char *free_point;
250
251 start_sequence ();
252
253 /* Since we are on the permanent obstack, we must be sure we save this
254 spot AFTER we call start_sequence, since it will reuse the rtl it
255 makes. */
256 free_point = (char *) oballoc (0);
257
e2549997
RS
258 /* Try indexing by frame ptr and try by stack ptr.
259 It is known that on the Convex the stack ptr isn't a valid index.
260 With luck, one or the other is valid on any machine. */
9ec36da5
JL
261 mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
262 mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);
4fa52007 263
38a448ca 264 insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
4fa52007
RK
265 pat = PATTERN (insn);
266
267 for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
268 mode = (enum machine_mode) ((int) mode + 1))
269 {
270 int regno;
271 rtx reg;
4fa52007
RK
272
273 direct_load[(int) mode] = direct_store[(int) mode] = 0;
274 PUT_MODE (mem, mode);
e2549997 275 PUT_MODE (mem1, mode);
4fa52007 276
e6fe56a4
RK
277 /* See if there is some register that can be used in this mode and
278 directly loaded or stored from memory. */
279
7308a047
RS
280 if (mode != VOIDmode && mode != BLKmode)
281 for (regno = 0; regno < FIRST_PSEUDO_REGISTER
282 && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
283 regno++)
284 {
285 if (! HARD_REGNO_MODE_OK (regno, mode))
286 continue;
e6fe56a4 287
38a448ca 288 reg = gen_rtx_REG (mode, regno);
e6fe56a4 289
7308a047
RS
290 SET_SRC (pat) = mem;
291 SET_DEST (pat) = reg;
292 if (recog (pat, insn, &num_clobbers) >= 0)
293 direct_load[(int) mode] = 1;
e6fe56a4 294
e2549997
RS
295 SET_SRC (pat) = mem1;
296 SET_DEST (pat) = reg;
297 if (recog (pat, insn, &num_clobbers) >= 0)
298 direct_load[(int) mode] = 1;
299
7308a047
RS
300 SET_SRC (pat) = reg;
301 SET_DEST (pat) = mem;
302 if (recog (pat, insn, &num_clobbers) >= 0)
303 direct_store[(int) mode] = 1;
e2549997
RS
304
305 SET_SRC (pat) = reg;
306 SET_DEST (pat) = mem1;
307 if (recog (pat, insn, &num_clobbers) >= 0)
308 direct_store[(int) mode] = 1;
7308a047 309 }
4fa52007
RK
310 }
311
cff48d8f
RH
312 /* Find out if CONSTANT_P_RTX is accepted. */
313 SET_DEST (pat) = gen_rtx_REG (TYPE_MODE (integer_type_node),
314 FIRST_PSEUDO_REGISTER);
315 SET_SRC (pat) = gen_rtx_CONSTANT_P_RTX (TYPE_MODE (integer_type_node),
316 SET_DEST (pat));
317 if (recog (pat, insn, &num_clobbers) >= 0)
318 can_handle_constant_p = 1;
319
4fa52007 320 end_sequence ();
9ec36da5 321 obfree (free_point);
4fa52007 322}
cff48d8f 323
bbf6f052
RK
324/* This is run at the start of compiling a function. */
325
326void
327init_expr ()
328{
329 init_queue ();
330
331 pending_stack_adjust = 0;
332 inhibit_defer_pop = 0;
bbf6f052 333 saveregs_value = 0;
0006469d 334 apply_args_value = 0;
e87b4f3f 335 forced_labels = 0;
bbf6f052
RK
336}
337
338/* Save all variables describing the current status into the structure *P.
339 This is used before starting a nested function. */
340
341void
342save_expr_status (p)
343 struct function *p;
344{
345 /* Instead of saving the postincrement queue, empty it. */
346 emit_queue ();
347
348 p->pending_stack_adjust = pending_stack_adjust;
349 p->inhibit_defer_pop = inhibit_defer_pop;
bbf6f052 350 p->saveregs_value = saveregs_value;
0006469d 351 p->apply_args_value = apply_args_value;
e87b4f3f 352 p->forced_labels = forced_labels;
bbf6f052
RK
353
354 pending_stack_adjust = 0;
355 inhibit_defer_pop = 0;
bbf6f052 356 saveregs_value = 0;
0006469d 357 apply_args_value = 0;
e87b4f3f 358 forced_labels = 0;
bbf6f052
RK
359}
360
361/* Restore all variables describing the current status from the structure *P.
362 This is used after a nested function. */
363
364void
365restore_expr_status (p)
366 struct function *p;
367{
368 pending_stack_adjust = p->pending_stack_adjust;
369 inhibit_defer_pop = p->inhibit_defer_pop;
bbf6f052 370 saveregs_value = p->saveregs_value;
0006469d 371 apply_args_value = p->apply_args_value;
e87b4f3f 372 forced_labels = p->forced_labels;
bbf6f052
RK
373}
374\f
375/* Manage the queue of increment instructions to be output
376 for POSTINCREMENT_EXPR expressions, etc. */
377
378static rtx pending_chain;
379
380/* Queue up to increment (or change) VAR later. BODY says how:
381 BODY should be the same thing you would pass to emit_insn
382 to increment right away. It will go to emit_insn later on.
383
384 The value is a QUEUED expression to be used in place of VAR
385 where you want to guarantee the pre-incrementation value of VAR. */
386
387static rtx
388enqueue_insn (var, body)
389 rtx var, body;
390{
38a448ca
RH
391 pending_chain = gen_rtx_QUEUED (GET_MODE (var),
392 var, NULL_RTX, NULL_RTX, body,
393 pending_chain);
bbf6f052
RK
394 return pending_chain;
395}
396
397/* Use protect_from_queue to convert a QUEUED expression
398 into something that you can put immediately into an instruction.
399 If the queued incrementation has not happened yet,
400 protect_from_queue returns the variable itself.
401 If the incrementation has happened, protect_from_queue returns a temp
402 that contains a copy of the old value of the variable.
403
404 Any time an rtx which might possibly be a QUEUED is to be put
405 into an instruction, it must be passed through protect_from_queue first.
406 QUEUED expressions are not meaningful in instructions.
407
408 Do not pass a value through protect_from_queue and then hold
409 on to it for a while before putting it in an instruction!
410 If the queue is flushed in between, incorrect code will result. */
411
412rtx
413protect_from_queue (x, modify)
414 register rtx x;
415 int modify;
416{
417 register RTX_CODE code = GET_CODE (x);
418
419#if 0 /* A QUEUED can hang around after the queue is forced out. */
420 /* Shortcut for most common case. */
421 if (pending_chain == 0)
422 return x;
423#endif
424
425 if (code != QUEUED)
426 {
e9baa644
RK
427 /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
428 use of autoincrement. Make a copy of the contents of the memory
429 location rather than a copy of the address, but not if the value is
430 of mode BLKmode. Don't modify X in place since it might be
431 shared. */
bbf6f052
RK
432 if (code == MEM && GET_MODE (x) != BLKmode
433 && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
434 {
435 register rtx y = XEXP (x, 0);
38a448ca 436 register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));
e9baa644
RK
437
438 MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
439 RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
440 MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
41472af8 441 MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);
e9baa644 442
bbf6f052
RK
443 if (QUEUED_INSN (y))
444 {
e9baa644
RK
445 register rtx temp = gen_reg_rtx (GET_MODE (new));
446 emit_insn_before (gen_move_insn (temp, new),
bbf6f052
RK
447 QUEUED_INSN (y));
448 return temp;
449 }
e9baa644 450 return new;
bbf6f052
RK
451 }
452 /* Otherwise, recursively protect the subexpressions of all
453 the kinds of rtx's that can contain a QUEUED. */
454 if (code == MEM)
3f15938e
RS
455 {
456 rtx tem = protect_from_queue (XEXP (x, 0), 0);
457 if (tem != XEXP (x, 0))
458 {
459 x = copy_rtx (x);
460 XEXP (x, 0) = tem;
461 }
462 }
bbf6f052
RK
463 else if (code == PLUS || code == MULT)
464 {
3f15938e
RS
465 rtx new0 = protect_from_queue (XEXP (x, 0), 0);
466 rtx new1 = protect_from_queue (XEXP (x, 1), 0);
467 if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
468 {
469 x = copy_rtx (x);
470 XEXP (x, 0) = new0;
471 XEXP (x, 1) = new1;
472 }
bbf6f052
RK
473 }
474 return x;
475 }
476 /* If the increment has not happened, use the variable itself. */
477 if (QUEUED_INSN (x) == 0)
478 return QUEUED_VAR (x);
479 /* If the increment has happened and a pre-increment copy exists,
480 use that copy. */
481 if (QUEUED_COPY (x) != 0)
482 return QUEUED_COPY (x);
483 /* The increment has happened but we haven't set up a pre-increment copy.
484 Set one up now, and use it. */
485 QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
486 emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
487 QUEUED_INSN (x));
488 return QUEUED_COPY (x);
489}
490
491/* Return nonzero if X contains a QUEUED expression:
492 if it contains anything that will be altered by a queued increment.
493 We handle only combinations of MEM, PLUS, MINUS and MULT operators
494 since memory addresses generally contain only those. */
495
496static int
497queued_subexp_p (x)
498 rtx x;
499{
500 register enum rtx_code code = GET_CODE (x);
501 switch (code)
502 {
503 case QUEUED:
504 return 1;
505 case MEM:
506 return queued_subexp_p (XEXP (x, 0));
507 case MULT:
508 case PLUS:
509 case MINUS:
e9a25f70
JL
510 return (queued_subexp_p (XEXP (x, 0))
511 || queued_subexp_p (XEXP (x, 1)));
512 default:
513 return 0;
bbf6f052 514 }
bbf6f052
RK
515}
516
517/* Perform all the pending incrementations. */
518
519void
520emit_queue ()
521{
522 register rtx p;
381127e8 523 while ((p = pending_chain))
bbf6f052 524 {
41b083c4
R
525 rtx body = QUEUED_BODY (p);
526
527 if (GET_CODE (body) == SEQUENCE)
528 {
529 QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
530 emit_insn (QUEUED_BODY (p));
531 }
532 else
533 QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
bbf6f052
RK
534 pending_chain = QUEUED_NEXT (p);
535 }
536}
537
538static void
539init_queue ()
540{
541 if (pending_chain)
542 abort ();
543}
544\f
545/* Copy data from FROM to TO, where the machine modes are not the same.
546 Both modes may be integer, or both may be floating.
547 UNSIGNEDP should be nonzero if FROM is an unsigned type.
548 This causes zero-extension instead of sign-extension. */
549
550void
551convert_move (to, from, unsignedp)
552 register rtx to, from;
553 int unsignedp;
554{
555 enum machine_mode to_mode = GET_MODE (to);
556 enum machine_mode from_mode = GET_MODE (from);
557 int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
558 int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
559 enum insn_code code;
560 rtx libcall;
561
562 /* rtx code for making an equivalent value. */
563 enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);
564
565 to = protect_from_queue (to, 1);
566 from = protect_from_queue (from, 0);
567
568 if (to_real != from_real)
569 abort ();
570
1499e0a8
RK
571 /* If FROM is a SUBREG that indicates that we have already done at least
572 the required extension, strip it. We don't handle such SUBREGs as
573 TO here. */
574
575 if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
576 && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
577 >= GET_MODE_SIZE (to_mode))
578 && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
579 from = gen_lowpart (to_mode, from), from_mode = to_mode;
580
581 if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
582 abort ();
583
bbf6f052
RK
584 if (to_mode == from_mode
585 || (from_mode == VOIDmode && CONSTANT_P (from)))
586 {
587 emit_move_insn (to, from);
588 return;
589 }
590
591 if (to_real)
592 {
81d79e2c
RS
593 rtx value;
594
2b01c326 595 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
b424402e 596 {
2b01c326
RK
597 /* Try converting directly if the insn is supported. */
598 if ((code = can_extend_p (to_mode, from_mode, 0))
599 != CODE_FOR_nothing)
600 {
601 emit_unop_insn (code, to, from, UNKNOWN);
602 return;
603 }
bbf6f052 604 }
2b01c326 605
b424402e
RS
606#ifdef HAVE_trunchfqf2
607 if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
608 {
609 emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
610 return;
611 }
612#endif
704af6a1
JL
613#ifdef HAVE_trunctqfqf2
614 if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
615 {
616 emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
617 return;
618 }
619#endif
b424402e
RS
620#ifdef HAVE_truncsfqf2
621 if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
622 {
623 emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
624 return;
625 }
626#endif
627#ifdef HAVE_truncdfqf2
628 if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
629 {
630 emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
631 return;
632 }
633#endif
634#ifdef HAVE_truncxfqf2
635 if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
636 {
637 emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
638 return;
639 }
640#endif
641#ifdef HAVE_trunctfqf2
642 if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
643 {
644 emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
645 return;
646 }
647#endif
03747aa3
RK
648
649#ifdef HAVE_trunctqfhf2
650 if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
651 {
652 emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
653 return;
654 }
655#endif
b424402e
RS
656#ifdef HAVE_truncsfhf2
657 if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
658 {
659 emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
660 return;
661 }
662#endif
663#ifdef HAVE_truncdfhf2
664 if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
665 {
666 emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
667 return;
668 }
669#endif
670#ifdef HAVE_truncxfhf2
671 if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
672 {
673 emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
674 return;
675 }
676#endif
677#ifdef HAVE_trunctfhf2
678 if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
679 {
680 emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
681 return;
682 }
683#endif
2b01c326
RK
684
685#ifdef HAVE_truncsftqf2
686 if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
687 {
688 emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
689 return;
690 }
691#endif
692#ifdef HAVE_truncdftqf2
693 if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
694 {
695 emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
696 return;
697 }
698#endif
699#ifdef HAVE_truncxftqf2
700 if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
701 {
702 emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
703 return;
704 }
705#endif
706#ifdef HAVE_trunctftqf2
707 if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
708 {
709 emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
710 return;
711 }
712#endif
713
bbf6f052
RK
714#ifdef HAVE_truncdfsf2
715 if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
716 {
717 emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
718 return;
719 }
720#endif
b092b471
JW
721#ifdef HAVE_truncxfsf2
722 if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
723 {
724 emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
725 return;
726 }
727#endif
bbf6f052
RK
728#ifdef HAVE_trunctfsf2
729 if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
730 {
731 emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
732 return;
733 }
734#endif
b092b471
JW
735#ifdef HAVE_truncxfdf2
736 if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
737 {
738 emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
739 return;
740 }
741#endif
bbf6f052
RK
742#ifdef HAVE_trunctfdf2
743 if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
744 {
745 emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
746 return;
747 }
748#endif
749
b092b471
JW
750 libcall = (rtx) 0;
751 switch (from_mode)
752 {
753 case SFmode:
754 switch (to_mode)
755 {
756 case DFmode:
757 libcall = extendsfdf2_libfunc;
758 break;
759
760 case XFmode:
761 libcall = extendsfxf2_libfunc;
762 break;
763
764 case TFmode:
765 libcall = extendsftf2_libfunc;
766 break;
e9a25f70
JL
767
768 default:
769 break;
b092b471
JW
770 }
771 break;
772
773 case DFmode:
774 switch (to_mode)
775 {
776 case SFmode:
777 libcall = truncdfsf2_libfunc;
778 break;
779
780 case XFmode:
781 libcall = extenddfxf2_libfunc;
782 break;
783
784 case TFmode:
785 libcall = extenddftf2_libfunc;
786 break;
e9a25f70
JL
787
788 default:
789 break;
b092b471
JW
790 }
791 break;
792
793 case XFmode:
794 switch (to_mode)
795 {
796 case SFmode:
797 libcall = truncxfsf2_libfunc;
798 break;
799
800 case DFmode:
801 libcall = truncxfdf2_libfunc;
802 break;
e9a25f70
JL
803
804 default:
805 break;
b092b471
JW
806 }
807 break;
808
809 case TFmode:
810 switch (to_mode)
811 {
812 case SFmode:
813 libcall = trunctfsf2_libfunc;
814 break;
815
816 case DFmode:
817 libcall = trunctfdf2_libfunc;
818 break;
e9a25f70
JL
819
820 default:
821 break;
b092b471
JW
822 }
823 break;
e9a25f70
JL
824
825 default:
826 break;
b092b471
JW
827 }
828
829 if (libcall == (rtx) 0)
830 /* This conversion is not implemented yet. */
bbf6f052
RK
831 abort ();
832
81d79e2c
RS
833 value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
834 1, from, from_mode);
835 emit_move_insn (to, value);
bbf6f052
RK
836 return;
837 }
838
839 /* Now both modes are integers. */
840
841 /* Handle expanding beyond a word. */
842 if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
843 && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
844 {
845 rtx insns;
846 rtx lowpart;
847 rtx fill_value;
848 rtx lowfrom;
849 int i;
850 enum machine_mode lowpart_mode;
851 int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);
852
853 /* Try converting directly if the insn is supported. */
854 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
855 != CODE_FOR_nothing)
856 {
cd1b4b44
RK
857 /* If FROM is a SUBREG, put it into a register. Do this
858 so that we always generate the same set of insns for
859 better cse'ing; if an intermediate assignment occurred,
860 we won't be doing the operation directly on the SUBREG. */
861 if (optimize > 0 && GET_CODE (from) == SUBREG)
862 from = force_reg (from_mode, from);
bbf6f052
RK
863 emit_unop_insn (code, to, from, equiv_code);
864 return;
865 }
866 /* Next, try converting via full word. */
867 else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
868 && ((code = can_extend_p (to_mode, word_mode, unsignedp))
869 != CODE_FOR_nothing))
870 {
a81fee56 871 if (GET_CODE (to) == REG)
38a448ca 872 emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
bbf6f052
RK
873 convert_move (gen_lowpart (word_mode, to), from, unsignedp);
874 emit_unop_insn (code, to,
875 gen_lowpart (word_mode, to), equiv_code);
876 return;
877 }
878
879 /* No special multiword conversion insn; do it by hand. */
880 start_sequence ();
881
5c5033c3
RK
882 /* Since we will turn this into a no conflict block, we must ensure
883 that the source does not overlap the target. */
884
885 if (reg_overlap_mentioned_p (to, from))
886 from = force_reg (from_mode, from);
887
bbf6f052
RK
888 /* Get a copy of FROM widened to a word, if necessary. */
889 if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
890 lowpart_mode = word_mode;
891 else
892 lowpart_mode = from_mode;
893
894 lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);
895
896 lowpart = gen_lowpart (lowpart_mode, to);
897 emit_move_insn (lowpart, lowfrom);
898
899 /* Compute the value to put in each remaining word. */
900 if (unsignedp)
901 fill_value = const0_rtx;
902 else
903 {
904#ifdef HAVE_slt
905 if (HAVE_slt
906 && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
907 && STORE_FLAG_VALUE == -1)
908 {
906c4e36
RK
909 emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
910 lowpart_mode, 0, 0);
bbf6f052
RK
911 fill_value = gen_reg_rtx (word_mode);
912 emit_insn (gen_slt (fill_value));
913 }
914 else
915#endif
916 {
917 fill_value
918 = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
919 size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
906c4e36 920 NULL_RTX, 0);
bbf6f052
RK
921 fill_value = convert_to_mode (word_mode, fill_value, 1);
922 }
923 }
924
925 /* Fill the remaining words. */
926 for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
927 {
928 int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
929 rtx subword = operand_subword (to, index, 1, to_mode);
930
931 if (subword == 0)
932 abort ();
933
934 if (fill_value != subword)
935 emit_move_insn (subword, fill_value);
936 }
937
938 insns = get_insns ();
939 end_sequence ();
940
906c4e36 941 emit_no_conflict_block (insns, to, from, NULL_RTX,
38a448ca 942 gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
bbf6f052
RK
943 return;
944 }
945
d3c64ee3
RS
946 /* Truncating multi-word to a word or less. */
947 if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
948 && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
bbf6f052 949 {
431a6eca
JW
950 if (!((GET_CODE (from) == MEM
951 && ! MEM_VOLATILE_P (from)
952 && direct_load[(int) to_mode]
953 && ! mode_dependent_address_p (XEXP (from, 0)))
954 || GET_CODE (from) == REG
955 || GET_CODE (from) == SUBREG))
956 from = force_reg (from_mode, from);
bbf6f052
RK
957 convert_move (to, gen_lowpart (word_mode, from), 0);
958 return;
959 }
960
961 /* Handle pointer conversion */ /* SPEE 900220 */
e5e809f4
JL
962 if (to_mode == PQImode)
963 {
964 if (from_mode != QImode)
965 from = convert_to_mode (QImode, from, unsignedp);
966
967#ifdef HAVE_truncqipqi2
968 if (HAVE_truncqipqi2)
969 {
970 emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
971 return;
972 }
973#endif /* HAVE_truncqipqi2 */
974 abort ();
975 }
976
977 if (from_mode == PQImode)
978 {
979 if (to_mode != QImode)
980 {
981 from = convert_to_mode (QImode, from, unsignedp);
982 from_mode = QImode;
983 }
984 else
985 {
986#ifdef HAVE_extendpqiqi2
987 if (HAVE_extendpqiqi2)
988 {
989 emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
990 return;
991 }
992#endif /* HAVE_extendpqiqi2 */
993 abort ();
994 }
995 }
996
bbf6f052
RK
997 if (to_mode == PSImode)
998 {
999 if (from_mode != SImode)
1000 from = convert_to_mode (SImode, from, unsignedp);
1001
1f584163
DE
1002#ifdef HAVE_truncsipsi2
1003 if (HAVE_truncsipsi2)
bbf6f052 1004 {
1f584163 1005 emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
bbf6f052
RK
1006 return;
1007 }
1f584163 1008#endif /* HAVE_truncsipsi2 */
bbf6f052
RK
1009 abort ();
1010 }
1011
1012 if (from_mode == PSImode)
1013 {
1014 if (to_mode != SImode)
1015 {
1016 from = convert_to_mode (SImode, from, unsignedp);
1017 from_mode = SImode;
1018 }
1019 else
1020 {
1f584163
DE
1021#ifdef HAVE_extendpsisi2
1022 if (HAVE_extendpsisi2)
bbf6f052 1023 {
1f584163 1024 emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
bbf6f052
RK
1025 return;
1026 }
1f584163 1027#endif /* HAVE_extendpsisi2 */
bbf6f052
RK
1028 abort ();
1029 }
1030 }
1031
0407367d
RK
1032 if (to_mode == PDImode)
1033 {
1034 if (from_mode != DImode)
1035 from = convert_to_mode (DImode, from, unsignedp);
1036
1037#ifdef HAVE_truncdipdi2
1038 if (HAVE_truncdipdi2)
1039 {
1040 emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
1041 return;
1042 }
1043#endif /* HAVE_truncdipdi2 */
1044 abort ();
1045 }
1046
1047 if (from_mode == PDImode)
1048 {
1049 if (to_mode != DImode)
1050 {
1051 from = convert_to_mode (DImode, from, unsignedp);
1052 from_mode = DImode;
1053 }
1054 else
1055 {
1056#ifdef HAVE_extendpdidi2
1057 if (HAVE_extendpdidi2)
1058 {
1059 emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
1060 return;
1061 }
1062#endif /* HAVE_extendpdidi2 */
1063 abort ();
1064 }
1065 }
1066
bbf6f052
RK
1067 /* Now follow all the conversions between integers
1068 no more than a word long. */
1069
1070 /* For truncation, usually we can just refer to FROM in a narrower mode. */
1071 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
1072 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
d3c64ee3 1073 GET_MODE_BITSIZE (from_mode)))
bbf6f052 1074 {
d3c64ee3
RS
1075 if (!((GET_CODE (from) == MEM
1076 && ! MEM_VOLATILE_P (from)
1077 && direct_load[(int) to_mode]
1078 && ! mode_dependent_address_p (XEXP (from, 0)))
1079 || GET_CODE (from) == REG
1080 || GET_CODE (from) == SUBREG))
1081 from = force_reg (from_mode, from);
34aa3599
RK
1082 if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
1083 && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
1084 from = copy_to_reg (from);
bbf6f052
RK
1085 emit_move_insn (to, gen_lowpart (to_mode, from));
1086 return;
1087 }
1088
d3c64ee3 1089 /* Handle extension. */
bbf6f052
RK
1090 if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
1091 {
1092 /* Convert directly if that works. */
1093 if ((code = can_extend_p (to_mode, from_mode, unsignedp))
1094 != CODE_FOR_nothing)
1095 {
1096 emit_unop_insn (code, to, from, equiv_code);
1097 return;
1098 }
1099 else
1100 {
1101 enum machine_mode intermediate;
1102
1103 /* Search for a mode to convert via. */
1104 for (intermediate = from_mode; intermediate != VOIDmode;
1105 intermediate = GET_MODE_WIDER_MODE (intermediate))
930b4e39
RK
1106 if (((can_extend_p (to_mode, intermediate, unsignedp)
1107 != CODE_FOR_nothing)
1108 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
1109 && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
bbf6f052
RK
1110 && (can_extend_p (intermediate, from_mode, unsignedp)
1111 != CODE_FOR_nothing))
1112 {
1113 convert_move (to, convert_to_mode (intermediate, from,
1114 unsignedp), unsignedp);
1115 return;
1116 }
1117
1118 /* No suitable intermediate mode. */
1119 abort ();
1120 }
1121 }
1122
1123 /* Support special truncate insns for certain modes. */
1124
1125 if (from_mode == DImode && to_mode == SImode)
1126 {
1127#ifdef HAVE_truncdisi2
1128 if (HAVE_truncdisi2)
1129 {
1130 emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
1131 return;
1132 }
1133#endif
1134 convert_move (to, force_reg (from_mode, from), unsignedp);
1135 return;
1136 }
1137
1138 if (from_mode == DImode && to_mode == HImode)
1139 {
1140#ifdef HAVE_truncdihi2
1141 if (HAVE_truncdihi2)
1142 {
1143 emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
1144 return;
1145 }
1146#endif
1147 convert_move (to, force_reg (from_mode, from), unsignedp);
1148 return;
1149 }
1150
1151 if (from_mode == DImode && to_mode == QImode)
1152 {
1153#ifdef HAVE_truncdiqi2
1154 if (HAVE_truncdiqi2)
1155 {
1156 emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
1157 return;
1158 }
1159#endif
1160 convert_move (to, force_reg (from_mode, from), unsignedp);
1161 return;
1162 }
1163
1164 if (from_mode == SImode && to_mode == HImode)
1165 {
1166#ifdef HAVE_truncsihi2
1167 if (HAVE_truncsihi2)
1168 {
1169 emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
1170 return;
1171 }
1172#endif
1173 convert_move (to, force_reg (from_mode, from), unsignedp);
1174 return;
1175 }
1176
1177 if (from_mode == SImode && to_mode == QImode)
1178 {
1179#ifdef HAVE_truncsiqi2
1180 if (HAVE_truncsiqi2)
1181 {
1182 emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
1183 return;
1184 }
1185#endif
1186 convert_move (to, force_reg (from_mode, from), unsignedp);
1187 return;
1188 }
1189
1190 if (from_mode == HImode && to_mode == QImode)
1191 {
1192#ifdef HAVE_trunchiqi2
1193 if (HAVE_trunchiqi2)
1194 {
1195 emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
1196 return;
1197 }
1198#endif
1199 convert_move (to, force_reg (from_mode, from), unsignedp);
1200 return;
1201 }
1202
b9bcad65
RK
1203 if (from_mode == TImode && to_mode == DImode)
1204 {
1205#ifdef HAVE_trunctidi2
1206 if (HAVE_trunctidi2)
1207 {
1208 emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
1209 return;
1210 }
1211#endif
1212 convert_move (to, force_reg (from_mode, from), unsignedp);
1213 return;
1214 }
1215
1216 if (from_mode == TImode && to_mode == SImode)
1217 {
1218#ifdef HAVE_trunctisi2
1219 if (HAVE_trunctisi2)
1220 {
1221 emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
1222 return;
1223 }
1224#endif
1225 convert_move (to, force_reg (from_mode, from), unsignedp);
1226 return;
1227 }
1228
1229 if (from_mode == TImode && to_mode == HImode)
1230 {
1231#ifdef HAVE_trunctihi2
1232 if (HAVE_trunctihi2)
1233 {
1234 emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
1235 return;
1236 }
1237#endif
1238 convert_move (to, force_reg (from_mode, from), unsignedp);
1239 return;
1240 }
1241
1242 if (from_mode == TImode && to_mode == QImode)
1243 {
1244#ifdef HAVE_trunctiqi2
1245 if (HAVE_trunctiqi2)
1246 {
1247 emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
1248 return;
1249 }
1250#endif
1251 convert_move (to, force_reg (from_mode, from), unsignedp);
1252 return;
1253 }
1254
bbf6f052
RK
1255 /* Handle truncation of volatile memrefs, and so on;
1256 the things that couldn't be truncated directly,
1257 and for which there was no special instruction. */
1258 if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
1259 {
1260 rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
1261 emit_move_insn (to, temp);
1262 return;
1263 }
1264
1265 /* Mode combination is not recognized. */
1266 abort ();
1267}
1268
1269/* Return an rtx for a value that would result
1270 from converting X to mode MODE.
1271 Both X and MODE may be floating, or both integer.
1272 UNSIGNEDP is nonzero if X is an unsigned value.
1273 This can be done by referring to a part of X in place
5d901c31
RS
1274 or by copying to a new temporary with conversion.
1275
1276 This function *must not* call protect_from_queue
1277 except when putting X into an insn (in which case convert_move does it). */
bbf6f052
RK
1278
1279rtx
1280convert_to_mode (mode, x, unsignedp)
1281 enum machine_mode mode;
1282 rtx x;
1283 int unsignedp;
5ffe63ed
RS
1284{
1285 return convert_modes (mode, VOIDmode, x, unsignedp);
1286}
1287
1288/* Return an rtx for a value that would result
1289 from converting X from mode OLDMODE to mode MODE.
1290 Both modes may be floating, or both integer.
1291 UNSIGNEDP is nonzero if X is an unsigned value.
1292
1293 This can be done by referring to a part of X in place
1294 or by copying to a new temporary with conversion.
1295
1296 You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.
1297
1298 This function *must not* call protect_from_queue
1299 except when putting X into an insn (in which case convert_move does it). */
1300
1301rtx
1302convert_modes (mode, oldmode, x, unsignedp)
1303 enum machine_mode mode, oldmode;
1304 rtx x;
1305 int unsignedp;
bbf6f052
RK
1306{
1307 register rtx temp;
5ffe63ed 1308
1499e0a8
RK
1309 /* If FROM is a SUBREG that indicates that we have already done at least
1310 the required extension, strip it. */
1311
1312 if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
1313 && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
1314 && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
1315 x = gen_lowpart (mode, x);
bbf6f052 1316
64791b18
RK
1317 if (GET_MODE (x) != VOIDmode)
1318 oldmode = GET_MODE (x);
1319
5ffe63ed 1320 if (mode == oldmode)
bbf6f052
RK
1321 return x;
1322
1323 /* There is one case that we must handle specially: If we are converting
906c4e36 1324 a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
bbf6f052
RK
1325 we are to interpret the constant as unsigned, gen_lowpart will do
1326 the wrong if the constant appears negative. What we want to do is
1327 make the high-order word of the constant zero, not all ones. */
1328
1329 if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
906c4e36 1330 && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
bbf6f052 1331 && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
96ff8a16
ILT
1332 {
1333 HOST_WIDE_INT val = INTVAL (x);
1334
1335 if (oldmode != VOIDmode
1336 && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
1337 {
1338 int width = GET_MODE_BITSIZE (oldmode);
1339
1340 /* We need to zero extend VAL. */
1341 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1342 }
1343
1344 return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
1345 }
bbf6f052
RK
1346
1347 /* We can do this with a gen_lowpart if both desired and current modes
1348 are integer, and this is either a constant integer, a register, or a
ba2e110c
RK
1349 non-volatile MEM. Except for the constant case where MODE is no
1350 wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand. */
bbf6f052 1351
ba2e110c
RK
1352 if ((GET_CODE (x) == CONST_INT
1353 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 1354 || (GET_MODE_CLASS (mode) == MODE_INT
5ffe63ed 1355 && GET_MODE_CLASS (oldmode) == MODE_INT
bbf6f052 1356 && (GET_CODE (x) == CONST_DOUBLE
5ffe63ed 1357 || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
d57c66da
JW
1358 && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
1359 && direct_load[(int) mode])
2bf29316
JW
1360 || (GET_CODE (x) == REG
1361 && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
1362 GET_MODE_BITSIZE (GET_MODE (x)))))))))
ba2e110c
RK
1363 {
1364 /* ?? If we don't know OLDMODE, we have to assume here that
1365 X does not need sign- or zero-extension. This may not be
1366 the case, but it's the best we can do. */
1367 if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
1368 && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
1369 {
1370 HOST_WIDE_INT val = INTVAL (x);
1371 int width = GET_MODE_BITSIZE (oldmode);
1372
1373 /* We must sign or zero-extend in this case. Start by
1374 zero-extending, then sign extend if we need to. */
1375 val &= ((HOST_WIDE_INT) 1 << width) - 1;
1376 if (! unsignedp
1377 && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
1378 val |= (HOST_WIDE_INT) (-1) << width;
1379
1380 return GEN_INT (val);
1381 }
1382
1383 return gen_lowpart (mode, x);
1384 }
bbf6f052
RK
1385
1386 temp = gen_reg_rtx (mode);
1387 convert_move (temp, x, unsignedp);
1388 return temp;
1389}
1390\f
1391/* Generate several move instructions to copy LEN bytes
1392 from block FROM to block TO. (These are MEM rtx's with BLKmode).
1393 The caller must pass FROM and TO
1394 through protect_from_queue before calling.
1395 ALIGN (in bytes) is maximum alignment we can assume. */
1396
2e245dac 1397void
bbf6f052
RK
1398move_by_pieces (to, from, len, align)
1399 rtx to, from;
1400 int len, align;
1401{
1402 struct move_by_pieces data;
1403 rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
e87b4f3f 1404 int max_size = MOVE_MAX + 1;
bbf6f052
RK
1405
1406 data.offset = 0;
1407 data.to_addr = to_addr;
1408 data.from_addr = from_addr;
1409 data.to = to;
1410 data.from = from;
1411 data.autinc_to
1412 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
1413 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
1414 data.autinc_from
1415 = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
1416 || GET_CODE (from_addr) == POST_INC
1417 || GET_CODE (from_addr) == POST_DEC);
1418
1419 data.explicit_inc_from = 0;
1420 data.explicit_inc_to = 0;
1421 data.reverse
1422 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
1423 if (data.reverse) data.offset = len;
1424 data.len = len;
1425
e9cf6a97
JW
1426 data.to_struct = MEM_IN_STRUCT_P (to);
1427 data.from_struct = MEM_IN_STRUCT_P (from);
1428
bbf6f052
RK
1429 /* If copying requires more than two move insns,
1430 copy addresses to registers (to make displacements shorter)
1431 and use post-increment if available. */
1432 if (!(data.autinc_from && data.autinc_to)
1433 && move_by_pieces_ninsns (len, align) > 2)
1434 {
1435#ifdef HAVE_PRE_DECREMENT
1436 if (data.reverse && ! data.autinc_from)
1437 {
1438 data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
1439 data.autinc_from = 1;
1440 data.explicit_inc_from = -1;
1441 }
1442#endif
1443#ifdef HAVE_POST_INCREMENT
1444 if (! data.autinc_from)
1445 {
1446 data.from_addr = copy_addr_to_reg (from_addr);
1447 data.autinc_from = 1;
1448 data.explicit_inc_from = 1;
1449 }
1450#endif
1451 if (!data.autinc_from && CONSTANT_P (from_addr))
1452 data.from_addr = copy_addr_to_reg (from_addr);
1453#ifdef HAVE_PRE_DECREMENT
1454 if (data.reverse && ! data.autinc_to)
1455 {
1456 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
1457 data.autinc_to = 1;
1458 data.explicit_inc_to = -1;
1459 }
1460#endif
1461#ifdef HAVE_POST_INCREMENT
1462 if (! data.reverse && ! data.autinc_to)
1463 {
1464 data.to_addr = copy_addr_to_reg (to_addr);
1465 data.autinc_to = 1;
1466 data.explicit_inc_to = 1;
1467 }
1468#endif
1469 if (!data.autinc_to && CONSTANT_P (to_addr))
1470 data.to_addr = copy_addr_to_reg (to_addr);
1471 }
1472
c7a7ac46 1473 if (! SLOW_UNALIGNED_ACCESS
e87b4f3f 1474 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
bbf6f052 1475 align = MOVE_MAX;
bbf6f052
RK
1476
1477 /* First move what we can in the largest integer mode, then go to
1478 successively smaller modes. */
1479
1480 while (max_size > 1)
1481 {
1482 enum machine_mode mode = VOIDmode, tmode;
1483 enum insn_code icode;
1484
e7c33f54
RK
1485 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1486 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1487 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1488 mode = tmode;
1489
1490 if (mode == VOIDmode)
1491 break;
1492
1493 icode = mov_optab->handlers[(int) mode].insn_code;
1494 if (icode != CODE_FOR_nothing
1495 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1496 GET_MODE_SIZE (mode)))
1497 move_by_pieces_1 (GEN_FCN (icode), mode, &data);
1498
1499 max_size = GET_MODE_SIZE (mode);
1500 }
1501
1502 /* The code above should have handled everything. */
2a8e278c 1503 if (data.len > 0)
bbf6f052
RK
1504 abort ();
1505}
1506
1507/* Return number of insns required to move L bytes by pieces.
1508 ALIGN (in bytes) is maximum alignment we can assume. */
1509
1510static int
1511move_by_pieces_ninsns (l, align)
1512 unsigned int l;
1513 int align;
1514{
1515 register int n_insns = 0;
e87b4f3f 1516 int max_size = MOVE_MAX + 1;
bbf6f052 1517
c7a7ac46 1518 if (! SLOW_UNALIGNED_ACCESS
e87b4f3f 1519 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
bbf6f052 1520 align = MOVE_MAX;
bbf6f052
RK
1521
1522 while (max_size > 1)
1523 {
1524 enum machine_mode mode = VOIDmode, tmode;
1525 enum insn_code icode;
1526
e7c33f54
RK
1527 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1528 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
1529 if (GET_MODE_SIZE (tmode) < max_size)
bbf6f052
RK
1530 mode = tmode;
1531
1532 if (mode == VOIDmode)
1533 break;
1534
1535 icode = mov_optab->handlers[(int) mode].insn_code;
1536 if (icode != CODE_FOR_nothing
1537 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
1538 GET_MODE_SIZE (mode)))
1539 n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);
1540
1541 max_size = GET_MODE_SIZE (mode);
1542 }
1543
1544 return n_insns;
1545}
1546
1547/* Subroutine of move_by_pieces. Move as many bytes as appropriate
1548 with move instructions for mode MODE. GENFUN is the gen_... function
1549 to make a move insn for that mode. DATA has all the other info. */
1550
1551static void
1552move_by_pieces_1 (genfun, mode, data)
eae4b970 1553 rtx (*genfun) PROTO ((rtx, ...));
bbf6f052
RK
1554 enum machine_mode mode;
1555 struct move_by_pieces *data;
1556{
1557 register int size = GET_MODE_SIZE (mode);
1558 register rtx to1, from1;
1559
1560 while (data->len >= size)
1561 {
1562 if (data->reverse) data->offset -= size;
1563
1564 to1 = (data->autinc_to
38a448ca 1565 ? gen_rtx_MEM (mode, data->to_addr)
effbcc6a
RK
1566 : copy_rtx (change_address (data->to, mode,
1567 plus_constant (data->to_addr,
1568 data->offset))));
e9cf6a97 1569 MEM_IN_STRUCT_P (to1) = data->to_struct;
effbcc6a 1570
db3cf6fb
MS
1571 from1
1572 = (data->autinc_from
38a448ca 1573 ? gen_rtx_MEM (mode, data->from_addr)
db3cf6fb
MS
1574 : copy_rtx (change_address (data->from, mode,
1575 plus_constant (data->from_addr,
1576 data->offset))));
e9cf6a97 1577 MEM_IN_STRUCT_P (from1) = data->from_struct;
bbf6f052
RK
1578
1579#ifdef HAVE_PRE_DECREMENT
1580 if (data->explicit_inc_to < 0)
906c4e36 1581 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
bbf6f052 1582 if (data->explicit_inc_from < 0)
906c4e36 1583 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
bbf6f052
RK
1584#endif
1585
1586 emit_insn ((*genfun) (to1, from1));
1587#ifdef HAVE_POST_INCREMENT
1588 if (data->explicit_inc_to > 0)
906c4e36 1589 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
bbf6f052 1590 if (data->explicit_inc_from > 0)
906c4e36 1591 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
bbf6f052
RK
1592#endif
1593
1594 if (! data->reverse) data->offset += size;
1595
1596 data->len -= size;
1597 }
1598}
1599\f
1600/* Emit code to move a block Y to a block X.
1601 This may be done with string-move instructions,
1602 with multiple scalar move instructions, or with a library call.
1603
1604 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1605 with mode BLKmode.
1606 SIZE is an rtx that says how long they are.
1607 ALIGN is the maximum alignment we can assume they have,
e9a25f70 1608 measured in bytes.
bbf6f052 1609
e9a25f70
JL
1610 Return the address of the new block, if memcpy is called and returns it,
1611 0 otherwise. */
1612
1613rtx
bbf6f052
RK
1614emit_block_move (x, y, size, align)
1615 rtx x, y;
1616 rtx size;
1617 int align;
1618{
e9a25f70 1619 rtx retval = 0;
52cf7115
JL
1620#ifdef TARGET_MEM_FUNCTIONS
1621 static tree fn;
1622 tree call_expr, arg_list;
1623#endif
e9a25f70 1624
bbf6f052
RK
1625 if (GET_MODE (x) != BLKmode)
1626 abort ();
1627
1628 if (GET_MODE (y) != BLKmode)
1629 abort ();
1630
1631 x = protect_from_queue (x, 1);
1632 y = protect_from_queue (y, 0);
5d901c31 1633 size = protect_from_queue (size, 0);
bbf6f052
RK
1634
1635 if (GET_CODE (x) != MEM)
1636 abort ();
1637 if (GET_CODE (y) != MEM)
1638 abort ();
1639 if (size == 0)
1640 abort ();
1641
1642 if (GET_CODE (size) == CONST_INT
906c4e36 1643 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
bbf6f052
RK
1644 move_by_pieces (x, y, INTVAL (size), align);
1645 else
1646 {
1647 /* Try the most limited insn first, because there's no point
1648 including more than one in the machine description unless
1649 the more limited one has some advantage. */
266007a7 1650
0bba3f6f 1651 rtx opalign = GEN_INT (align);
266007a7
RK
1652 enum machine_mode mode;
1653
1654 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1655 mode = GET_MODE_WIDER_MODE (mode))
bbf6f052 1656 {
266007a7 1657 enum insn_code code = movstr_optab[(int) mode];
266007a7
RK
1658
1659 if (code != CODE_FOR_nothing
803090c4
RK
1660 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1661 here because if SIZE is less than the mode mask, as it is
8008b228 1662 returned by the macro, it will definitely be less than the
803090c4 1663 actual mode mask. */
8ca00751
RK
1664 && ((GET_CODE (size) == CONST_INT
1665 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 1666 <= (GET_MODE_MASK (mode) >> 1)))
8ca00751 1667 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
0bba3f6f
RK
1668 && (insn_operand_predicate[(int) code][0] == 0
1669 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1670 && (insn_operand_predicate[(int) code][1] == 0
1671 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1672 && (insn_operand_predicate[(int) code][3] == 0
1673 || (*insn_operand_predicate[(int) code][3]) (opalign,
1674 VOIDmode)))
bbf6f052 1675 {
1ba1e2a8 1676 rtx op2;
266007a7
RK
1677 rtx last = get_last_insn ();
1678 rtx pat;
1679
1ba1e2a8 1680 op2 = convert_to_mode (mode, size, 1);
0bba3f6f
RK
1681 if (insn_operand_predicate[(int) code][2] != 0
1682 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
266007a7
RK
1683 op2 = copy_to_mode_reg (mode, op2);
1684
1685 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1686 if (pat)
1687 {
1688 emit_insn (pat);
e9a25f70 1689 return 0;
266007a7
RK
1690 }
1691 else
1692 delete_insns_since (last);
bbf6f052
RK
1693 }
1694 }
bbf6f052
RK
1695
1696#ifdef TARGET_MEM_FUNCTIONS
52cf7115
JL
1697 /* It is incorrect to use the libcall calling conventions to call
1698 memcpy in this context.
1699
1700 This could be a user call to memcpy and the user may wish to
1701 examine the return value from memcpy.
1702
1703 For targets where libcalls and normal calls have different conventions
1704 for returning pointers, we could end up generating incorrect code.
1705
1706 So instead of using a libcall sequence we build up a suitable
1707 CALL_EXPR and expand the call in the normal fashion. */
1708 if (fn == NULL_TREE)
1709 {
1710 tree fntype;
1711
1712 /* This was copied from except.c, I don't know if all this is
1713 necessary in this context or not. */
1714 fn = get_identifier ("memcpy");
1715 push_obstacks_nochange ();
1716 end_temporary_allocation ();
1717 fntype = build_pointer_type (void_type_node);
1718 fntype = build_function_type (fntype, NULL_TREE);
1719 fn = build_decl (FUNCTION_DECL, fn, fntype);
1720 DECL_EXTERNAL (fn) = 1;
1721 TREE_PUBLIC (fn) = 1;
1722 DECL_ARTIFICIAL (fn) = 1;
1723 make_decl_rtl (fn, NULL_PTR, 1);
1724 assemble_external (fn);
1725 pop_obstacks ();
1726 }
1727
1728 /* We need to make an argument list for the function call.
1729
1730 memcpy has three arguments, the first two are void * addresses and
1731 the last is a size_t byte count for the copy. */
1732 arg_list
1733 = build_tree_list (NULL_TREE,
1734 make_tree (build_pointer_type (void_type_node),
1735 XEXP (x, 0)));
1736 TREE_CHAIN (arg_list)
1737 = build_tree_list (NULL_TREE,
1738 make_tree (build_pointer_type (void_type_node),
1739 XEXP (y, 0)));
1740 TREE_CHAIN (TREE_CHAIN (arg_list))
1741 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
1742 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
1743
1744 /* Now we have to build up the CALL_EXPR itself. */
1745 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
1746 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
1747 call_expr, arg_list, NULL_TREE);
1748 TREE_SIDE_EFFECTS (call_expr) = 1;
1749
1750 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
bbf6f052 1751#else
d562e42e 1752 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
1753 VOIDmode, 3, XEXP (y, 0), Pmode,
1754 XEXP (x, 0), Pmode,
3b6f75e2
JW
1755 convert_to_mode (TYPE_MODE (integer_type_node), size,
1756 TREE_UNSIGNED (integer_type_node)),
1757 TYPE_MODE (integer_type_node));
bbf6f052
RK
1758#endif
1759 }
e9a25f70
JL
1760
1761 return retval;
bbf6f052
RK
1762}
1763\f
1764/* Copy all or part of a value X into registers starting at REGNO.
1765 The number of registers to be filled is NREGS. */
1766
1767void
1768move_block_to_reg (regno, x, nregs, mode)
1769 int regno;
1770 rtx x;
1771 int nregs;
1772 enum machine_mode mode;
1773{
1774 int i;
381127e8
RL
1775#ifdef HAVE_load_multiple
1776 rtx pat;
1777 rtx last;
1778#endif
bbf6f052 1779
72bb9717
RK
1780 if (nregs == 0)
1781 return;
1782
bbf6f052
RK
1783 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
1784 x = validize_mem (force_const_mem (mode, x));
1785
1786 /* See if the machine can do this with a load multiple insn. */
1787#ifdef HAVE_load_multiple
c3a02afe 1788 if (HAVE_load_multiple)
bbf6f052 1789 {
c3a02afe 1790 last = get_last_insn ();
38a448ca 1791 pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
c3a02afe
RK
1792 GEN_INT (nregs));
1793 if (pat)
1794 {
1795 emit_insn (pat);
1796 return;
1797 }
1798 else
1799 delete_insns_since (last);
bbf6f052 1800 }
bbf6f052
RK
1801#endif
1802
1803 for (i = 0; i < nregs; i++)
38a448ca 1804 emit_move_insn (gen_rtx_REG (word_mode, regno + i),
bbf6f052
RK
1805 operand_subword_force (x, i, mode));
1806}
1807
1808/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
0040593d
JW
1809 The number of registers to be filled is NREGS. SIZE indicates the number
1810 of bytes in the object X. */
1811
bbf6f052
RK
1812
1813void
0040593d 1814move_block_from_reg (regno, x, nregs, size)
bbf6f052
RK
1815 int regno;
1816 rtx x;
1817 int nregs;
0040593d 1818 int size;
bbf6f052
RK
1819{
1820 int i;
381127e8
RL
1821#ifdef HAVE_store_multiple
1822 rtx pat;
1823 rtx last;
1824#endif
58a32c5c 1825 enum machine_mode mode;
bbf6f052 1826
58a32c5c
DE
1827 /* If SIZE is that of a mode no bigger than a word, just use that
1828 mode's store operation. */
1829 if (size <= UNITS_PER_WORD
1830 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1831 {
1832 emit_move_insn (change_address (x, mode, NULL),
38a448ca 1833 gen_rtx_REG (mode, regno));
58a32c5c
DE
1834 return;
1835 }
1836
0040593d 1837 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
58a32c5c
DE
1838 to the left before storing to memory. Note that the previous test
1839 doesn't handle all cases (e.g. SIZE == 3). */
0040593d
JW
1840 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1841 {
1842 rtx tem = operand_subword (x, 0, 1, BLKmode);
1843 rtx shift;
1844
1845 if (tem == 0)
1846 abort ();
1847
1848 shift = expand_shift (LSHIFT_EXPR, word_mode,
38a448ca 1849 gen_rtx_REG (word_mode, regno),
0040593d
JW
1850 build_int_2 ((UNITS_PER_WORD - size)
1851 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1852 emit_move_insn (tem, shift);
1853 return;
1854 }
1855
bbf6f052
RK
1856 /* See if the machine can do this with a store multiple insn. */
1857#ifdef HAVE_store_multiple
c3a02afe 1858 if (HAVE_store_multiple)
bbf6f052 1859 {
c3a02afe 1860 last = get_last_insn ();
38a448ca 1861 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
c3a02afe
RK
1862 GEN_INT (nregs));
1863 if (pat)
1864 {
1865 emit_insn (pat);
1866 return;
1867 }
1868 else
1869 delete_insns_since (last);
bbf6f052 1870 }
bbf6f052
RK
1871#endif
1872
1873 for (i = 0; i < nregs; i++)
1874 {
1875 rtx tem = operand_subword (x, i, 1, BLKmode);
1876
1877 if (tem == 0)
1878 abort ();
1879
38a448ca 1880 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
1881 }
1882}
1883
aac5cc16
RH
1884/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1885 registers represented by a PARALLEL. SSIZE represents the total size of
1886 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
1887 SRC in bits. */
1888/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatent assumption that
1889 the balance will be in what would be the low-order memory addresses, i.e.
1890 left justified for big endian, right justified for little endian. This
1891 happens to be true for the targets currently using this support. If this
1892 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1893 would be needed. */
fffa9c1d
JW
1894
1895void
aac5cc16
RH
1896emit_group_load (dst, orig_src, ssize, align)
1897 rtx dst, orig_src;
1898 int align, ssize;
fffa9c1d 1899{
aac5cc16
RH
1900 rtx *tmps, src;
1901 int start, i;
fffa9c1d 1902
aac5cc16 1903 if (GET_CODE (dst) != PARALLEL)
fffa9c1d
JW
1904 abort ();
1905
1906 /* Check for a NULL entry, used to indicate that the parameter goes
1907 both on the stack and in registers. */
aac5cc16
RH
1908 if (XEXP (XVECEXP (dst, 0, 0), 0))
1909 start = 0;
fffa9c1d 1910 else
aac5cc16
RH
1911 start = 1;
1912
1913 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1914
1915 /* If we won't be loading directly from memory, protect the real source
1916 from strange tricks we might play. */
1917 src = orig_src;
1918 if (GET_CODE (src) != MEM)
1919 {
1920 src = gen_reg_rtx (GET_MODE (orig_src));
1921 emit_move_insn (src, orig_src);
1922 }
1923
1924 /* Process the pieces. */
1925 for (i = start; i < XVECLEN (dst, 0); i++)
1926 {
1927 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1928 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1929 int bytelen = GET_MODE_SIZE (mode);
1930 int shift = 0;
1931
1932 /* Handle trailing fragments that run over the size of the struct. */
1933 if (ssize >= 0 && bytepos + bytelen > ssize)
1934 {
1935 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1936 bytelen = ssize - bytepos;
1937 if (bytelen <= 0)
1938 abort();
1939 }
1940
1941 /* Optimize the access just a bit. */
1942 if (GET_CODE (src) == MEM
1943 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1944 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1945 && bytelen == GET_MODE_SIZE (mode))
1946 {
1947 tmps[i] = gen_reg_rtx (mode);
1948 emit_move_insn (tmps[i],
1949 change_address (src, mode,
1950 plus_constant (XEXP (src, 0),
1951 bytepos)));
fffa9c1d
JW
1952 }
1953 else
aac5cc16
RH
1954 {
1955 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1956 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1957 mode, mode, align, ssize);
1958 }
fffa9c1d 1959
aac5cc16
RH
1960 if (BYTES_BIG_ENDIAN && shift)
1961 {
1962 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1963 tmps[i], 0, OPTAB_WIDEN);
1964 }
fffa9c1d 1965 }
aac5cc16
RH
1966 emit_queue();
1967
1968 /* Copy the extracted pieces into the proper (probable) hard regs. */
1969 for (i = start; i < XVECLEN (dst, 0); i++)
1970 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
fffa9c1d
JW
1971}
1972
aac5cc16
RH
1973/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1974 registers represented by a PARALLEL. SSIZE represents the total size of
1975 block DST, or -1 if not known. ALIGN is the known alignment of DST. */
fffa9c1d
JW
1976
1977void
aac5cc16
RH
1978emit_group_store (orig_dst, src, ssize, align)
1979 rtx orig_dst, src;
1980 int ssize, align;
fffa9c1d 1981{
aac5cc16
RH
1982 rtx *tmps, dst;
1983 int start, i;
fffa9c1d 1984
aac5cc16 1985 if (GET_CODE (src) != PARALLEL)
fffa9c1d
JW
1986 abort ();
1987
1988 /* Check for a NULL entry, used to indicate that the parameter goes
1989 both on the stack and in registers. */
aac5cc16
RH
1990 if (XEXP (XVECEXP (src, 0, 0), 0))
1991 start = 0;
fffa9c1d 1992 else
aac5cc16
RH
1993 start = 1;
1994
1995 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
fffa9c1d 1996
aac5cc16
RH
1997 /* Copy the (probable) hard regs into pseudos. */
1998 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 1999 {
aac5cc16
RH
2000 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2001 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2002 emit_move_insn (tmps[i], reg);
2003 }
2004 emit_queue();
fffa9c1d 2005
aac5cc16
RH
2006 /* If we won't be storing directly into memory, protect the real destination
2007 from strange tricks we might play. */
2008 dst = orig_dst;
2009 if (GET_CODE (dst) != MEM)
2010 {
2011 dst = gen_reg_rtx (GET_MODE (orig_dst));
2012 /* Make life a bit easier for combine. */
2013 emit_move_insn (dst, const0_rtx);
2014 }
2015 else if (! MEM_IN_STRUCT_P (dst))
2016 {
2017 /* store_bit_field requires that memory operations have
2018 mem_in_struct_p set; we might not. */
fffa9c1d 2019
aac5cc16
RH
2020 dst = copy_rtx (orig_dst);
2021 MEM_IN_STRUCT_P (dst) = 1;
2022 }
2023
2024 /* Process the pieces. */
2025 for (i = start; i < XVECLEN (src, 0); i++)
2026 {
2027 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2028 enum machine_mode mode = GET_MODE (tmps[i]);
2029 int bytelen = GET_MODE_SIZE (mode);
2030
2031 /* Handle trailing fragments that run over the size of the struct. */
2032 if (ssize >= 0 && bytepos + bytelen > ssize)
71bc0330 2033 {
aac5cc16
RH
2034 if (BYTES_BIG_ENDIAN)
2035 {
2036 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2037 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2038 tmps[i], 0, OPTAB_WIDEN);
2039 }
2040 bytelen = ssize - bytepos;
71bc0330 2041 }
fffa9c1d 2042
aac5cc16
RH
2043 /* Optimize the access just a bit. */
2044 if (GET_CODE (dst) == MEM
2045 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2046 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2047 && bytelen == GET_MODE_SIZE (mode))
2048 {
2049 emit_move_insn (change_address (dst, mode,
2050 plus_constant (XEXP (dst, 0),
2051 bytepos)),
2052 tmps[i]);
2053 }
2054 else
2055 {
2056 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2057 mode, tmps[i], align, ssize);
2058 }
fffa9c1d 2059 }
aac5cc16
RH
2060 emit_queue();
2061
2062 /* Copy from the pseudo into the (probable) hard reg. */
2063 if (GET_CODE (dst) == REG)
2064 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
2065}
2066
c36fce9a
GRK
2067/* Generate code to copy a BLKmode object of TYPE out of a
2068 set of registers starting with SRCREG into TGTBLK. If TGTBLK
2069 is null, a stack temporary is created. TGTBLK is returned.
2070
2071 The primary purpose of this routine is to handle functions
2072 that return BLKmode structures in registers. Some machines
2073 (the PA for example) want to return all small structures
2074 in registers regardless of the structure's alignment.
2075 */
2076
2077rtx
2078copy_blkmode_from_reg(tgtblk,srcreg,type)
2079 rtx tgtblk;
2080 rtx srcreg;
2081 tree type;
2082{
2083 int bytes = int_size_in_bytes (type);
2084 rtx src = NULL, dst = NULL;
2085 int bitsize = MIN (TYPE_ALIGN (type), BITS_PER_WORD);
2086 int bitpos, xbitpos, big_endian_correction = 0;
2087
2088 if (tgtblk == 0)
2089 {
2090 tgtblk = assign_stack_temp (BLKmode, bytes, 0);
2091 MEM_IN_STRUCT_P (tgtblk) = AGGREGATE_TYPE_P (type);
2092 preserve_temp_slots (tgtblk);
2093 }
2094
2095 /* This code assumes srcreg is at least a full word. If it isn't,
2096 copy it into a new pseudo which is a full word. */
2097 if (GET_MODE (srcreg) != BLKmode
2098 && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
2099 srcreg = convert_to_mode (word_mode, srcreg,
2100 TREE_UNSIGNED (type));
2101
2102 /* Structures whose size is not a multiple of a word are aligned
2103 to the least significant byte (to the right). On a BYTES_BIG_ENDIAN
2104 machine, this means we must skip the empty high order bytes when
2105 calculating the bit offset. */
2106 if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
2107 big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
2108 * BITS_PER_UNIT));
2109
2110 /* Copy the structure BITSIZE bites at a time.
2111
2112 We could probably emit more efficient code for machines
2113 which do not use strict alignment, but it doesn't seem
2114 worth the effort at the current time. */
2115 for (bitpos = 0, xbitpos = big_endian_correction;
2116 bitpos < bytes * BITS_PER_UNIT;
2117 bitpos += bitsize, xbitpos += bitsize)
2118 {
2119
2120 /* We need a new source operand each time xbitpos is on a
2121 word boundary and when xbitpos == big_endian_correction
2122 (the first time through). */
2123 if (xbitpos % BITS_PER_WORD == 0
2124 || xbitpos == big_endian_correction)
2125 src = operand_subword_force (srcreg,
2126 xbitpos / BITS_PER_WORD,
2127 BLKmode);
2128
2129 /* We need a new destination operand each time bitpos is on
2130 a word boundary. */
2131 if (bitpos % BITS_PER_WORD == 0)
2132 dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);
2133
2134 /* Use xbitpos for the source extraction (right justified) and
2135 xbitpos for the destination store (left justified). */
2136 store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
2137 extract_bit_field (src, bitsize,
2138 xbitpos % BITS_PER_WORD, 1,
2139 NULL_RTX, word_mode,
2140 word_mode,
2141 bitsize / BITS_PER_UNIT,
2142 BITS_PER_WORD),
2143 bitsize / BITS_PER_UNIT, BITS_PER_WORD);
2144 }
2145 return tgtblk;
2146}
2147
2148
94b25f81
RK
2149/* Add a USE expression for REG to the (possibly empty) list pointed
2150 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2151
2152void
b3f8cf4a
RK
2153use_reg (call_fusage, reg)
2154 rtx *call_fusage, reg;
2155{
0304dfbb
DE
2156 if (GET_CODE (reg) != REG
2157 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
b3f8cf4a
RK
2158 abort();
2159
2160 *call_fusage
38a448ca
RH
2161 = gen_rtx_EXPR_LIST (VOIDmode,
2162 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2163}
2164
94b25f81
RK
2165/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2166 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2167
2168void
0304dfbb
DE
2169use_regs (call_fusage, regno, nregs)
2170 rtx *call_fusage;
bbf6f052
RK
2171 int regno;
2172 int nregs;
2173{
0304dfbb 2174 int i;
bbf6f052 2175
0304dfbb
DE
2176 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2177 abort ();
2178
2179 for (i = 0; i < nregs; i++)
38a448ca 2180 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
bbf6f052 2181}
fffa9c1d
JW
2182
2183/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2184 PARALLEL REGS. This is for calls that pass values in multiple
2185 non-contiguous locations. The Irix 6 ABI has examples of this. */
2186
2187void
2188use_group_regs (call_fusage, regs)
2189 rtx *call_fusage;
2190 rtx regs;
2191{
2192 int i;
2193
6bd35f86
DE
2194 for (i = 0; i < XVECLEN (regs, 0); i++)
2195 {
2196 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2197
6bd35f86
DE
2198 /* A NULL entry means the parameter goes both on the stack and in
2199 registers. This can also be a MEM for targets that pass values
2200 partially on the stack and partially in registers. */
e9a25f70 2201 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
2202 use_reg (call_fusage, reg);
2203 }
fffa9c1d 2204}
bbf6f052 2205\f
9de08200
RK
2206/* Generate several move instructions to clear LEN bytes of block TO.
2207 (A MEM rtx with BLKmode). The caller must pass TO through
2208 protect_from_queue before calling. ALIGN (in bytes) is maximum alignment
2209 we can assume. */
2210
2211static void
2212clear_by_pieces (to, len, align)
2213 rtx to;
2214 int len, align;
2215{
2216 struct clear_by_pieces data;
2217 rtx to_addr = XEXP (to, 0);
2218 int max_size = MOVE_MAX + 1;
2219
2220 data.offset = 0;
2221 data.to_addr = to_addr;
2222 data.to = to;
2223 data.autinc_to
2224 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2225 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2226
2227 data.explicit_inc_to = 0;
2228 data.reverse
2229 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2230 if (data.reverse) data.offset = len;
2231 data.len = len;
2232
2233 data.to_struct = MEM_IN_STRUCT_P (to);
2234
2235 /* If copying requires more than two move insns,
2236 copy addresses to registers (to make displacements shorter)
2237 and use post-increment if available. */
2238 if (!data.autinc_to
2239 && move_by_pieces_ninsns (len, align) > 2)
2240 {
2241#ifdef HAVE_PRE_DECREMENT
2242 if (data.reverse && ! data.autinc_to)
2243 {
2244 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2245 data.autinc_to = 1;
2246 data.explicit_inc_to = -1;
2247 }
2248#endif
2249#ifdef HAVE_POST_INCREMENT
2250 if (! data.reverse && ! data.autinc_to)
2251 {
2252 data.to_addr = copy_addr_to_reg (to_addr);
2253 data.autinc_to = 1;
2254 data.explicit_inc_to = 1;
2255 }
2256#endif
2257 if (!data.autinc_to && CONSTANT_P (to_addr))
2258 data.to_addr = copy_addr_to_reg (to_addr);
2259 }
2260
2261 if (! SLOW_UNALIGNED_ACCESS
2262 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2263 align = MOVE_MAX;
2264
2265 /* First move what we can in the largest integer mode, then go to
2266 successively smaller modes. */
2267
2268 while (max_size > 1)
2269 {
2270 enum machine_mode mode = VOIDmode, tmode;
2271 enum insn_code icode;
2272
2273 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2274 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2275 if (GET_MODE_SIZE (tmode) < max_size)
2276 mode = tmode;
2277
2278 if (mode == VOIDmode)
2279 break;
2280
2281 icode = mov_optab->handlers[(int) mode].insn_code;
2282 if (icode != CODE_FOR_nothing
2283 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2284 GET_MODE_SIZE (mode)))
2285 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2286
2287 max_size = GET_MODE_SIZE (mode);
2288 }
2289
2290 /* The code above should have handled everything. */
2291 if (data.len != 0)
2292 abort ();
2293}
2294
2295/* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2296 with move instructions for mode MODE. GENFUN is the gen_... function
2297 to make a move insn for that mode. DATA has all the other info. */
2298
2299static void
2300clear_by_pieces_1 (genfun, mode, data)
eae4b970 2301 rtx (*genfun) PROTO ((rtx, ...));
9de08200
RK
2302 enum machine_mode mode;
2303 struct clear_by_pieces *data;
2304{
2305 register int size = GET_MODE_SIZE (mode);
2306 register rtx to1;
2307
2308 while (data->len >= size)
2309 {
2310 if (data->reverse) data->offset -= size;
2311
2312 to1 = (data->autinc_to
38a448ca 2313 ? gen_rtx_MEM (mode, data->to_addr)
effbcc6a
RK
2314 : copy_rtx (change_address (data->to, mode,
2315 plus_constant (data->to_addr,
2316 data->offset))));
9de08200
RK
2317 MEM_IN_STRUCT_P (to1) = data->to_struct;
2318
2319#ifdef HAVE_PRE_DECREMENT
2320 if (data->explicit_inc_to < 0)
2321 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2322#endif
2323
2324 emit_insn ((*genfun) (to1, const0_rtx));
2325#ifdef HAVE_POST_INCREMENT
2326 if (data->explicit_inc_to > 0)
2327 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2328#endif
2329
2330 if (! data->reverse) data->offset += size;
2331
2332 data->len -= size;
2333 }
2334}
2335\f
bbf6f052 2336/* Write zeros through the storage of OBJECT.
9de08200 2337 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
e9a25f70 2338 the maximum alignment we can is has, measured in bytes.
bbf6f052 2339
e9a25f70
JL
2340 If we call a function that returns the length of the block, return it. */
2341
2342rtx
9de08200 2343clear_storage (object, size, align)
bbf6f052 2344 rtx object;
4c08eef0 2345 rtx size;
9de08200 2346 int align;
bbf6f052 2347{
52cf7115
JL
2348#ifdef TARGET_MEM_FUNCTIONS
2349 static tree fn;
2350 tree call_expr, arg_list;
2351#endif
e9a25f70
JL
2352 rtx retval = 0;
2353
bbf6f052
RK
2354 if (GET_MODE (object) == BLKmode)
2355 {
9de08200
RK
2356 object = protect_from_queue (object, 1);
2357 size = protect_from_queue (size, 0);
2358
2359 if (GET_CODE (size) == CONST_INT
2360 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2361 clear_by_pieces (object, INTVAL (size), align);
2362
2363 else
2364 {
2365 /* Try the most limited insn first, because there's no point
2366 including more than one in the machine description unless
2367 the more limited one has some advantage. */
2368
2369 rtx opalign = GEN_INT (align);
2370 enum machine_mode mode;
2371
2372 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2373 mode = GET_MODE_WIDER_MODE (mode))
2374 {
2375 enum insn_code code = clrstr_optab[(int) mode];
2376
2377 if (code != CODE_FOR_nothing
2378 /* We don't need MODE to be narrower than
2379 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2380 the mode mask, as it is returned by the macro, it will
2381 definitely be less than the actual mode mask. */
2382 && ((GET_CODE (size) == CONST_INT
2383 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 2384 <= (GET_MODE_MASK (mode) >> 1)))
9de08200
RK
2385 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2386 && (insn_operand_predicate[(int) code][0] == 0
2387 || (*insn_operand_predicate[(int) code][0]) (object,
2388 BLKmode))
2389 && (insn_operand_predicate[(int) code][2] == 0
2390 || (*insn_operand_predicate[(int) code][2]) (opalign,
2391 VOIDmode)))
2392 {
2393 rtx op1;
2394 rtx last = get_last_insn ();
2395 rtx pat;
2396
2397 op1 = convert_to_mode (mode, size, 1);
2398 if (insn_operand_predicate[(int) code][1] != 0
2399 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2400 mode))
2401 op1 = copy_to_mode_reg (mode, op1);
2402
2403 pat = GEN_FCN ((int) code) (object, op1, opalign);
2404 if (pat)
2405 {
2406 emit_insn (pat);
e9a25f70 2407 return 0;
9de08200
RK
2408 }
2409 else
2410 delete_insns_since (last);
2411 }
2412 }
2413
2414
bbf6f052 2415#ifdef TARGET_MEM_FUNCTIONS
52cf7115
JL
2416 /* It is incorrect to use the libcall calling conventions to call
2417 memset in this context.
2418
2419 This could be a user call to memset and the user may wish to
2420 examine the return value from memset.
2421
2422 For targets where libcalls and normal calls have different conventions
2423 for returning pointers, we could end up generating incorrect code.
2424
2425 So instead of using a libcall sequence we build up a suitable
2426 CALL_EXPR and expand the call in the normal fashion. */
2427 if (fn == NULL_TREE)
2428 {
2429 tree fntype;
2430
2431 /* This was copied from except.c, I don't know if all this is
2432 necessary in this context or not. */
2433 fn = get_identifier ("memset");
2434 push_obstacks_nochange ();
2435 end_temporary_allocation ();
2436 fntype = build_pointer_type (void_type_node);
2437 fntype = build_function_type (fntype, NULL_TREE);
2438 fn = build_decl (FUNCTION_DECL, fn, fntype);
2439 DECL_EXTERNAL (fn) = 1;
2440 TREE_PUBLIC (fn) = 1;
2441 DECL_ARTIFICIAL (fn) = 1;
2442 make_decl_rtl (fn, NULL_PTR, 1);
2443 assemble_external (fn);
2444 pop_obstacks ();
2445 }
2446
2447 /* We need to make an argument list for the function call.
2448
2449 memset has three arguments, the first is a void * addresses, the
2450 second a integer with the initialization value, the last is a size_t
2451 byte count for the copy. */
2452 arg_list
2453 = build_tree_list (NULL_TREE,
2454 make_tree (build_pointer_type (void_type_node),
2455 XEXP (object, 0)));
2456 TREE_CHAIN (arg_list)
2457 = build_tree_list (NULL_TREE,
2458 make_tree (integer_type_node, const0_rtx));
2459 TREE_CHAIN (TREE_CHAIN (arg_list))
2460 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2461 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2462
2463 /* Now we have to build up the CALL_EXPR itself. */
2464 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2465 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2466 call_expr, arg_list, NULL_TREE);
2467 TREE_SIDE_EFFECTS (call_expr) = 1;
2468
2469 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
bbf6f052 2470#else
9de08200
RK
2471 emit_library_call (bzero_libfunc, 0,
2472 VOIDmode, 2,
2473 XEXP (object, 0), Pmode,
e9a25f70
JL
2474 convert_to_mode
2475 (TYPE_MODE (integer_type_node), size,
2476 TREE_UNSIGNED (integer_type_node)),
9de08200 2477 TYPE_MODE (integer_type_node));
bbf6f052 2478#endif
9de08200 2479 }
bbf6f052
RK
2480 }
2481 else
66ed0683 2482 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
e9a25f70
JL
2483
2484 return retval;
bbf6f052
RK
2485}
2486
2487/* Generate code to copy Y into X.
2488 Both Y and X must have the same mode, except that
2489 Y can be a constant with VOIDmode.
2490 This mode cannot be BLKmode; use emit_block_move for that.
2491
2492 Return the last instruction emitted. */
2493
2494rtx
2495emit_move_insn (x, y)
2496 rtx x, y;
2497{
2498 enum machine_mode mode = GET_MODE (x);
bbf6f052
RK
2499
2500 x = protect_from_queue (x, 1);
2501 y = protect_from_queue (y, 0);
2502
2503 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2504 abort ();
2505
2506 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2507 y = force_const_mem (mode, y);
2508
2509 /* If X or Y are memory references, verify that their addresses are valid
2510 for the machine. */
2511 if (GET_CODE (x) == MEM
2512 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2513 && ! push_operand (x, GET_MODE (x)))
2514 || (flag_force_addr
2515 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2516 x = change_address (x, VOIDmode, XEXP (x, 0));
2517
2518 if (GET_CODE (y) == MEM
2519 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2520 || (flag_force_addr
2521 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2522 y = change_address (y, VOIDmode, XEXP (y, 0));
2523
2524 if (mode == BLKmode)
2525 abort ();
2526
261c4230
RS
2527 return emit_move_insn_1 (x, y);
2528}
2529
2530/* Low level part of emit_move_insn.
2531 Called just like emit_move_insn, but assumes X and Y
2532 are basically valid. */
2533
2534rtx
2535emit_move_insn_1 (x, y)
2536 rtx x, y;
2537{
2538 enum machine_mode mode = GET_MODE (x);
2539 enum machine_mode submode;
2540 enum mode_class class = GET_MODE_CLASS (mode);
2541 int i;
2542
bbf6f052
RK
2543 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2544 return
2545 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2546
89742723 2547 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2548 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
d0c76654
RK
2549 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2550 * BITS_PER_UNIT),
2551 (class == MODE_COMPLEX_INT
2552 ? MODE_INT : MODE_FLOAT),
2553 0))
7308a047
RS
2554 && (mov_optab->handlers[(int) submode].insn_code
2555 != CODE_FOR_nothing))
2556 {
2557 /* Don't split destination if it is a stack push. */
2558 int stack = push_operand (x, GET_MODE (x));
7308a047 2559
7308a047
RS
2560 /* If this is a stack, push the highpart first, so it
2561 will be in the argument order.
2562
2563 In that case, change_address is used only to convert
2564 the mode, not to change the address. */
c937357e
RS
2565 if (stack)
2566 {
e33c0d66
RS
2567 /* Note that the real part always precedes the imag part in memory
2568 regardless of machine's endianness. */
c937357e
RS
2569#ifdef STACK_GROWS_DOWNWARD
2570 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2571 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2572 gen_imagpart (submode, y)));
c937357e 2573 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2574 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2575 gen_realpart (submode, y)));
c937357e
RS
2576#else
2577 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2578 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2579 gen_realpart (submode, y)));
c937357e 2580 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2581 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2582 gen_imagpart (submode, y)));
c937357e
RS
2583#endif
2584 }
2585 else
2586 {
2638126a
BS
2587 /* Show the output dies here. */
2588 if (x != y)
9e6a5703 2589 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2638126a 2590
c937357e 2591 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 2592 (gen_realpart (submode, x), gen_realpart (submode, y)));
c937357e 2593 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 2594 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
c937357e 2595 }
7308a047 2596
7a1ab50a 2597 return get_last_insn ();
7308a047
RS
2598 }
2599
bbf6f052
RK
2600 /* This will handle any multi-word mode that lacks a move_insn pattern.
2601 However, you will get better code if you define such patterns,
2602 even if they must turn into multiple assembler instructions. */
a4320483 2603 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
bbf6f052
RK
2604 {
2605 rtx last_insn = 0;
6551fa4d 2606
a98c9f1a
RK
2607#ifdef PUSH_ROUNDING
2608
2609 /* If X is a push on the stack, do the push now and replace
2610 X with a reference to the stack pointer. */
2611 if (push_operand (x, GET_MODE (x)))
2612 {
2613 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2614 x = change_address (x, VOIDmode, stack_pointer_rtx);
2615 }
2616#endif
2617
15a7a8ec 2618 /* Show the output dies here. */
43e046cb 2619 if (x != y)
38a448ca 2620 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
15a7a8ec 2621
bbf6f052
RK
2622 for (i = 0;
2623 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2624 i++)
2625 {
2626 rtx xpart = operand_subword (x, i, 1, mode);
2627 rtx ypart = operand_subword (y, i, 1, mode);
2628
2629 /* If we can't get a part of Y, put Y into memory if it is a
2630 constant. Otherwise, force it into a register. If we still
2631 can't get a part of Y, abort. */
2632 if (ypart == 0 && CONSTANT_P (y))
2633 {
2634 y = force_const_mem (mode, y);
2635 ypart = operand_subword (y, i, 1, mode);
2636 }
2637 else if (ypart == 0)
2638 ypart = operand_subword_force (y, i, mode);
2639
2640 if (xpart == 0 || ypart == 0)
2641 abort ();
2642
2643 last_insn = emit_move_insn (xpart, ypart);
2644 }
6551fa4d 2645
bbf6f052
RK
2646 return last_insn;
2647 }
2648 else
2649 abort ();
2650}
2651\f
2652/* Pushing data onto the stack. */
2653
2654/* Push a block of length SIZE (perhaps variable)
2655 and return an rtx to address the beginning of the block.
2656 Note that it is not possible for the value returned to be a QUEUED.
2657 The value may be virtual_outgoing_args_rtx.
2658
2659 EXTRA is the number of bytes of padding to push in addition to SIZE.
2660 BELOW nonzero means this padding comes at low addresses;
2661 otherwise, the padding comes at high addresses. */
2662
2663rtx
2664push_block (size, extra, below)
2665 rtx size;
2666 int extra, below;
2667{
2668 register rtx temp;
88f63c77
RK
2669
2670 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
2671 if (CONSTANT_P (size))
2672 anti_adjust_stack (plus_constant (size, extra));
2673 else if (GET_CODE (size) == REG && extra == 0)
2674 anti_adjust_stack (size);
2675 else
2676 {
2677 rtx temp = copy_to_mode_reg (Pmode, size);
2678 if (extra != 0)
906c4e36 2679 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
2680 temp, 0, OPTAB_LIB_WIDEN);
2681 anti_adjust_stack (temp);
2682 }
2683
2684#ifdef STACK_GROWS_DOWNWARD
2685 temp = virtual_outgoing_args_rtx;
2686 if (extra != 0 && below)
2687 temp = plus_constant (temp, extra);
2688#else
2689 if (GET_CODE (size) == CONST_INT)
2690 temp = plus_constant (virtual_outgoing_args_rtx,
2691 - INTVAL (size) - (below ? 0 : extra));
2692 else if (extra != 0 && !below)
38a448ca 2693 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
bbf6f052
RK
2694 negate_rtx (Pmode, plus_constant (size, extra)));
2695 else
38a448ca 2696 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
bbf6f052
RK
2697 negate_rtx (Pmode, size));
2698#endif
2699
2700 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2701}
2702
87e38d84 2703rtx
bbf6f052
RK
2704gen_push_operand ()
2705{
38a448ca 2706 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
bbf6f052
RK
2707}
2708
921b3427
RK
2709/* Return an rtx for the address of the beginning of a as-if-it-was-pushed
2710 block of SIZE bytes. */
2711
2712static rtx
2713get_push_address (size)
2714 int size;
2715{
2716 register rtx temp;
2717
2718 if (STACK_PUSH_CODE == POST_DEC)
38a448ca 2719 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427 2720 else if (STACK_PUSH_CODE == POST_INC)
38a448ca 2721 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427
RK
2722 else
2723 temp = stack_pointer_rtx;
2724
c85f7c16 2725 return copy_to_reg (temp);
921b3427
RK
2726}
2727
bbf6f052
RK
2728/* Generate code to push X onto the stack, assuming it has mode MODE and
2729 type TYPE.
2730 MODE is redundant except when X is a CONST_INT (since they don't
2731 carry mode info).
2732 SIZE is an rtx for the size of data to be copied (in bytes),
2733 needed only if X is BLKmode.
2734
2735 ALIGN (in bytes) is maximum alignment we can assume.
2736
cd048831
RK
2737 If PARTIAL and REG are both nonzero, then copy that many of the first
2738 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
2739 The amount of space pushed is decreased by PARTIAL words,
2740 rounded *down* to a multiple of PARM_BOUNDARY.
2741 REG must be a hard register in this case.
cd048831
RK
2742 If REG is zero but PARTIAL is not, take any all others actions for an
2743 argument partially in registers, but do not actually load any
2744 registers.
bbf6f052
RK
2745
2746 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 2747 This is ignored if an argument block has already been allocated.
bbf6f052
RK
2748
2749 On a machine that lacks real push insns, ARGS_ADDR is the address of
2750 the bottom of the argument block for this call. We use indexing off there
2751 to store the arg. On machines with push insns, ARGS_ADDR is 0 when a
2752 argument block has not been preallocated.
2753
e5e809f4
JL
2754 ARGS_SO_FAR is the size of args previously pushed for this call.
2755
2756 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2757 for arguments passed in registers. If nonzero, it will be the number
2758 of bytes required. */
bbf6f052
RK
2759
2760void
2761emit_push_insn (x, mode, type, size, align, partial, reg, extra,
e5e809f4 2762 args_addr, args_so_far, reg_parm_stack_space)
bbf6f052
RK
2763 register rtx x;
2764 enum machine_mode mode;
2765 tree type;
2766 rtx size;
2767 int align;
2768 int partial;
2769 rtx reg;
2770 int extra;
2771 rtx args_addr;
2772 rtx args_so_far;
e5e809f4 2773 int reg_parm_stack_space;
bbf6f052
RK
2774{
2775 rtx xinner;
2776 enum direction stack_direction
2777#ifdef STACK_GROWS_DOWNWARD
2778 = downward;
2779#else
2780 = upward;
2781#endif
2782
2783 /* Decide where to pad the argument: `downward' for below,
2784 `upward' for above, or `none' for don't pad it.
2785 Default is below for small data on big-endian machines; else above. */
2786 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2787
2788 /* Invert direction if stack is post-update. */
2789 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2790 if (where_pad != none)
2791 where_pad = (where_pad == downward ? upward : downward);
2792
2793 xinner = x = protect_from_queue (x, 0);
2794
2795 if (mode == BLKmode)
2796 {
2797 /* Copy a block into the stack, entirely or partially. */
2798
2799 register rtx temp;
2800 int used = partial * UNITS_PER_WORD;
2801 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2802 int skip;
2803
2804 if (size == 0)
2805 abort ();
2806
2807 used -= offset;
2808
2809 /* USED is now the # of bytes we need not copy to the stack
2810 because registers will take care of them. */
2811
2812 if (partial != 0)
2813 xinner = change_address (xinner, BLKmode,
2814 plus_constant (XEXP (xinner, 0), used));
2815
2816 /* If the partial register-part of the arg counts in its stack size,
2817 skip the part of stack space corresponding to the registers.
2818 Otherwise, start copying to the beginning of the stack space,
2819 by setting SKIP to 0. */
e5e809f4 2820 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
2821
2822#ifdef PUSH_ROUNDING
2823 /* Do it with several push insns if that doesn't take lots of insns
2824 and if there is no difficulty with push insns that skip bytes
2825 on the stack for alignment purposes. */
2826 if (args_addr == 0
2827 && GET_CODE (size) == CONST_INT
2828 && skip == 0
2829 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2830 < MOVE_RATIO)
bbf6f052
RK
2831 /* Here we avoid the case of a structure whose weak alignment
2832 forces many pushes of a small amount of data,
2833 and such small pushes do rounding that causes trouble. */
c7a7ac46 2834 && ((! SLOW_UNALIGNED_ACCESS)
e87b4f3f 2835 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 2836 || PUSH_ROUNDING (align) == align)
bbf6f052
RK
2837 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2838 {
2839 /* Push padding now if padding above and stack grows down,
2840 or if padding below and stack grows up.
2841 But if space already allocated, this has already been done. */
2842 if (extra && args_addr == 0
2843 && where_pad != none && where_pad != stack_direction)
906c4e36 2844 anti_adjust_stack (GEN_INT (extra));
bbf6f052 2845
38a448ca 2846 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
bbf6f052 2847 INTVAL (size) - used, align);
921b3427 2848
956d6950 2849 if (flag_check_memory_usage && ! in_check_memory_usage)
921b3427
RK
2850 {
2851 rtx temp;
2852
956d6950 2853 in_check_memory_usage = 1;
921b3427 2854 temp = get_push_address (INTVAL(size) - used);
c85f7c16 2855 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427
RK
2856 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2857 temp, ptr_mode,
2858 XEXP (xinner, 0), ptr_mode,
2859 GEN_INT (INTVAL(size) - used),
2860 TYPE_MODE (sizetype));
2861 else
2862 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2863 temp, ptr_mode,
2864 GEN_INT (INTVAL(size) - used),
2865 TYPE_MODE (sizetype),
956d6950
JL
2866 GEN_INT (MEMORY_USE_RW),
2867 TYPE_MODE (integer_type_node));
2868 in_check_memory_usage = 0;
921b3427 2869 }
bbf6f052
RK
2870 }
2871 else
2872#endif /* PUSH_ROUNDING */
2873 {
2874 /* Otherwise make space on the stack and copy the data
2875 to the address of that space. */
2876
2877 /* Deduct words put into registers from the size we must copy. */
2878 if (partial != 0)
2879 {
2880 if (GET_CODE (size) == CONST_INT)
906c4e36 2881 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
2882 else
2883 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
2884 GEN_INT (used), NULL_RTX, 0,
2885 OPTAB_LIB_WIDEN);
bbf6f052
RK
2886 }
2887
2888 /* Get the address of the stack space.
2889 In this case, we do not deal with EXTRA separately.
2890 A single stack adjust will do. */
2891 if (! args_addr)
2892 {
2893 temp = push_block (size, extra, where_pad == downward);
2894 extra = 0;
2895 }
2896 else if (GET_CODE (args_so_far) == CONST_INT)
2897 temp = memory_address (BLKmode,
2898 plus_constant (args_addr,
2899 skip + INTVAL (args_so_far)));
2900 else
2901 temp = memory_address (BLKmode,
38a448ca
RH
2902 plus_constant (gen_rtx_PLUS (Pmode,
2903 args_addr,
2904 args_so_far),
bbf6f052 2905 skip));
956d6950 2906 if (flag_check_memory_usage && ! in_check_memory_usage)
921b3427
RK
2907 {
2908 rtx target;
2909
956d6950 2910 in_check_memory_usage = 1;
921b3427 2911 target = copy_to_reg (temp);
c85f7c16 2912 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427
RK
2913 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2914 target, ptr_mode,
2915 XEXP (xinner, 0), ptr_mode,
2916 size, TYPE_MODE (sizetype));
2917 else
2918 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2919 target, ptr_mode,
2920 size, TYPE_MODE (sizetype),
956d6950
JL
2921 GEN_INT (MEMORY_USE_RW),
2922 TYPE_MODE (integer_type_node));
2923 in_check_memory_usage = 0;
921b3427 2924 }
bbf6f052
RK
2925
2926 /* TEMP is the address of the block. Copy the data there. */
2927 if (GET_CODE (size) == CONST_INT
2928 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2929 < MOVE_RATIO))
2930 {
38a448ca 2931 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
bbf6f052
RK
2932 INTVAL (size), align);
2933 goto ret;
2934 }
e5e809f4 2935 else
bbf6f052 2936 {
e5e809f4
JL
2937 rtx opalign = GEN_INT (align);
2938 enum machine_mode mode;
9e6a5703 2939 rtx target = gen_rtx_MEM (BLKmode, temp);
e5e809f4
JL
2940
2941 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2942 mode != VOIDmode;
2943 mode = GET_MODE_WIDER_MODE (mode))
c841050e 2944 {
e5e809f4
JL
2945 enum insn_code code = movstr_optab[(int) mode];
2946
2947 if (code != CODE_FOR_nothing
2948 && ((GET_CODE (size) == CONST_INT
2949 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2950 <= (GET_MODE_MASK (mode) >> 1)))
2951 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2952 && (insn_operand_predicate[(int) code][0] == 0
2953 || ((*insn_operand_predicate[(int) code][0])
2954 (target, BLKmode)))
2955 && (insn_operand_predicate[(int) code][1] == 0
2956 || ((*insn_operand_predicate[(int) code][1])
2957 (xinner, BLKmode)))
2958 && (insn_operand_predicate[(int) code][3] == 0
2959 || ((*insn_operand_predicate[(int) code][3])
2960 (opalign, VOIDmode))))
2961 {
2962 rtx op2 = convert_to_mode (mode, size, 1);
2963 rtx last = get_last_insn ();
2964 rtx pat;
2965
2966 if (insn_operand_predicate[(int) code][2] != 0
2967 && ! ((*insn_operand_predicate[(int) code][2])
2968 (op2, mode)))
2969 op2 = copy_to_mode_reg (mode, op2);
2970
2971 pat = GEN_FCN ((int) code) (target, xinner,
2972 op2, opalign);
2973 if (pat)
2974 {
2975 emit_insn (pat);
2976 goto ret;
2977 }
2978 else
2979 delete_insns_since (last);
2980 }
c841050e 2981 }
bbf6f052 2982 }
bbf6f052
RK
2983
2984#ifndef ACCUMULATE_OUTGOING_ARGS
2985 /* If the source is referenced relative to the stack pointer,
2986 copy it to another register to stabilize it. We do not need
2987 to do this if we know that we won't be changing sp. */
2988
2989 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2990 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2991 temp = copy_to_reg (temp);
2992#endif
2993
2994 /* Make inhibit_defer_pop nonzero around the library call
2995 to force it to pop the bcopy-arguments right away. */
2996 NO_DEFER_POP;
2997#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2998 emit_library_call (memcpy_libfunc, 0,
bbf6f052 2999 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
0fa83258
RK
3000 convert_to_mode (TYPE_MODE (sizetype),
3001 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3002 TYPE_MODE (sizetype));
bbf6f052 3003#else
d562e42e 3004 emit_library_call (bcopy_libfunc, 0,
bbf6f052 3005 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3b6f75e2
JW
3006 convert_to_mode (TYPE_MODE (integer_type_node),
3007 size,
3008 TREE_UNSIGNED (integer_type_node)),
3009 TYPE_MODE (integer_type_node));
bbf6f052
RK
3010#endif
3011 OK_DEFER_POP;
3012 }
3013 }
3014 else if (partial > 0)
3015 {
3016 /* Scalar partly in registers. */
3017
3018 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
3019 int i;
3020 int not_stack;
3021 /* # words of start of argument
3022 that we must make space for but need not store. */
3023 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
3024 int args_offset = INTVAL (args_so_far);
3025 int skip;
3026
3027 /* Push padding now if padding above and stack grows down,
3028 or if padding below and stack grows up.
3029 But if space already allocated, this has already been done. */
3030 if (extra && args_addr == 0
3031 && where_pad != none && where_pad != stack_direction)
906c4e36 3032 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3033
3034 /* If we make space by pushing it, we might as well push
3035 the real data. Otherwise, we can leave OFFSET nonzero
3036 and leave the space uninitialized. */
3037 if (args_addr == 0)
3038 offset = 0;
3039
3040 /* Now NOT_STACK gets the number of words that we don't need to
3041 allocate on the stack. */
3042 not_stack = partial - offset;
3043
3044 /* If the partial register-part of the arg counts in its stack size,
3045 skip the part of stack space corresponding to the registers.
3046 Otherwise, start copying to the beginning of the stack space,
3047 by setting SKIP to 0. */
e5e809f4 3048 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
3049
3050 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
3051 x = validize_mem (force_const_mem (mode, x));
3052
3053 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
3054 SUBREGs of such registers are not allowed. */
3055 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
3056 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
3057 x = copy_to_reg (x);
3058
3059 /* Loop over all the words allocated on the stack for this arg. */
3060 /* We can do it by words, because any scalar bigger than a word
3061 has a size a multiple of a word. */
3062#ifndef PUSH_ARGS_REVERSED
3063 for (i = not_stack; i < size; i++)
3064#else
3065 for (i = size - 1; i >= not_stack; i--)
3066#endif
3067 if (i >= not_stack + offset)
3068 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
3069 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
3070 0, args_addr,
3071 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4
JL
3072 * UNITS_PER_WORD)),
3073 reg_parm_stack_space);
bbf6f052
RK
3074 }
3075 else
3076 {
3077 rtx addr;
921b3427 3078 rtx target = NULL_RTX;
bbf6f052
RK
3079
3080 /* Push padding now if padding above and stack grows down,
3081 or if padding below and stack grows up.
3082 But if space already allocated, this has already been done. */
3083 if (extra && args_addr == 0
3084 && where_pad != none && where_pad != stack_direction)
906c4e36 3085 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3086
3087#ifdef PUSH_ROUNDING
3088 if (args_addr == 0)
3089 addr = gen_push_operand ();
3090 else
3091#endif
921b3427
RK
3092 {
3093 if (GET_CODE (args_so_far) == CONST_INT)
3094 addr
3095 = memory_address (mode,
3096 plus_constant (args_addr,
3097 INTVAL (args_so_far)));
3098 else
38a448ca
RH
3099 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3100 args_so_far));
921b3427
RK
3101 target = addr;
3102 }
bbf6f052 3103
38a448ca 3104 emit_move_insn (gen_rtx_MEM (mode, addr), x);
921b3427 3105
956d6950 3106 if (flag_check_memory_usage && ! in_check_memory_usage)
921b3427 3107 {
956d6950 3108 in_check_memory_usage = 1;
921b3427
RK
3109 if (target == 0)
3110 target = get_push_address (GET_MODE_SIZE (mode));
3111
c85f7c16 3112 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427
RK
3113 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3114 target, ptr_mode,
3115 XEXP (x, 0), ptr_mode,
3116 GEN_INT (GET_MODE_SIZE (mode)),
3117 TYPE_MODE (sizetype));
3118 else
3119 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3120 target, ptr_mode,
3121 GEN_INT (GET_MODE_SIZE (mode)),
3122 TYPE_MODE (sizetype),
956d6950
JL
3123 GEN_INT (MEMORY_USE_RW),
3124 TYPE_MODE (integer_type_node));
3125 in_check_memory_usage = 0;
921b3427 3126 }
bbf6f052
RK
3127 }
3128
3129 ret:
3130 /* If part should go in registers, copy that part
3131 into the appropriate registers. Do this now, at the end,
3132 since mem-to-mem copies above may do function calls. */
cd048831 3133 if (partial > 0 && reg != 0)
fffa9c1d
JW
3134 {
3135 /* Handle calls that pass values in multiple non-contiguous locations.
3136 The Irix 6 ABI has examples of this. */
3137 if (GET_CODE (reg) == PARALLEL)
aac5cc16 3138 emit_group_load (reg, x, -1, align); /* ??? size? */
fffa9c1d
JW
3139 else
3140 move_block_to_reg (REGNO (reg), x, partial, mode);
3141 }
bbf6f052
RK
3142
3143 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3144 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3145}
3146\f
bbf6f052
RK
3147/* Expand an assignment that stores the value of FROM into TO.
3148 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
3149 (This may contain a QUEUED rtx;
3150 if the value is constant, this rtx is a constant.)
3151 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
3152
3153 SUGGEST_REG is no longer actually used.
3154 It used to mean, copy the value through a register
3155 and return that register, if that is possible.
709f5be1 3156 We now use WANT_VALUE to decide whether to do this. */
bbf6f052
RK
3157
3158rtx
3159expand_assignment (to, from, want_value, suggest_reg)
3160 tree to, from;
3161 int want_value;
3162 int suggest_reg;
3163{
3164 register rtx to_rtx = 0;
3165 rtx result;
3166
3167 /* Don't crash if the lhs of the assignment was erroneous. */
3168
3169 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
3170 {
3171 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3172 return want_value ? result : NULL_RTX;
3173 }
bbf6f052
RK
3174
3175 /* Assignment of a structure component needs special treatment
3176 if the structure component's rtx is not simply a MEM.
6be58303
JW
3177 Assignment of an array element at a constant index, and assignment of
3178 an array element in an unaligned packed structure field, has the same
3179 problem. */
bbf6f052 3180
08293add
RK
3181 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3182 || TREE_CODE (to) == ARRAY_REF)
bbf6f052
RK
3183 {
3184 enum machine_mode mode1;
3185 int bitsize;
3186 int bitpos;
7bb0943f 3187 tree offset;
bbf6f052
RK
3188 int unsignedp;
3189 int volatilep = 0;
0088fcb1 3190 tree tem;
d78d243c 3191 int alignment;
0088fcb1
RK
3192
3193 push_temp_slots ();
839c4796
RK
3194 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3195 &unsignedp, &volatilep, &alignment);
bbf6f052
RK
3196
3197 /* If we are going to use store_bit_field and extract_bit_field,
3198 make sure to_rtx will be safe for multiple use. */
3199
3200 if (mode1 == VOIDmode && want_value)
3201 tem = stabilize_reference (tem);
3202
921b3427 3203 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
7bb0943f
RS
3204 if (offset != 0)
3205 {
906c4e36 3206 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
3207
3208 if (GET_CODE (to_rtx) != MEM)
3209 abort ();
bd070e1a
RH
3210
3211 if (GET_MODE (offset_rtx) != ptr_mode)
3212 {
3213#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 3214 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
3215#else
3216 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3217#endif
3218 }
3219
89752202
HB
3220 if (GET_CODE (to_rtx) == MEM
3221 && GET_MODE (to_rtx) == BLKmode
3222 && bitsize
3223 && (bitpos % bitsize) == 0
3224 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3225 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3226 {
3227 rtx temp = change_address (to_rtx, mode1,
3228 plus_constant (XEXP (to_rtx, 0),
3229 (bitpos /
3230 BITS_PER_UNIT)));
3231 if (GET_CODE (XEXP (temp, 0)) == REG)
3232 to_rtx = temp;
3233 else
3234 to_rtx = change_address (to_rtx, mode1,
3235 force_reg (GET_MODE (XEXP (temp, 0)),
3236 XEXP (temp, 0)));
3237 bitpos = 0;
3238 }
3239
7bb0943f 3240 to_rtx = change_address (to_rtx, VOIDmode,
38a448ca
RH
3241 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3242 force_reg (ptr_mode, offset_rtx)));
7bb0943f 3243 }
bbf6f052
RK
3244 if (volatilep)
3245 {
3246 if (GET_CODE (to_rtx) == MEM)
01188446
JW
3247 {
3248 /* When the offset is zero, to_rtx is the address of the
3249 structure we are storing into, and hence may be shared.
3250 We must make a new MEM before setting the volatile bit. */
3251 if (offset == 0)
effbcc6a
RK
3252 to_rtx = copy_rtx (to_rtx);
3253
01188446
JW
3254 MEM_VOLATILE_P (to_rtx) = 1;
3255 }
bbf6f052
RK
3256#if 0 /* This was turned off because, when a field is volatile
3257 in an object which is not volatile, the object may be in a register,
3258 and then we would abort over here. */
3259 else
3260 abort ();
3261#endif
3262 }
3263
956d6950
JL
3264 if (TREE_CODE (to) == COMPONENT_REF
3265 && TREE_READONLY (TREE_OPERAND (to, 1)))
3266 {
8bd6ecc2 3267 if (offset == 0)
956d6950
JL
3268 to_rtx = copy_rtx (to_rtx);
3269
3270 RTX_UNCHANGING_P (to_rtx) = 1;
3271 }
3272
921b3427
RK
3273 /* Check the access. */
3274 if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
3275 {
3276 rtx to_addr;
3277 int size;
3278 int best_mode_size;
3279 enum machine_mode best_mode;
3280
3281 best_mode = get_best_mode (bitsize, bitpos,
3282 TYPE_ALIGN (TREE_TYPE (tem)),
3283 mode1, volatilep);
3284 if (best_mode == VOIDmode)
3285 best_mode = QImode;
3286
3287 best_mode_size = GET_MODE_BITSIZE (best_mode);
3288 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3289 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3290 size *= GET_MODE_SIZE (best_mode);
3291
3292 /* Check the access right of the pointer. */
e9a25f70
JL
3293 if (size)
3294 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3295 to_addr, ptr_mode,
3296 GEN_INT (size), TYPE_MODE (sizetype),
956d6950
JL
3297 GEN_INT (MEMORY_USE_WO),
3298 TYPE_MODE (integer_type_node));
921b3427
RK
3299 }
3300
bbf6f052
RK
3301 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3302 (want_value
3303 /* Spurious cast makes HPUX compiler happy. */
3304 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3305 : VOIDmode),
3306 unsignedp,
3307 /* Required alignment of containing datum. */
d78d243c 3308 alignment,
ece32014
MM
3309 int_size_in_bytes (TREE_TYPE (tem)),
3310 get_alias_set (to));
bbf6f052
RK
3311 preserve_temp_slots (result);
3312 free_temp_slots ();
0088fcb1 3313 pop_temp_slots ();
bbf6f052 3314
709f5be1
RS
3315 /* If the value is meaningful, convert RESULT to the proper mode.
3316 Otherwise, return nothing. */
5ffe63ed
RS
3317 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3318 TYPE_MODE (TREE_TYPE (from)),
3319 result,
3320 TREE_UNSIGNED (TREE_TYPE (to)))
709f5be1 3321 : NULL_RTX);
bbf6f052
RK
3322 }
3323
cd1db108
RS
3324 /* If the rhs is a function call and its value is not an aggregate,
3325 call the function before we start to compute the lhs.
3326 This is needed for correct code for cases such as
3327 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
3328 requires loading up part of an address in a separate insn.
3329
3330 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3331 a promoted variable where the zero- or sign- extension needs to be done.
3332 Handling this in the normal way is safe because no computation is done
3333 before the call. */
3334 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 3335 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1ad87b63 3336 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3337 {
0088fcb1
RK
3338 rtx value;
3339
3340 push_temp_slots ();
3341 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3342 if (to_rtx == 0)
921b3427 3343 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
aaf87c45 3344
fffa9c1d
JW
3345 /* Handle calls that return values in multiple non-contiguous locations.
3346 The Irix 6 ABI has examples of this. */
3347 if (GET_CODE (to_rtx) == PARALLEL)
aac5cc16
RH
3348 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3349 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
fffa9c1d 3350 else if (GET_MODE (to_rtx) == BLKmode)
db3ec607 3351 emit_block_move (to_rtx, value, expr_size (from),
ff9b5bd8 3352 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
aaf87c45
JL
3353 else
3354 emit_move_insn (to_rtx, value);
cd1db108
RS
3355 preserve_temp_slots (to_rtx);
3356 free_temp_slots ();
0088fcb1 3357 pop_temp_slots ();
709f5be1 3358 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3359 }
3360
bbf6f052
RK
3361 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3362 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3363
3364 if (to_rtx == 0)
41472af8
MM
3365 {
3366 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3367 if (GET_CODE (to_rtx) == MEM)
3368 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3369 }
bbf6f052 3370
86d38d25
RS
3371 /* Don't move directly into a return register. */
3372 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3373 {
0088fcb1
RK
3374 rtx temp;
3375
3376 push_temp_slots ();
3377 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
86d38d25
RS
3378 emit_move_insn (to_rtx, temp);
3379 preserve_temp_slots (to_rtx);
3380 free_temp_slots ();
0088fcb1 3381 pop_temp_slots ();
709f5be1 3382 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3383 }
3384
bbf6f052
RK
3385 /* In case we are returning the contents of an object which overlaps
3386 the place the value is being stored, use a safe function when copying
3387 a value through a pointer into a structure value return block. */
3388 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3389 && current_function_returns_struct
3390 && !current_function_returns_pcc_struct)
3391 {
0088fcb1
RK
3392 rtx from_rtx, size;
3393
3394 push_temp_slots ();
33a20d10 3395 size = expr_size (from);
921b3427
RK
3396 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3397 EXPAND_MEMORY_USE_DONT);
3398
3399 /* Copy the rights of the bitmap. */
3400 if (flag_check_memory_usage)
3401 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3402 XEXP (to_rtx, 0), ptr_mode,
3403 XEXP (from_rtx, 0), ptr_mode,
3404 convert_to_mode (TYPE_MODE (sizetype),
3405 size, TREE_UNSIGNED (sizetype)),
3406 TYPE_MODE (sizetype));
bbf6f052
RK
3407
3408#ifdef TARGET_MEM_FUNCTIONS
d562e42e 3409 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
3410 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3411 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
3412 convert_to_mode (TYPE_MODE (sizetype),
3413 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3414 TYPE_MODE (sizetype));
bbf6f052 3415#else
d562e42e 3416 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
3417 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3418 XEXP (to_rtx, 0), Pmode,
3b6f75e2
JW
3419 convert_to_mode (TYPE_MODE (integer_type_node),
3420 size, TREE_UNSIGNED (integer_type_node)),
3421 TYPE_MODE (integer_type_node));
bbf6f052
RK
3422#endif
3423
3424 preserve_temp_slots (to_rtx);
3425 free_temp_slots ();
0088fcb1 3426 pop_temp_slots ();
709f5be1 3427 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3428 }
3429
3430 /* Compute FROM and store the value in the rtx we got. */
3431
0088fcb1 3432 push_temp_slots ();
bbf6f052
RK
3433 result = store_expr (from, to_rtx, want_value);
3434 preserve_temp_slots (result);
3435 free_temp_slots ();
0088fcb1 3436 pop_temp_slots ();
709f5be1 3437 return want_value ? result : NULL_RTX;
bbf6f052
RK
3438}
3439
3440/* Generate code for computing expression EXP,
3441 and storing the value into TARGET.
bbf6f052
RK
3442 TARGET may contain a QUEUED rtx.
3443
709f5be1
RS
3444 If WANT_VALUE is nonzero, return a copy of the value
3445 not in TARGET, so that we can be sure to use the proper
3446 value in a containing expression even if TARGET has something
3447 else stored in it. If possible, we copy the value through a pseudo
3448 and return that pseudo. Or, if the value is constant, we try to
3449 return the constant. In some cases, we return a pseudo
3450 copied *from* TARGET.
3451
3452 If the mode is BLKmode then we may return TARGET itself.
3453 It turns out that in BLKmode it doesn't cause a problem.
3454 because C has no operators that could combine two different
3455 assignments into the same BLKmode object with different values
3456 with no sequence point. Will other languages need this to
3457 be more thorough?
3458
3459 If WANT_VALUE is 0, we return NULL, to make sure
3460 to catch quickly any cases where the caller uses the value
3461 and fails to set WANT_VALUE. */
bbf6f052
RK
3462
3463rtx
709f5be1 3464store_expr (exp, target, want_value)
bbf6f052
RK
3465 register tree exp;
3466 register rtx target;
709f5be1 3467 int want_value;
bbf6f052
RK
3468{
3469 register rtx temp;
3470 int dont_return_target = 0;
3471
3472 if (TREE_CODE (exp) == COMPOUND_EXPR)
3473 {
3474 /* Perform first part of compound expression, then assign from second
3475 part. */
3476 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3477 emit_queue ();
709f5be1 3478 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
3479 }
3480 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3481 {
3482 /* For conditional expression, get safe form of the target. Then
3483 test the condition, doing the appropriate assignment on either
3484 side. This avoids the creation of unnecessary temporaries.
3485 For non-BLKmode, it is more efficient not to do this. */
3486
3487 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3488
3489 emit_queue ();
3490 target = protect_from_queue (target, 1);
3491
dabf8373 3492 do_pending_stack_adjust ();
bbf6f052
RK
3493 NO_DEFER_POP;
3494 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 3495 start_cleanup_deferral ();
709f5be1 3496 store_expr (TREE_OPERAND (exp, 1), target, 0);
956d6950 3497 end_cleanup_deferral ();
bbf6f052
RK
3498 emit_queue ();
3499 emit_jump_insn (gen_jump (lab2));
3500 emit_barrier ();
3501 emit_label (lab1);
956d6950 3502 start_cleanup_deferral ();
709f5be1 3503 store_expr (TREE_OPERAND (exp, 2), target, 0);
956d6950 3504 end_cleanup_deferral ();
bbf6f052
RK
3505 emit_queue ();
3506 emit_label (lab2);
3507 OK_DEFER_POP;
a3a58acc 3508
709f5be1 3509 return want_value ? target : NULL_RTX;
bbf6f052 3510 }
709f5be1 3511 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
bbf6f052
RK
3512 && GET_MODE (target) != BLKmode)
3513 /* If target is in memory and caller wants value in a register instead,
3514 arrange that. Pass TARGET as target for expand_expr so that,
709f5be1 3515 if EXP is another assignment, WANT_VALUE will be nonzero for it.
c2e6aff6
RS
3516 We know expand_expr will not use the target in that case.
3517 Don't do this if TARGET is volatile because we are supposed
3518 to write it and then read it. */
bbf6f052 3519 {
906c4e36 3520 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
bbf6f052
RK
3521 GET_MODE (target), 0);
3522 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3523 temp = copy_to_reg (temp);
3524 dont_return_target = 1;
3525 }
3526 else if (queued_subexp_p (target))
709f5be1
RS
3527 /* If target contains a postincrement, let's not risk
3528 using it as the place to generate the rhs. */
bbf6f052
RK
3529 {
3530 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3531 {
3532 /* Expand EXP into a new pseudo. */
3533 temp = gen_reg_rtx (GET_MODE (target));
3534 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3535 }
3536 else
906c4e36 3537 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
3538
3539 /* If target is volatile, ANSI requires accessing the value
3540 *from* the target, if it is accessed. So make that happen.
3541 In no case return the target itself. */
3542 if (! MEM_VOLATILE_P (target) && want_value)
3543 dont_return_target = 1;
bbf6f052 3544 }
1499e0a8
RK
3545 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
3546 /* If this is an scalar in a register that is stored in a wider mode
3547 than the declared mode, compute the result into its declared mode
3548 and then convert to the wider mode. Our value is the computed
3549 expression. */
3550 {
5a32d038 3551 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
3552 which will often result in some optimizations. Do the conversion
3553 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
3554 the extend. But don't do this if the type of EXP is a subtype
3555 of something else since then the conversion might involve
3556 more than just converting modes. */
3557 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3558 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
3559 {
3560 if (TREE_UNSIGNED (TREE_TYPE (exp))
3561 != SUBREG_PROMOTED_UNSIGNED_P (target))
3562 exp
3563 = convert
3564 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3565 TREE_TYPE (exp)),
3566 exp);
3567
3568 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3569 SUBREG_PROMOTED_UNSIGNED_P (target)),
3570 exp);
3571 }
5a32d038 3572
1499e0a8 3573 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
b258707c 3574
766f36c7 3575 /* If TEMP is a volatile MEM and we want a result value, make
f29369b9
RK
3576 the access now so it gets done only once. Likewise if
3577 it contains TARGET. */
3578 if (GET_CODE (temp) == MEM && want_value
3579 && (MEM_VOLATILE_P (temp)
3580 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
766f36c7
RK
3581 temp = copy_to_reg (temp);
3582
b258707c
RS
3583 /* If TEMP is a VOIDmode constant, use convert_modes to make
3584 sure that we properly convert it. */
3585 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3586 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3587 TYPE_MODE (TREE_TYPE (exp)), temp,
3588 SUBREG_PROMOTED_UNSIGNED_P (target));
3589
1499e0a8
RK
3590 convert_move (SUBREG_REG (target), temp,
3591 SUBREG_PROMOTED_UNSIGNED_P (target));
709f5be1 3592 return want_value ? temp : NULL_RTX;
1499e0a8 3593 }
bbf6f052
RK
3594 else
3595 {
3596 temp = expand_expr (exp, target, GET_MODE (target), 0);
766f36c7 3597 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
3598 If TARGET is a volatile mem ref, either return TARGET
3599 or return a reg copied *from* TARGET; ANSI requires this.
3600
3601 Otherwise, if TEMP is not TARGET, return TEMP
3602 if it is constant (for efficiency),
3603 or if we really want the correct value. */
bbf6f052
RK
3604 if (!(target && GET_CODE (target) == REG
3605 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 3606 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 3607 && ! rtx_equal_p (temp, target)
709f5be1 3608 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
3609 dont_return_target = 1;
3610 }
3611
b258707c
RS
3612 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3613 the same as that of TARGET, adjust the constant. This is needed, for
3614 example, in case it is a CONST_DOUBLE and we want only a word-sized
3615 value. */
3616 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 3617 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
3618 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3619 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3620 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3621
921b3427
RK
3622 if (flag_check_memory_usage
3623 && GET_CODE (target) == MEM
3624 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3625 {
3626 if (GET_CODE (temp) == MEM)
3627 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3628 XEXP (target, 0), ptr_mode,
3629 XEXP (temp, 0), ptr_mode,
3630 expr_size (exp), TYPE_MODE (sizetype));
3631 else
3632 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3633 XEXP (target, 0), ptr_mode,
3634 expr_size (exp), TYPE_MODE (sizetype),
956d6950
JL
3635 GEN_INT (MEMORY_USE_WO),
3636 TYPE_MODE (integer_type_node));
921b3427
RK
3637 }
3638
bbf6f052
RK
3639 /* If value was not generated in the target, store it there.
3640 Convert the value to TARGET's type first if nec. */
3641
6036acbb
R
3642 if ((! rtx_equal_p (temp, target)
3643 || side_effects_p (temp)
3644 || side_effects_p (target))
3645 && TREE_CODE (exp) != ERROR_MARK)
bbf6f052
RK
3646 {
3647 target = protect_from_queue (target, 1);
3648 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 3649 && GET_MODE (temp) != VOIDmode)
bbf6f052
RK
3650 {
3651 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3652 if (dont_return_target)
3653 {
3654 /* In this case, we will return TEMP,
3655 so make sure it has the proper mode.
3656 But don't forget to store the value into TARGET. */
3657 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3658 emit_move_insn (target, temp);
3659 }
3660 else
3661 convert_move (target, temp, unsignedp);
3662 }
3663
3664 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3665 {
3666 /* Handle copying a string constant into an array.
3667 The string constant may be shorter than the array.
3668 So copy just the string's actual length, and clear the rest. */
3669 rtx size;
22619c3f 3670 rtx addr;
bbf6f052 3671
e87b4f3f
RS
3672 /* Get the size of the data type of the string,
3673 which is actually the size of the target. */
3674 size = expr_size (exp);
3675 if (GET_CODE (size) == CONST_INT
3676 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3677 emit_block_move (target, temp, size,
3678 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3679 else
bbf6f052 3680 {
e87b4f3f
RS
3681 /* Compute the size of the data to copy from the string. */
3682 tree copy_size
c03b7665 3683 = size_binop (MIN_EXPR,
b50d17a1 3684 make_tree (sizetype, size),
c03b7665
RK
3685 convert (sizetype,
3686 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
906c4e36
RK
3687 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3688 VOIDmode, 0);
e87b4f3f
RS
3689 rtx label = 0;
3690
3691 /* Copy that much. */
3692 emit_block_move (target, temp, copy_size_rtx,
3693 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3694
88f63c77
RK
3695 /* Figure out how much is left in TARGET that we have to clear.
3696 Do all calculations in ptr_mode. */
3697
3698 addr = XEXP (target, 0);
3699 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3700
e87b4f3f
RS
3701 if (GET_CODE (copy_size_rtx) == CONST_INT)
3702 {
88f63c77 3703 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
22619c3f 3704 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
e87b4f3f
RS
3705 }
3706 else
3707 {
88f63c77
RK
3708 addr = force_reg (ptr_mode, addr);
3709 addr = expand_binop (ptr_mode, add_optab, addr,
906c4e36
RK
3710 copy_size_rtx, NULL_RTX, 0,
3711 OPTAB_LIB_WIDEN);
e87b4f3f 3712
88f63c77 3713 size = expand_binop (ptr_mode, sub_optab, size,
906c4e36
RK
3714 copy_size_rtx, NULL_RTX, 0,
3715 OPTAB_LIB_WIDEN);
e87b4f3f 3716
906c4e36 3717 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
e87b4f3f
RS
3718 GET_MODE (size), 0, 0);
3719 label = gen_label_rtx ();
3720 emit_jump_insn (gen_blt (label));
3721 }
3722
3723 if (size != const0_rtx)
3724 {
921b3427
RK
3725 /* Be sure we can write on ADDR. */
3726 if (flag_check_memory_usage)
3727 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3728 addr, ptr_mode,
3729 size, TYPE_MODE (sizetype),
956d6950
JL
3730 GEN_INT (MEMORY_USE_WO),
3731 TYPE_MODE (integer_type_node));
bbf6f052 3732#ifdef TARGET_MEM_FUNCTIONS
3b6f75e2 3733 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
86242483 3734 addr, ptr_mode,
3b6f75e2
JW
3735 const0_rtx, TYPE_MODE (integer_type_node),
3736 convert_to_mode (TYPE_MODE (sizetype),
3737 size,
3738 TREE_UNSIGNED (sizetype)),
3739 TYPE_MODE (sizetype));
bbf6f052 3740#else
d562e42e 3741 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
86242483 3742 addr, ptr_mode,
3b6f75e2
JW
3743 convert_to_mode (TYPE_MODE (integer_type_node),
3744 size,
3745 TREE_UNSIGNED (integer_type_node)),
3746 TYPE_MODE (integer_type_node));
bbf6f052 3747#endif
e87b4f3f 3748 }
22619c3f 3749
e87b4f3f
RS
3750 if (label)
3751 emit_label (label);
bbf6f052
RK
3752 }
3753 }
fffa9c1d
JW
3754 /* Handle calls that return values in multiple non-contiguous locations.
3755 The Irix 6 ABI has examples of this. */
3756 else if (GET_CODE (target) == PARALLEL)
aac5cc16
RH
3757 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3758 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
bbf6f052
RK
3759 else if (GET_MODE (temp) == BLKmode)
3760 emit_block_move (target, temp, expr_size (exp),
3761 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3762 else
3763 emit_move_insn (target, temp);
3764 }
709f5be1 3765
766f36c7
RK
3766 /* If we don't want a value, return NULL_RTX. */
3767 if (! want_value)
3768 return NULL_RTX;
3769
3770 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3771 ??? The latter test doesn't seem to make sense. */
3772 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 3773 return temp;
766f36c7
RK
3774
3775 /* Return TARGET itself if it is a hard register. */
3776 else if (want_value && GET_MODE (target) != BLKmode
3777 && ! (GET_CODE (target) == REG
3778 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 3779 return copy_to_reg (target);
766f36c7
RK
3780
3781 else
709f5be1 3782 return target;
bbf6f052
RK
3783}
3784\f
9de08200
RK
3785/* Return 1 if EXP just contains zeros. */
3786
3787static int
3788is_zeros_p (exp)
3789 tree exp;
3790{
3791 tree elt;
3792
3793 switch (TREE_CODE (exp))
3794 {
3795 case CONVERT_EXPR:
3796 case NOP_EXPR:
3797 case NON_LVALUE_EXPR:
3798 return is_zeros_p (TREE_OPERAND (exp, 0));
3799
3800 case INTEGER_CST:
3801 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3802
3803 case COMPLEX_CST:
3804 return
3805 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3806
3807 case REAL_CST:
41c9120b 3808 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200
RK
3809
3810 case CONSTRUCTOR:
e1a43f73
PB
3811 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3812 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
3813 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3814 if (! is_zeros_p (TREE_VALUE (elt)))
3815 return 0;
3816
3817 return 1;
e9a25f70
JL
3818
3819 default:
3820 return 0;
9de08200 3821 }
9de08200
RK
3822}
3823
3824/* Return 1 if EXP contains mostly (3/4) zeros. */
3825
3826static int
3827mostly_zeros_p (exp)
3828 tree exp;
3829{
9de08200
RK
3830 if (TREE_CODE (exp) == CONSTRUCTOR)
3831 {
e1a43f73
PB
3832 int elts = 0, zeros = 0;
3833 tree elt = CONSTRUCTOR_ELTS (exp);
3834 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3835 {
3836 /* If there are no ranges of true bits, it is all zero. */
3837 return elt == NULL_TREE;
3838 }
3839 for (; elt; elt = TREE_CHAIN (elt))
3840 {
3841 /* We do not handle the case where the index is a RANGE_EXPR,
3842 so the statistic will be somewhat inaccurate.
3843 We do make a more accurate count in store_constructor itself,
3844 so since this function is only used for nested array elements,
0f41302f 3845 this should be close enough. */
e1a43f73
PB
3846 if (mostly_zeros_p (TREE_VALUE (elt)))
3847 zeros++;
3848 elts++;
3849 }
9de08200
RK
3850
3851 return 4 * zeros >= 3 * elts;
3852 }
3853
3854 return is_zeros_p (exp);
3855}
3856\f
e1a43f73
PB
3857/* Helper function for store_constructor.
3858 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3859 TYPE is the type of the CONSTRUCTOR, not the element type.
23ccec44
JW
3860 CLEARED is as for store_constructor.
3861
3862 This provides a recursive shortcut back to store_constructor when it isn't
3863 necessary to go through store_field. This is so that we can pass through
3864 the cleared field to let store_constructor know that we may not have to
3865 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
3866
3867static void
3868store_constructor_field (target, bitsize, bitpos,
3869 mode, exp, type, cleared)
3870 rtx target;
3871 int bitsize, bitpos;
3872 enum machine_mode mode;
3873 tree exp, type;
3874 int cleared;
3875{
3876 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44
JW
3877 && bitpos % BITS_PER_UNIT == 0
3878 /* If we have a non-zero bitpos for a register target, then we just
3879 let store_field do the bitfield handling. This is unlikely to
3880 generate unnecessary clear instructions anyways. */
3881 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 3882 {
126e5b0d
JW
3883 if (bitpos != 0)
3884 target = change_address (target, VOIDmode,
3885 plus_constant (XEXP (target, 0),
3886 bitpos / BITS_PER_UNIT));
3887 store_constructor (exp, target, cleared);
e1a43f73
PB
3888 }
3889 else
3890 store_field (target, bitsize, bitpos, mode, exp,
3891 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
ece32014 3892 int_size_in_bytes (type), 0);
e1a43f73
PB
3893}
3894
bbf6f052 3895/* Store the value of constructor EXP into the rtx TARGET.
e1a43f73 3896 TARGET is either a REG or a MEM.
0f41302f 3897 CLEARED is true if TARGET is known to have been zero'd. */
bbf6f052
RK
3898
3899static void
e1a43f73 3900store_constructor (exp, target, cleared)
bbf6f052
RK
3901 tree exp;
3902 rtx target;
e1a43f73 3903 int cleared;
bbf6f052 3904{
4af3895e 3905 tree type = TREE_TYPE (exp);
34c73909 3906 rtx exp_size = expr_size (exp);
4af3895e 3907
bbf6f052
RK
3908 /* We know our target cannot conflict, since safe_from_p has been called. */
3909#if 0
3910 /* Don't try copying piece by piece into a hard register
3911 since that is vulnerable to being clobbered by EXP.
3912 Instead, construct in a pseudo register and then copy it all. */
3913 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3914 {
3915 rtx temp = gen_reg_rtx (GET_MODE (target));
e1a43f73 3916 store_constructor (exp, temp, 0);
bbf6f052
RK
3917 emit_move_insn (target, temp);
3918 return;
3919 }
3920#endif
3921
e44842fe
RK
3922 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3923 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
3924 {
3925 register tree elt;
3926
4af3895e 3927 /* Inform later passes that the whole union value is dead. */
e44842fe
RK
3928 if (TREE_CODE (type) == UNION_TYPE
3929 || TREE_CODE (type) == QUAL_UNION_TYPE)
38a448ca 3930 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4af3895e
JVA
3931
3932 /* If we are building a static constructor into a register,
3933 set the initial value as zero so we can fold the value into
67225c15
RK
3934 a constant. But if more than one register is involved,
3935 this probably loses. */
3936 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3937 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200
RK
3938 {
3939 if (! cleared)
e9a25f70 3940 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4af3895e 3941
9de08200
RK
3942 cleared = 1;
3943 }
3944
3945 /* If the constructor has fewer fields than the structure
3946 or if we are initializing the structure to mostly zeros,
bbf6f052 3947 clear the whole structure first. */
9de08200
RK
3948 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3949 != list_length (TYPE_FIELDS (type)))
3950 || mostly_zeros_p (exp))
3951 {
3952 if (! cleared)
3953 clear_storage (target, expr_size (exp),
3954 TYPE_ALIGN (type) / BITS_PER_UNIT);
3955
3956 cleared = 1;
3957 }
bbf6f052
RK
3958 else
3959 /* Inform later passes that the old value is dead. */
38a448ca 3960 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
3961
3962 /* Store each element of the constructor into
3963 the corresponding field of TARGET. */
3964
3965 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3966 {
3967 register tree field = TREE_PURPOSE (elt);
34c73909 3968 tree value = TREE_VALUE (elt);
bbf6f052
RK
3969 register enum machine_mode mode;
3970 int bitsize;
b50d17a1 3971 int bitpos = 0;
bbf6f052 3972 int unsignedp;
b50d17a1
RK
3973 tree pos, constant = 0, offset = 0;
3974 rtx to_rtx = target;
bbf6f052 3975
f32fd778
RS
3976 /* Just ignore missing fields.
3977 We cleared the whole structure, above,
3978 if any fields are missing. */
3979 if (field == 0)
3980 continue;
3981
e1a43f73
PB
3982 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3983 continue;
9de08200 3984
bbf6f052
RK
3985 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3986 unsignedp = TREE_UNSIGNED (field);
3987 mode = DECL_MODE (field);
3988 if (DECL_BIT_FIELD (field))
3989 mode = VOIDmode;
3990
b50d17a1
RK
3991 pos = DECL_FIELD_BITPOS (field);
3992 if (TREE_CODE (pos) == INTEGER_CST)
3993 constant = pos;
3994 else if (TREE_CODE (pos) == PLUS_EXPR
3995 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3996 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3997 else
3998 offset = pos;
3999
4000 if (constant)
cd11b87e 4001 bitpos = TREE_INT_CST_LOW (constant);
b50d17a1
RK
4002
4003 if (offset)
4004 {
4005 rtx offset_rtx;
4006
4007 if (contains_placeholder_p (offset))
4008 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 4009 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 4010
b50d17a1
RK
4011 offset = size_binop (FLOOR_DIV_EXPR, offset,
4012 size_int (BITS_PER_UNIT));
bbf6f052 4013
b50d17a1
RK
4014 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
4015 if (GET_CODE (to_rtx) != MEM)
4016 abort ();
4017
bd070e1a
RH
4018 if (GET_MODE (offset_rtx) != ptr_mode)
4019 {
4020#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 4021 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
4022#else
4023 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
4024#endif
4025 }
4026
b50d17a1
RK
4027 to_rtx
4028 = change_address (to_rtx, VOIDmode,
38a448ca 4029 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
88f63c77 4030 force_reg (ptr_mode, offset_rtx)));
b50d17a1 4031 }
cf04eb80
RK
4032 if (TREE_READONLY (field))
4033 {
9151b3bf 4034 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
4035 to_rtx = copy_rtx (to_rtx);
4036
cf04eb80
RK
4037 RTX_UNCHANGING_P (to_rtx) = 1;
4038 }
4039
34c73909
R
4040#ifdef WORD_REGISTER_OPERATIONS
4041 /* If this initializes a field that is smaller than a word, at the
4042 start of a word, try to widen it to a full word.
4043 This special case allows us to output C++ member function
4044 initializations in a form that the optimizers can understand. */
4045 if (constant
4046 && GET_CODE (target) == REG
4047 && bitsize < BITS_PER_WORD
4048 && bitpos % BITS_PER_WORD == 0
4049 && GET_MODE_CLASS (mode) == MODE_INT
4050 && TREE_CODE (value) == INTEGER_CST
4051 && GET_CODE (exp_size) == CONST_INT
4052 && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
4053 {
4054 tree type = TREE_TYPE (value);
4055 if (TYPE_PRECISION (type) < BITS_PER_WORD)
4056 {
4057 type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
4058 value = convert (type, value);
4059 }
4060 if (BYTES_BIG_ENDIAN)
4061 value
4062 = fold (build (LSHIFT_EXPR, type, value,
4063 build_int_2 (BITS_PER_WORD - bitsize, 0)));
4064 bitsize = BITS_PER_WORD;
4065 mode = word_mode;
4066 }
4067#endif
e1a43f73 4068 store_constructor_field (to_rtx, bitsize, bitpos,
34c73909 4069 mode, value, type, cleared);
bbf6f052
RK
4070 }
4071 }
4af3895e 4072 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
4073 {
4074 register tree elt;
4075 register int i;
e1a43f73 4076 int need_to_clear;
4af3895e 4077 tree domain = TYPE_DOMAIN (type);
906c4e36
RK
4078 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
4079 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 4080 tree elttype = TREE_TYPE (type);
bbf6f052 4081
e1a43f73 4082 /* If the constructor has fewer elements than the array,
38e01259 4083 clear the whole array first. Similarly if this is
e1a43f73
PB
4084 static constructor of a non-BLKmode object. */
4085 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
4086 need_to_clear = 1;
4087 else
4088 {
4089 HOST_WIDE_INT count = 0, zero_count = 0;
4090 need_to_clear = 0;
4091 /* This loop is a more accurate version of the loop in
4092 mostly_zeros_p (it handles RANGE_EXPR in an index).
4093 It is also needed to check for missing elements. */
4094 for (elt = CONSTRUCTOR_ELTS (exp);
4095 elt != NULL_TREE;
df0faff1 4096 elt = TREE_CHAIN (elt))
e1a43f73
PB
4097 {
4098 tree index = TREE_PURPOSE (elt);
4099 HOST_WIDE_INT this_node_count;
4100 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4101 {
4102 tree lo_index = TREE_OPERAND (index, 0);
4103 tree hi_index = TREE_OPERAND (index, 1);
4104 if (TREE_CODE (lo_index) != INTEGER_CST
4105 || TREE_CODE (hi_index) != INTEGER_CST)
4106 {
4107 need_to_clear = 1;
4108 break;
4109 }
4110 this_node_count = TREE_INT_CST_LOW (hi_index)
4111 - TREE_INT_CST_LOW (lo_index) + 1;
4112 }
4113 else
4114 this_node_count = 1;
4115 count += this_node_count;
4116 if (mostly_zeros_p (TREE_VALUE (elt)))
4117 zero_count += this_node_count;
4118 }
8e958f70 4119 /* Clear the entire array first if there are any missing elements,
0f41302f 4120 or if the incidence of zero elements is >= 75%. */
8e958f70
PB
4121 if (count < maxelt - minelt + 1
4122 || 4 * zero_count >= 3 * count)
e1a43f73
PB
4123 need_to_clear = 1;
4124 }
4125 if (need_to_clear)
9de08200
RK
4126 {
4127 if (! cleared)
4128 clear_storage (target, expr_size (exp),
4129 TYPE_ALIGN (type) / BITS_PER_UNIT);
9de08200
RK
4130 cleared = 1;
4131 }
bbf6f052
RK
4132 else
4133 /* Inform later passes that the old value is dead. */
38a448ca 4134 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4135
4136 /* Store each element of the constructor into
4137 the corresponding element of TARGET, determined
4138 by counting the elements. */
4139 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4140 elt;
4141 elt = TREE_CHAIN (elt), i++)
4142 {
4143 register enum machine_mode mode;
4144 int bitsize;
4145 int bitpos;
4146 int unsignedp;
e1a43f73 4147 tree value = TREE_VALUE (elt);
03dc44a6
RS
4148 tree index = TREE_PURPOSE (elt);
4149 rtx xtarget = target;
bbf6f052 4150
e1a43f73
PB
4151 if (cleared && is_zeros_p (value))
4152 continue;
9de08200 4153
bbf6f052
RK
4154 mode = TYPE_MODE (elttype);
4155 bitsize = GET_MODE_BITSIZE (mode);
4156 unsignedp = TREE_UNSIGNED (elttype);
4157
e1a43f73
PB
4158 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4159 {
4160 tree lo_index = TREE_OPERAND (index, 0);
4161 tree hi_index = TREE_OPERAND (index, 1);
4162 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4163 struct nesting *loop;
05c0b405
PB
4164 HOST_WIDE_INT lo, hi, count;
4165 tree position;
e1a43f73 4166
0f41302f 4167 /* If the range is constant and "small", unroll the loop. */
e1a43f73 4168 if (TREE_CODE (lo_index) == INTEGER_CST
05c0b405
PB
4169 && TREE_CODE (hi_index) == INTEGER_CST
4170 && (lo = TREE_INT_CST_LOW (lo_index),
4171 hi = TREE_INT_CST_LOW (hi_index),
4172 count = hi - lo + 1,
4173 (GET_CODE (target) != MEM
4174 || count <= 2
4175 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4176 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4177 <= 40 * 8))))
e1a43f73 4178 {
05c0b405
PB
4179 lo -= minelt; hi -= minelt;
4180 for (; lo <= hi; lo++)
e1a43f73 4181 {
05c0b405
PB
4182 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4183 store_constructor_field (target, bitsize, bitpos,
4184 mode, value, type, cleared);
e1a43f73
PB
4185 }
4186 }
4187 else
4188 {
4189 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4190 loop_top = gen_label_rtx ();
4191 loop_end = gen_label_rtx ();
4192
4193 unsignedp = TREE_UNSIGNED (domain);
4194
4195 index = build_decl (VAR_DECL, NULL_TREE, domain);
4196
4197 DECL_RTL (index) = index_r
4198 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4199 &unsignedp, 0));
4200
4201 if (TREE_CODE (value) == SAVE_EXPR
4202 && SAVE_EXPR_RTL (value) == 0)
4203 {
0f41302f
MS
4204 /* Make sure value gets expanded once before the
4205 loop. */
e1a43f73
PB
4206 expand_expr (value, const0_rtx, VOIDmode, 0);
4207 emit_queue ();
4208 }
4209 store_expr (lo_index, index_r, 0);
4210 loop = expand_start_loop (0);
4211
0f41302f 4212 /* Assign value to element index. */
e1a43f73
PB
4213 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4214 size_int (BITS_PER_UNIT));
4215 position = size_binop (MULT_EXPR,
4216 size_binop (MINUS_EXPR, index,
4217 TYPE_MIN_VALUE (domain)),
4218 position);
4219 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4220 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
e1a43f73
PB
4221 xtarget = change_address (target, mode, addr);
4222 if (TREE_CODE (value) == CONSTRUCTOR)
05c0b405 4223 store_constructor (value, xtarget, cleared);
e1a43f73
PB
4224 else
4225 store_expr (value, xtarget, 0);
4226
4227 expand_exit_loop_if_false (loop,
4228 build (LT_EXPR, integer_type_node,
4229 index, hi_index));
4230
4231 expand_increment (build (PREINCREMENT_EXPR,
4232 TREE_TYPE (index),
7b8b9722 4233 index, integer_one_node), 0, 0);
e1a43f73
PB
4234 expand_end_loop ();
4235 emit_label (loop_end);
4236
4237 /* Needed by stupid register allocation. to extend the
4238 lifetime of pseudo-regs used by target past the end
4239 of the loop. */
38a448ca 4240 emit_insn (gen_rtx_USE (GET_MODE (target), target));
e1a43f73
PB
4241 }
4242 }
4243 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
5b6c44ff 4244 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
03dc44a6 4245 {
e1a43f73 4246 rtx pos_rtx, addr;
03dc44a6
RS
4247 tree position;
4248
5b6c44ff
RK
4249 if (index == 0)
4250 index = size_int (i);
4251
e1a43f73
PB
4252 if (minelt)
4253 index = size_binop (MINUS_EXPR, index,
4254 TYPE_MIN_VALUE (domain));
5b6c44ff
RK
4255 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4256 size_int (BITS_PER_UNIT));
4257 position = size_binop (MULT_EXPR, index, position);
03dc44a6 4258 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4259 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
03dc44a6 4260 xtarget = change_address (target, mode, addr);
e1a43f73 4261 store_expr (value, xtarget, 0);
03dc44a6
RS
4262 }
4263 else
4264 {
4265 if (index != 0)
7c314719 4266 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
03dc44a6
RS
4267 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4268 else
4269 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
e1a43f73
PB
4270 store_constructor_field (target, bitsize, bitpos,
4271 mode, value, type, cleared);
03dc44a6 4272 }
bbf6f052
RK
4273 }
4274 }
071a6595
PB
4275 /* set constructor assignments */
4276 else if (TREE_CODE (type) == SET_TYPE)
4277 {
e1a43f73 4278 tree elt = CONSTRUCTOR_ELTS (exp);
e1a43f73 4279 int nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
4280 tree domain = TYPE_DOMAIN (type);
4281 tree domain_min, domain_max, bitlength;
4282
9faa82d8 4283 /* The default implementation strategy is to extract the constant
071a6595
PB
4284 parts of the constructor, use that to initialize the target,
4285 and then "or" in whatever non-constant ranges we need in addition.
4286
4287 If a large set is all zero or all ones, it is
4288 probably better to set it using memset (if available) or bzero.
4289 Also, if a large set has just a single range, it may also be
4290 better to first clear all the first clear the set (using
0f41302f 4291 bzero/memset), and set the bits we want. */
071a6595 4292
0f41302f 4293 /* Check for all zeros. */
e1a43f73 4294 if (elt == NULL_TREE)
071a6595 4295 {
e1a43f73
PB
4296 if (!cleared)
4297 clear_storage (target, expr_size (exp),
4298 TYPE_ALIGN (type) / BITS_PER_UNIT);
071a6595
PB
4299 return;
4300 }
4301
071a6595
PB
4302 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4303 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4304 bitlength = size_binop (PLUS_EXPR,
4305 size_binop (MINUS_EXPR, domain_max, domain_min),
4306 size_one_node);
4307
e1a43f73
PB
4308 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4309 abort ();
4310 nbits = TREE_INT_CST_LOW (bitlength);
4311
4312 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4313 are "complicated" (more than one range), initialize (the
4314 constant parts) by copying from a constant. */
4315 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4316 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 4317 {
b4ee5a72
PB
4318 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4319 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 4320 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72
PB
4321 HOST_WIDE_INT word = 0;
4322 int bit_pos = 0;
4323 int ibit = 0;
0f41302f 4324 int offset = 0; /* In bytes from beginning of set. */
e1a43f73 4325 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 4326 for (;;)
071a6595 4327 {
b4ee5a72
PB
4328 if (bit_buffer[ibit])
4329 {
b09f3348 4330 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
4331 word |= (1 << (set_word_size - 1 - bit_pos));
4332 else
4333 word |= 1 << bit_pos;
4334 }
4335 bit_pos++; ibit++;
4336 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 4337 {
e1a43f73
PB
4338 if (word != 0 || ! cleared)
4339 {
4340 rtx datum = GEN_INT (word);
4341 rtx to_rtx;
0f41302f
MS
4342 /* The assumption here is that it is safe to use
4343 XEXP if the set is multi-word, but not if
4344 it's single-word. */
e1a43f73
PB
4345 if (GET_CODE (target) == MEM)
4346 {
4347 to_rtx = plus_constant (XEXP (target, 0), offset);
4348 to_rtx = change_address (target, mode, to_rtx);
4349 }
4350 else if (offset == 0)
4351 to_rtx = target;
4352 else
4353 abort ();
4354 emit_move_insn (to_rtx, datum);
4355 }
b4ee5a72
PB
4356 if (ibit == nbits)
4357 break;
4358 word = 0;
4359 bit_pos = 0;
4360 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
4361 }
4362 }
071a6595 4363 }
e1a43f73
PB
4364 else if (!cleared)
4365 {
0f41302f 4366 /* Don't bother clearing storage if the set is all ones. */
e1a43f73
PB
4367 if (TREE_CHAIN (elt) != NULL_TREE
4368 || (TREE_PURPOSE (elt) == NULL_TREE
4369 ? nbits != 1
4370 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4371 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4372 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4373 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4374 != nbits))))
4375 clear_storage (target, expr_size (exp),
4376 TYPE_ALIGN (type) / BITS_PER_UNIT);
4377 }
4378
4379 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595
PB
4380 {
4381 /* start of range of element or NULL */
4382 tree startbit = TREE_PURPOSE (elt);
4383 /* end of range of element, or element value */
4384 tree endbit = TREE_VALUE (elt);
381127e8 4385#ifdef TARGET_MEM_FUNCTIONS
071a6595 4386 HOST_WIDE_INT startb, endb;
381127e8 4387#endif
071a6595
PB
4388 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4389
4390 bitlength_rtx = expand_expr (bitlength,
4391 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4392
4393 /* handle non-range tuple element like [ expr ] */
4394 if (startbit == NULL_TREE)
4395 {
4396 startbit = save_expr (endbit);
4397 endbit = startbit;
4398 }
4399 startbit = convert (sizetype, startbit);
4400 endbit = convert (sizetype, endbit);
4401 if (! integer_zerop (domain_min))
4402 {
4403 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4404 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4405 }
4406 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4407 EXPAND_CONST_ADDRESS);
4408 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4409 EXPAND_CONST_ADDRESS);
4410
4411 if (REG_P (target))
4412 {
4413 targetx = assign_stack_temp (GET_MODE (target),
4414 GET_MODE_SIZE (GET_MODE (target)),
4415 0);
4416 emit_move_insn (targetx, target);
4417 }
4418 else if (GET_CODE (target) == MEM)
4419 targetx = target;
4420 else
4421 abort ();
4422
4423#ifdef TARGET_MEM_FUNCTIONS
4424 /* Optimization: If startbit and endbit are
9faa82d8 4425 constants divisible by BITS_PER_UNIT,
0f41302f 4426 call memset instead. */
071a6595
PB
4427 if (TREE_CODE (startbit) == INTEGER_CST
4428 && TREE_CODE (endbit) == INTEGER_CST
4429 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 4430 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 4431 {
071a6595
PB
4432 emit_library_call (memset_libfunc, 0,
4433 VOIDmode, 3,
e1a43f73
PB
4434 plus_constant (XEXP (targetx, 0),
4435 startb / BITS_PER_UNIT),
071a6595 4436 Pmode,
3b6f75e2 4437 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 4438 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 4439 TYPE_MODE (sizetype));
071a6595
PB
4440 }
4441 else
4442#endif
4443 {
38a448ca 4444 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
071a6595
PB
4445 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4446 bitlength_rtx, TYPE_MODE (sizetype),
4447 startbit_rtx, TYPE_MODE (sizetype),
4448 endbit_rtx, TYPE_MODE (sizetype));
4449 }
4450 if (REG_P (target))
4451 emit_move_insn (target, targetx);
4452 }
4453 }
bbf6f052
RK
4454
4455 else
4456 abort ();
4457}
4458
4459/* Store the value of EXP (an expression tree)
4460 into a subfield of TARGET which has mode MODE and occupies
4461 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4462 If MODE is VOIDmode, it means that we are storing into a bit-field.
4463
4464 If VALUE_MODE is VOIDmode, return nothing in particular.
4465 UNSIGNEDP is not used in this case.
4466
4467 Otherwise, return an rtx for the value stored. This rtx
4468 has mode VALUE_MODE if that is convenient to do.
4469 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4470
4471 ALIGN is the alignment that TARGET is known to have, measured in bytes.
ece32014
MM
4472 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.
4473
4474 ALIAS_SET is the alias set for the destination. This value will
4475 (in general) be different from that for TARGET, since TARGET is a
4476 reference to the containing structure. */
bbf6f052
RK
4477
4478static rtx
4479store_field (target, bitsize, bitpos, mode, exp, value_mode,
ece32014 4480 unsignedp, align, total_size, alias_set)
bbf6f052
RK
4481 rtx target;
4482 int bitsize, bitpos;
4483 enum machine_mode mode;
4484 tree exp;
4485 enum machine_mode value_mode;
4486 int unsignedp;
4487 int align;
4488 int total_size;
ece32014 4489 int alias_set;
bbf6f052 4490{
906c4e36 4491 HOST_WIDE_INT width_mask = 0;
bbf6f052 4492
e9a25f70
JL
4493 if (TREE_CODE (exp) == ERROR_MARK)
4494 return const0_rtx;
4495
906c4e36
RK
4496 if (bitsize < HOST_BITS_PER_WIDE_INT)
4497 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
4498
4499 /* If we are storing into an unaligned field of an aligned union that is
4500 in a register, we may have the mode of TARGET being an integer mode but
4501 MODE == BLKmode. In that case, get an aligned object whose size and
4502 alignment are the same as TARGET and store TARGET into it (we can avoid
4503 the store if the field being stored is the entire width of TARGET). Then
4504 call ourselves recursively to store the field into a BLKmode version of
4505 that object. Finally, load from the object into TARGET. This is not
4506 very efficient in general, but should only be slightly more expensive
4507 than the otherwise-required unaligned accesses. Perhaps this can be
4508 cleaned up later. */
4509
4510 if (mode == BLKmode
4511 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4512 {
4513 rtx object = assign_stack_temp (GET_MODE (target),
4514 GET_MODE_SIZE (GET_MODE (target)), 0);
4515 rtx blk_object = copy_rtx (object);
4516
24a13950
JW
4517 MEM_IN_STRUCT_P (object) = 1;
4518 MEM_IN_STRUCT_P (blk_object) = 1;
bbf6f052
RK
4519 PUT_MODE (blk_object, BLKmode);
4520
4521 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4522 emit_move_insn (object, target);
4523
4524 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
ece32014 4525 align, total_size, alias_set);
bbf6f052 4526
46093b97
RS
4527 /* Even though we aren't returning target, we need to
4528 give it the updated value. */
bbf6f052
RK
4529 emit_move_insn (target, object);
4530
46093b97 4531 return blk_object;
bbf6f052
RK
4532 }
4533
4534 /* If the structure is in a register or if the component
4535 is a bit field, we cannot use addressing to access it.
4536 Use bit-field techniques or SUBREG to store in it. */
4537
4fa52007
RK
4538 if (mode == VOIDmode
4539 || (mode != BLKmode && ! direct_store[(int) mode])
4540 || GET_CODE (target) == REG
c980ac49 4541 || GET_CODE (target) == SUBREG
ccc98036
RS
4542 /* If the field isn't aligned enough to store as an ordinary memref,
4543 store it as a bit field. */
c7a7ac46 4544 || (SLOW_UNALIGNED_ACCESS
ccc98036 4545 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
c7a7ac46 4546 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 4547 {
906c4e36 4548 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 4549
ef19912d
RK
4550 /* If BITSIZE is narrower than the size of the type of EXP
4551 we will be narrowing TEMP. Normally, what's wanted are the
4552 low-order bits. However, if EXP's type is a record and this is
4553 big-endian machine, we want the upper BITSIZE bits. */
4554 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4555 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4556 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4557 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4558 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4559 - bitsize),
4560 temp, 1);
4561
bbd6cf73
RK
4562 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4563 MODE. */
4564 if (mode != VOIDmode && mode != BLKmode
4565 && mode != TYPE_MODE (TREE_TYPE (exp)))
4566 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4567
a281e72d
RK
4568 /* If the modes of TARGET and TEMP are both BLKmode, both
4569 must be in memory and BITPOS must be aligned on a byte
4570 boundary. If so, we simply do a block copy. */
4571 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4572 {
4573 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4574 || bitpos % BITS_PER_UNIT != 0)
4575 abort ();
4576
0086427c
RK
4577 target = change_address (target, VOIDmode,
4578 plus_constant (XEXP (target, 0),
a281e72d
RK
4579 bitpos / BITS_PER_UNIT));
4580
4581 emit_block_move (target, temp,
4582 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4583 / BITS_PER_UNIT),
4584 1);
4585
4586 return value_mode == VOIDmode ? const0_rtx : target;
4587 }
4588
bbf6f052
RK
4589 /* Store the value in the bitfield. */
4590 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4591 if (value_mode != VOIDmode)
4592 {
4593 /* The caller wants an rtx for the value. */
4594 /* If possible, avoid refetching from the bitfield itself. */
4595 if (width_mask != 0
4596 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 4597 {
9074de27 4598 tree count;
5c4d7cfb 4599 enum machine_mode tmode;
86a2c12a 4600
5c4d7cfb
RS
4601 if (unsignedp)
4602 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4603 tmode = GET_MODE (temp);
86a2c12a
RS
4604 if (tmode == VOIDmode)
4605 tmode = value_mode;
5c4d7cfb
RS
4606 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4607 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4608 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4609 }
bbf6f052 4610 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
4611 NULL_RTX, value_mode, 0, align,
4612 total_size);
bbf6f052
RK
4613 }
4614 return const0_rtx;
4615 }
4616 else
4617 {
4618 rtx addr = XEXP (target, 0);
4619 rtx to_rtx;
4620
4621 /* If a value is wanted, it must be the lhs;
4622 so make the address stable for multiple use. */
4623
4624 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4625 && ! CONSTANT_ADDRESS_P (addr)
4626 /* A frame-pointer reference is already stable. */
4627 && ! (GET_CODE (addr) == PLUS
4628 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4629 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4630 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4631 addr = copy_to_reg (addr);
4632
4633 /* Now build a reference to just the desired component. */
4634
effbcc6a
RK
4635 to_rtx = copy_rtx (change_address (target, mode,
4636 plus_constant (addr,
4637 (bitpos
4638 / BITS_PER_UNIT))));
bbf6f052 4639 MEM_IN_STRUCT_P (to_rtx) = 1;
ece32014 4640 MEM_ALIAS_SET (to_rtx) = alias_set;
bbf6f052
RK
4641
4642 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4643 }
4644}
4645\f
4646/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4647 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 4648 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
4649
4650 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4651 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
4652 If the position of the field is variable, we store a tree
4653 giving the variable offset (in units) in *POFFSET.
4654 This offset is in addition to the bit position.
4655 If the position is not variable, we store 0 in *POFFSET.
839c4796
RK
4656 We set *PALIGNMENT to the alignment in bytes of the address that will be
4657 computed. This is the alignment of the thing we return if *POFFSET
4658 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
bbf6f052
RK
4659
4660 If any of the extraction expressions is volatile,
4661 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4662
4663 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4664 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
4665 is redundant.
4666
4667 If the field describes a variable-sized object, *PMODE is set to
4668 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
839c4796 4669 this case, but the address of the object can be found. */
bbf6f052
RK
4670
4671tree
4969d05d 4672get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
839c4796 4673 punsignedp, pvolatilep, palignment)
bbf6f052
RK
4674 tree exp;
4675 int *pbitsize;
4676 int *pbitpos;
7bb0943f 4677 tree *poffset;
bbf6f052
RK
4678 enum machine_mode *pmode;
4679 int *punsignedp;
4680 int *pvolatilep;
839c4796 4681 int *palignment;
bbf6f052 4682{
b50d17a1 4683 tree orig_exp = exp;
bbf6f052
RK
4684 tree size_tree = 0;
4685 enum machine_mode mode = VOIDmode;
742920c7 4686 tree offset = integer_zero_node;
839c4796 4687 int alignment = BIGGEST_ALIGNMENT;
bbf6f052
RK
4688
4689 if (TREE_CODE (exp) == COMPONENT_REF)
4690 {
4691 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4692 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4693 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4694 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4695 }
4696 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4697 {
4698 size_tree = TREE_OPERAND (exp, 1);
4699 *punsignedp = TREE_UNSIGNED (exp);
4700 }
4701 else
4702 {
4703 mode = TYPE_MODE (TREE_TYPE (exp));
4704 *pbitsize = GET_MODE_BITSIZE (mode);
4705 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4706 }
4707
4708 if (size_tree)
4709 {
4710 if (TREE_CODE (size_tree) != INTEGER_CST)
e7c33f54
RK
4711 mode = BLKmode, *pbitsize = -1;
4712 else
4713 *pbitsize = TREE_INT_CST_LOW (size_tree);
bbf6f052
RK
4714 }
4715
4716 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4717 and find the ultimate containing object. */
4718
4719 *pbitpos = 0;
4720
4721 while (1)
4722 {
7bb0943f 4723 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 4724 {
7bb0943f
RS
4725 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4726 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4727 : TREE_OPERAND (exp, 2));
e6d8c385 4728 tree constant = integer_zero_node, var = pos;
bbf6f052 4729
e7f3c83f
RK
4730 /* If this field hasn't been filled in yet, don't go
4731 past it. This should only happen when folding expressions
4732 made during type construction. */
4733 if (pos == 0)
4734 break;
4735
e6d8c385
RK
4736 /* Assume here that the offset is a multiple of a unit.
4737 If not, there should be an explicitly added constant. */
4738 if (TREE_CODE (pos) == PLUS_EXPR
4739 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4740 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
7bb0943f 4741 else if (TREE_CODE (pos) == INTEGER_CST)
e6d8c385
RK
4742 constant = pos, var = integer_zero_node;
4743
4744 *pbitpos += TREE_INT_CST_LOW (constant);
8d8c9ba9
RK
4745 offset = size_binop (PLUS_EXPR, offset,
4746 size_binop (EXACT_DIV_EXPR, var,
4747 size_int (BITS_PER_UNIT)));
bbf6f052 4748 }
bbf6f052 4749
742920c7 4750 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 4751 {
742920c7
RK
4752 /* This code is based on the code in case ARRAY_REF in expand_expr
4753 below. We assume here that the size of an array element is
4754 always an integral multiple of BITS_PER_UNIT. */
4755
4756 tree index = TREE_OPERAND (exp, 1);
4757 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4758 tree low_bound
4759 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4760 tree index_type = TREE_TYPE (index);
ead17059 4761 tree xindex;
742920c7 4762
4c08eef0 4763 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
742920c7 4764 {
4c08eef0
RK
4765 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4766 index);
742920c7
RK
4767 index_type = TREE_TYPE (index);
4768 }
4769
74a4fbfc
DB
4770 /* Optimize the special-case of a zero lower bound.
4771
4772 We convert the low_bound to sizetype to avoid some problems
4773 with constant folding. (E.g. suppose the lower bound is 1,
4774 and its mode is QI. Without the conversion, (ARRAY
4775 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4776 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4777
4778 But sizetype isn't quite right either (especially if
4779 the lowbound is negative). FIXME */
4780
ca0f2220 4781 if (! integer_zerop (low_bound))
74a4fbfc
DB
4782 index = fold (build (MINUS_EXPR, index_type, index,
4783 convert (sizetype, low_bound)));
ca0f2220 4784
f8dac6eb
R
4785 if (TREE_CODE (index) == INTEGER_CST)
4786 {
4787 index = convert (sbitsizetype, index);
4788 index_type = TREE_TYPE (index);
4789 }
4790
ead17059
RH
4791 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4792 convert (sbitsizetype,
4793 TYPE_SIZE (TREE_TYPE (exp)))));
742920c7 4794
ead17059
RH
4795 if (TREE_CODE (xindex) == INTEGER_CST
4796 && TREE_INT_CST_HIGH (xindex) == 0)
4797 *pbitpos += TREE_INT_CST_LOW (xindex);
742920c7 4798 else
956d6950 4799 {
ead17059
RH
4800 /* Either the bit offset calculated above is not constant, or
4801 it overflowed. In either case, redo the multiplication
4802 against the size in units. This is especially important
4803 in the non-constant case to avoid a division at runtime. */
4804 xindex = fold (build (MULT_EXPR, ssizetype, index,
4805 convert (ssizetype,
4806 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4807
4808 if (contains_placeholder_p (xindex))
4809 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4810
4811 offset = size_binop (PLUS_EXPR, offset, xindex);
956d6950 4812 }
bbf6f052
RK
4813 }
4814 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4815 && ! ((TREE_CODE (exp) == NOP_EXPR
4816 || TREE_CODE (exp) == CONVERT_EXPR)
7f62854a
RK
4817 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4818 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4819 != UNION_TYPE))
bbf6f052
RK
4820 && (TYPE_MODE (TREE_TYPE (exp))
4821 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4822 break;
7bb0943f
RS
4823
4824 /* If any reference in the chain is volatile, the effect is volatile. */
4825 if (TREE_THIS_VOLATILE (exp))
4826 *pvolatilep = 1;
839c4796
RK
4827
4828 /* If the offset is non-constant already, then we can't assume any
4829 alignment more than the alignment here. */
4830 if (! integer_zerop (offset))
4831 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4832
bbf6f052
RK
4833 exp = TREE_OPERAND (exp, 0);
4834 }
4835
839c4796
RK
4836 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4837 alignment = MIN (alignment, DECL_ALIGN (exp));
9293498f 4838 else if (TREE_TYPE (exp) != 0)
839c4796
RK
4839 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4840
742920c7
RK
4841 if (integer_zerop (offset))
4842 offset = 0;
4843
b50d17a1
RK
4844 if (offset != 0 && contains_placeholder_p (offset))
4845 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4846
bbf6f052 4847 *pmode = mode;
7bb0943f 4848 *poffset = offset;
839c4796 4849 *palignment = alignment / BITS_PER_UNIT;
bbf6f052
RK
4850 return exp;
4851}
921b3427
RK
4852
4853/* Subroutine of expand_exp: compute memory_usage from modifier. */
4854static enum memory_use_mode
4855get_memory_usage_from_modifier (modifier)
4856 enum expand_modifier modifier;
4857{
4858 switch (modifier)
4859 {
4860 case EXPAND_NORMAL:
e5e809f4 4861 case EXPAND_SUM:
921b3427
RK
4862 return MEMORY_USE_RO;
4863 break;
4864 case EXPAND_MEMORY_USE_WO:
4865 return MEMORY_USE_WO;
4866 break;
4867 case EXPAND_MEMORY_USE_RW:
4868 return MEMORY_USE_RW;
4869 break;
921b3427 4870 case EXPAND_MEMORY_USE_DONT:
e5e809f4
JL
4871 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4872 MEMORY_USE_DONT, because they are modifiers to a call of
4873 expand_expr in the ADDR_EXPR case of expand_expr. */
921b3427 4874 case EXPAND_CONST_ADDRESS:
e5e809f4 4875 case EXPAND_INITIALIZER:
921b3427
RK
4876 return MEMORY_USE_DONT;
4877 case EXPAND_MEMORY_USE_BAD:
4878 default:
4879 abort ();
4880 }
4881}
bbf6f052
RK
4882\f
4883/* Given an rtx VALUE that may contain additions and multiplications,
4884 return an equivalent value that just refers to a register or memory.
4885 This is done by generating instructions to perform the arithmetic
c45a13a6
RK
4886 and returning a pseudo-register containing the value.
4887
4888 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
4889
4890rtx
4891force_operand (value, target)
4892 rtx value, target;
4893{
4894 register optab binoptab = 0;
4895 /* Use a temporary to force order of execution of calls to
4896 `force_operand'. */
4897 rtx tmp;
4898 register rtx op2;
4899 /* Use subtarget as the target for operand 0 of a binary operation. */
4900 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4901
8b015896
RH
4902 /* Check for a PIC address load. */
4903 if (flag_pic
4904 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
4905 && XEXP (value, 0) == pic_offset_table_rtx
4906 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
4907 || GET_CODE (XEXP (value, 1)) == LABEL_REF
4908 || GET_CODE (XEXP (value, 1)) == CONST))
4909 {
4910 if (!subtarget)
4911 subtarget = gen_reg_rtx (GET_MODE (value));
4912 emit_move_insn (subtarget, value);
4913 return subtarget;
4914 }
4915
bbf6f052
RK
4916 if (GET_CODE (value) == PLUS)
4917 binoptab = add_optab;
4918 else if (GET_CODE (value) == MINUS)
4919 binoptab = sub_optab;
4920 else if (GET_CODE (value) == MULT)
4921 {
4922 op2 = XEXP (value, 1);
4923 if (!CONSTANT_P (op2)
4924 && !(GET_CODE (op2) == REG && op2 != subtarget))
4925 subtarget = 0;
4926 tmp = force_operand (XEXP (value, 0), subtarget);
4927 return expand_mult (GET_MODE (value), tmp,
906c4e36 4928 force_operand (op2, NULL_RTX),
bbf6f052
RK
4929 target, 0);
4930 }
4931
4932 if (binoptab)
4933 {
4934 op2 = XEXP (value, 1);
4935 if (!CONSTANT_P (op2)
4936 && !(GET_CODE (op2) == REG && op2 != subtarget))
4937 subtarget = 0;
4938 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4939 {
4940 binoptab = add_optab;
4941 op2 = negate_rtx (GET_MODE (value), op2);
4942 }
4943
4944 /* Check for an addition with OP2 a constant integer and our first
4945 operand a PLUS of a virtual register and something else. In that
4946 case, we want to emit the sum of the virtual register and the
4947 constant first and then add the other value. This allows virtual
4948 register instantiation to simply modify the constant rather than
4949 creating another one around this addition. */
4950 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4951 && GET_CODE (XEXP (value, 0)) == PLUS
4952 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4953 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4954 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4955 {
4956 rtx temp = expand_binop (GET_MODE (value), binoptab,
4957 XEXP (XEXP (value, 0), 0), op2,
4958 subtarget, 0, OPTAB_LIB_WIDEN);
4959 return expand_binop (GET_MODE (value), binoptab, temp,
4960 force_operand (XEXP (XEXP (value, 0), 1), 0),
4961 target, 0, OPTAB_LIB_WIDEN);
4962 }
4963
4964 tmp = force_operand (XEXP (value, 0), subtarget);
4965 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 4966 force_operand (op2, NULL_RTX),
bbf6f052 4967 target, 0, OPTAB_LIB_WIDEN);
8008b228 4968 /* We give UNSIGNEDP = 0 to expand_binop
4969 because the only operations we are expanding here are signed ones. */
4970 }
4971 return value;
4972}
4973\f
4974/* Subroutine of expand_expr:
4975 save the non-copied parts (LIST) of an expr (LHS), and return a list
4976 which can restore these values to their previous values,
4977 should something modify their storage. */
4978
4979static tree
4980save_noncopied_parts (lhs, list)
4981 tree lhs;
4982 tree list;
4983{
4984 tree tail;
4985 tree parts = 0;
4986
4987 for (tail = list; tail; tail = TREE_CHAIN (tail))
4988 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4989 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4990 else
4991 {
4992 tree part = TREE_VALUE (tail);
4993 tree part_type = TREE_TYPE (part);
906c4e36 4994 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
06089a8b 4995 rtx target = assign_temp (part_type, 0, 1, 1);
bbf6f052 4996 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 4997 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 4998 parts = tree_cons (to_be_saved,
4999 build (RTL_EXPR, part_type, NULL_TREE,
5000 (tree) target),
5001 parts);
5002 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
5003 }
5004 return parts;
5005}
5006
5007/* Subroutine of expand_expr:
5008 record the non-copied parts (LIST) of an expr (LHS), and return a list
5009 which specifies the initial values of these parts. */
5010
5011static tree
5012init_noncopied_parts (lhs, list)
5013 tree lhs;
5014 tree list;
5015{
5016 tree tail;
5017 tree parts = 0;
5018
5019 for (tail = list; tail; tail = TREE_CHAIN (tail))
5020 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
5021 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
5022 else
5023 {
5024 tree part = TREE_VALUE (tail);
5025 tree part_type = TREE_TYPE (part);
906c4e36 5026 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
5027 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
5028 }
5029 return parts;
5030}
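/* In short: save_noncopied_parts above captures the current values of
   the non-copied fields in fresh temporaries so they can be restored,
   whereas init_noncopied_parts only records which expression supplies
   each field's initial value and creates no temporaries.  */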
5031
5032/* Subroutine of expand_expr: return nonzero iff there is no way that
5033 EXP can reference X, which is being modified. TOP_P is nonzero if this
5034 call is going to be used to determine whether we need a temporary
5035 for EXP, as opposed to a recursive call to this function.
5036
5037 It is always safe for this routine to return zero since it merely
5038 searches for optimization opportunities. */
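/* Typical use (illustrative): a caller that wants to evaluate one
   subexpression directly into TARGET first asks
   safe_from_p (target, other_operand, 1); if that returns 0 it falls
   back to a temporary, which is always correct, merely slower.  */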
5039
5040static int
e5e809f4 5041safe_from_p (x, exp, top_p)
5042 rtx x;
5043 tree exp;
e5e809f4 5044 int top_p;
5045{
5046 rtx exp_rtl = 0;
5047 int i, nops;
5048 static int save_expr_count;
5049 static int save_expr_size = 0;
5050 static tree *save_expr_rewritten;
5051 static tree save_expr_trees[256];
bbf6f052 5052
5053 if (x == 0
5054 /* If EXP has varying size, we MUST use a target since we currently
5055 have no way of allocating temporaries of variable size
5056 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
5057 So we assume here that something at a higher level has prevented a
f4510f37 5058 clash. This is somewhat bogus, but the best we can do. Only
5059 do this when X is BLKmode and when we are at the top level. */
5060 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
f4510f37 5061 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
5062 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
5063 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
5064 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
5065 != INTEGER_CST)
f4510f37 5066 && GET_MODE (x) == BLKmode))
5067 return 1;
5068
5069 if (top_p && save_expr_size == 0)
5070 {
5071 int rtn;
5072
5073 save_expr_count = 0;
5074 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
5075 save_expr_rewritten = &save_expr_trees[0];
5076
5077 rtn = safe_from_p (x, exp, 1);
5078
5079 for (i = 0; i < save_expr_count; ++i)
5080 {
5081 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
5082 abort ();
5083 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
5084 }
5085
5086 save_expr_size = 0;
5087
5088 return rtn;
5089 }
5090
5091 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
5092 find the underlying pseudo. */
5093 if (GET_CODE (x) == SUBREG)
5094 {
5095 x = SUBREG_REG (x);
5096 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5097 return 0;
5098 }
5099
5100 /* If X is a location in the outgoing argument area, it is always safe. */
5101 if (GET_CODE (x) == MEM
5102 && (XEXP (x, 0) == virtual_outgoing_args_rtx
5103 || (GET_CODE (XEXP (x, 0)) == PLUS
5104 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
5105 return 1;
5106
5107 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
5108 {
5109 case 'd':
5110 exp_rtl = DECL_RTL (exp);
5111 break;
5112
5113 case 'c':
5114 return 1;
5115
5116 case 'x':
5117 if (TREE_CODE (exp) == TREE_LIST)
f32fd778 5118 return ((TREE_VALUE (exp) == 0
e5e809f4 5119 || safe_from_p (x, TREE_VALUE (exp), 0))
bbf6f052 5120 && (TREE_CHAIN (exp) == 0
e5e809f4 5121 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5122 else if (TREE_CODE (exp) == ERROR_MARK)
5123 return 1; /* An already-visited SAVE_EXPR? */
5124 else
5125 return 0;
5126
5127 case '1':
e5e809f4 5128 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5129
5130 case '2':
5131 case '<':
5132 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5133 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5134
5135 case 'e':
5136 case 'r':
5137 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5138 the expression. If it is set, we conflict iff we are that rtx or
5139 both are in memory. Otherwise, we check all operands of the
5140 expression recursively. */
5141
5142 switch (TREE_CODE (exp))
5143 {
5144 case ADDR_EXPR:
e44842fe 5145 return (staticp (TREE_OPERAND (exp, 0))
5146 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5147 || TREE_STATIC (exp));
5148
5149 case INDIRECT_REF:
5150 if (GET_CODE (x) == MEM)
5151 return 0;
5152 break;
5153
5154 case CALL_EXPR:
5155 exp_rtl = CALL_EXPR_RTL (exp);
5156 if (exp_rtl == 0)
5157 {
5158 /* Assume that the call will clobber all hard registers and
5159 all of memory. */
5160 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5161 || GET_CODE (x) == MEM)
5162 return 0;
5163 }
5164
5165 break;
5166
5167 case RTL_EXPR:
5168 /* If a sequence exists, we would have to scan every instruction
5169 in the sequence to see if it was safe. This is probably not
5170 worthwhile. */
5171 if (RTL_EXPR_SEQUENCE (exp))
5172 return 0;
5173
3bb5826a 5174 exp_rtl = RTL_EXPR_RTL (exp);
5175 break;
5176
5177 case WITH_CLEANUP_EXPR:
5178 exp_rtl = RTL_EXPR_RTL (exp);
5179 break;
5180
5dab5552 5181 case CLEANUP_POINT_EXPR:
e5e809f4 5182 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 5183
5184 case SAVE_EXPR:
5185 exp_rtl = SAVE_EXPR_RTL (exp);
5186 if (exp_rtl)
5187 break;
5188
5189 /* This SAVE_EXPR might appear many times in the top-level
5190 safe_from_p() expression, and if it has a complex
5191 subexpression, examining it multiple times could result
5192 in a combinatorial explosion. E.g. on an Alpha
5193 running at least 200MHz, a Fortran test case compiled with
5194 optimization took about 28 minutes to compile -- even though
5195 it was only a few lines long, and the complicated line causing
5196 so much time to be spent in the earlier version of safe_from_p()
5197 had only 293 or so unique nodes.
5198
5199 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5200 where it is so we can turn it back in the top-level safe_from_p()
5201 when we're done. */
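 /* Note that the 256-entry save_expr_trees array bounds this
    bookkeeping: once an expression contains more SAVE_EXPRs than
    that, the code below simply returns 0, which by the contract
    above is always a safe, merely pessimistic, answer.  */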
5202
5203 /* For now, don't bother re-sizing the array. */
5204 if (save_expr_count >= save_expr_size)
5205 return 0;
5206 save_expr_rewritten[save_expr_count++] = exp;
5207
5208 nops = tree_code_length[(int) SAVE_EXPR];
5209 for (i = 0; i < nops; i++)
5210 {
5211 tree operand = TREE_OPERAND (exp, i);
5212 if (operand == NULL_TREE)
5213 continue;
5214 TREE_SET_CODE (exp, ERROR_MARK);
5215 if (!safe_from_p (x, operand, 0))
5216 return 0;
5217 TREE_SET_CODE (exp, SAVE_EXPR);
5218 }
5219 TREE_SET_CODE (exp, ERROR_MARK);
ff439b5f 5220 return 1;
bbf6f052 5221
5222 case BIND_EXPR:
5223 /* The only operand we look at is operand 1. The rest aren't
5224 part of the expression. */
e5e809f4 5225 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 5226
bbf6f052 5227 case METHOD_CALL_EXPR:
0f41302f 5228 /* This takes an rtx argument, but shouldn't appear here. */
bbf6f052 5229 abort ();
5230
5231 default:
5232 break;
5233 }
5234
5235 /* If we have an rtx, we do not need to scan our operands. */
5236 if (exp_rtl)
5237 break;
5238
5239 nops = tree_code_length[(int) TREE_CODE (exp)];
5240 for (i = 0; i < nops; i++)
5241 if (TREE_OPERAND (exp, i) != 0
e5e809f4 5242 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5243 return 0;
5244 }
5245
5246 /* If we have an rtl, find any enclosed object. Then see if we conflict
5247 with it. */
5248 if (exp_rtl)
5249 {
5250 if (GET_CODE (exp_rtl) == SUBREG)
5251 {
5252 exp_rtl = SUBREG_REG (exp_rtl);
5253 if (GET_CODE (exp_rtl) == REG
5254 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5255 return 0;
5256 }
5257
5258 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5259 are memory and EXP is not readonly. */
5260 return ! (rtx_equal_p (x, exp_rtl)
5261 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5262 && ! TREE_READONLY (exp)));
5263 }
5264
5265 /* If we reach here, it is safe. */
5266 return 1;
5267}
5268
5269/* Subroutine of expand_expr: return nonzero iff EXP is an
5270 expression whose type is statically determinable. */
5271
5272static int
5273fixed_type_p (exp)
5274 tree exp;
5275{
5276 if (TREE_CODE (exp) == PARM_DECL
5277 || TREE_CODE (exp) == VAR_DECL
5278 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5279 || TREE_CODE (exp) == COMPONENT_REF
5280 || TREE_CODE (exp) == ARRAY_REF)
5281 return 1;
5282 return 0;
5283}
5284
5285/* Subroutine of expand_expr: return rtx if EXP is a
5286 variable or parameter; else return 0. */
5287
5288static rtx
5289var_rtx (exp)
5290 tree exp;
5291{
5292 STRIP_NOPS (exp);
5293 switch (TREE_CODE (exp))
5294 {
5295 case PARM_DECL:
5296 case VAR_DECL:
5297 return DECL_RTL (exp);
5298 default:
5299 return 0;
5300 }
5301}
5302
5303#ifdef MAX_INTEGER_COMPUTATION_MODE
5304void
5305check_max_integer_computation_mode (exp)
5306 tree exp;
5307{
5308 enum tree_code code = TREE_CODE (exp);
5309 enum machine_mode mode;
5310
5311 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5312 if (code == NOP_EXPR
5313 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5314 return;
5315
5316 /* First check the type of the overall operation. We need only look at
5317 unary, binary and relational operations. */
5318 if (TREE_CODE_CLASS (code) == '1'
5319 || TREE_CODE_CLASS (code) == '2'
5320 || TREE_CODE_CLASS (code) == '<')
5321 {
5322 mode = TYPE_MODE (TREE_TYPE (exp));
5323 if (GET_MODE_CLASS (mode) == MODE_INT
5324 && mode > MAX_INTEGER_COMPUTATION_MODE)
5325 fatal ("unsupported wide integer operation");
5326 }
5327
5328 /* Check operand of a unary op. */
5329 if (TREE_CODE_CLASS (code) == '1')
5330 {
5331 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5332 if (GET_MODE_CLASS (mode) == MODE_INT
5333 && mode > MAX_INTEGER_COMPUTATION_MODE)
5334 fatal ("unsupported wide integer operation");
5335 }
5336
5337 /* Check operands of a binary/comparison op. */
5338 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5339 {
5340 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5341 if (GET_MODE_CLASS (mode) == MODE_INT
5342 && mode > MAX_INTEGER_COMPUTATION_MODE)
5343 fatal ("unsupported wide integer operation");
5344
5345 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5346 if (GET_MODE_CLASS (mode) == MODE_INT
5347 && mode > MAX_INTEGER_COMPUTATION_MODE)
5348 fatal ("unsupported wide integer operation");
5349 }
5350}
5351#endif
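/* For example (illustrative, assuming a target that defines
   MAX_INTEGER_COMPUTATION_MODE to SImode): a DImode addition would
   trip one of the fatal calls above, reporting the unsupported wide
   operation up front instead of producing insns the machine cannot
   handle.  */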
5352
5353\f
5354/* expand_expr: generate code for computing expression EXP.
5355 An rtx for the computed value is returned. The value is never null.
5356 In the case of a void EXP, const0_rtx is returned.
5357
5358 The value may be stored in TARGET if TARGET is nonzero.
5359 TARGET is just a suggestion; callers must assume that
5360 the rtx returned may not be the same as TARGET.
5361
5362 If TARGET is CONST0_RTX, it means that the value will be ignored.
5363
5364 If TMODE is not VOIDmode, it suggests generating the
5365 result in mode TMODE. But this is done only when convenient.
5366 Otherwise, TMODE is ignored and the value is generated in its natural mode.
5367 TMODE is just a suggestion; callers must assume that
5368 the rtx returned may not have mode TMODE.
5369
5370 Note that TARGET may have neither TMODE nor MODE. In that case, it
5371 probably will not be used.
5372
5373 If MODIFIER is EXPAND_SUM then when EXP is an addition
5374 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5375 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5376 products as above, or REG or MEM, or constant.
5377 Ordinarily in such cases we would output mul or add instructions
5378 and then return a pseudo reg containing the sum.
5379
5380 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5381 it also marks a label as absolutely required (it can't be dead).
26fcb35a 5382 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5383 This is used for outputting expressions used in initializers.
5384
5385 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5386 with a constant address even if that address is not normally legitimate.
5387 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
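/* A typical call (illustrative):
   expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL)
   lets the expander pick both the location and the mode of the
   result, while passing const0_rtx as TARGET is the conventional way
   of saying that only the side effects of EXP matter.  */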
5388
5389rtx
5390expand_expr (exp, target, tmode, modifier)
5391 register tree exp;
5392 rtx target;
5393 enum machine_mode tmode;
5394 enum expand_modifier modifier;
5395{
5396 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5397 This is static so it will be accessible to our recursive callees. */
5398 static tree placeholder_list = 0;
5399 register rtx op0, op1, temp;
5400 tree type = TREE_TYPE (exp);
5401 int unsignedp = TREE_UNSIGNED (type);
5402 register enum machine_mode mode = TYPE_MODE (type);
5403 register enum tree_code code = TREE_CODE (exp);
5404 optab this_optab;
5405 /* Use subtarget as the target for operand 0 of a binary operation. */
5406 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5407 rtx original_target = target;
5408 int ignore = (target == const0_rtx
5409 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5410 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5411 || code == COND_EXPR)
dd27116b 5412 && TREE_CODE (type) == VOID_TYPE));
bbf6f052 5413 tree context;
5414 /* Used by check-memory-usage to make modifier read only. */
5415 enum expand_modifier ro_modifier;
bbf6f052 5416
5417 /* Make a read-only version of the modifier. */
5418 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5419 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5420 ro_modifier = modifier;
5421 else
5422 ro_modifier = EXPAND_NORMAL;
ca695ac9 5423
bbf6f052
RK
5424 /* Don't use hard regs as subtargets, because the combiner
5425 can only handle pseudo regs. */
5426 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5427 subtarget = 0;
5428 /* Avoid subtargets inside loops,
5429 since they hide some invariant expressions. */
5430 if (preserve_subexpressions_p ())
5431 subtarget = 0;
5432
5433 /* If we are going to ignore this result, we need only do something
5434 if there is a side-effect somewhere in the expression. If there
5435 is, short-circuit the most common cases here. Note that we must
5436 not call expand_expr with anything but const0_rtx in case this
5437 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 5438
5439 if (ignore)
5440 {
5441 if (! TREE_SIDE_EFFECTS (exp))
5442 return const0_rtx;
5443
5444 /* Ensure we reference a volatile object even if value is ignored. */
5445 if (TREE_THIS_VOLATILE (exp)
5446 && TREE_CODE (exp) != FUNCTION_DECL
5447 && mode != VOIDmode && mode != BLKmode)
5448 {
921b3427 5449 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5450 if (GET_CODE (temp) == MEM)
5451 temp = copy_to_reg (temp);
5452 return const0_rtx;
5453 }
5454
5455 if (TREE_CODE_CLASS (code) == '1')
5456 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 5457 VOIDmode, ro_modifier);
5458 else if (TREE_CODE_CLASS (code) == '2'
5459 || TREE_CODE_CLASS (code) == '<')
5460 {
5461 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5462 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5463 return const0_rtx;
5464 }
5465 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5466 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5467 /* If the second operand has no side effects, just evaluate
0f41302f 5468 the first. */
dd27116b 5469 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 5470 VOIDmode, ro_modifier);
dd27116b 5471
90764a87 5472 target = 0;
dd27116b 5473 }
bbf6f052 5474
dbecbbe4 5475#ifdef MAX_INTEGER_COMPUTATION_MODE
5476 if (target
5477 && TREE_CODE (exp) != INTEGER_CST
5478 && TREE_CODE (exp) != PARM_DECL
5479 && TREE_CODE (exp) != ARRAY_REF
5480 && TREE_CODE (exp) != COMPONENT_REF
5481 && TREE_CODE (exp) != BIT_FIELD_REF
5482 && TREE_CODE (exp) != INDIRECT_REF
ce3c0b53 5483 && TREE_CODE (exp) != VAR_DECL)
5484 {
5485 enum machine_mode mode = GET_MODE (target);
5486
5487 if (GET_MODE_CLASS (mode) == MODE_INT
5488 && mode > MAX_INTEGER_COMPUTATION_MODE)
5489 fatal ("unsupported wide integer operation");
5490 }
5491
71bca506 5492 if (TREE_CODE (exp) != INTEGER_CST
ce3c0b53 5493 && TREE_CODE (exp) != PARM_DECL
5494 && TREE_CODE (exp) != ARRAY_REF
5495 && TREE_CODE (exp) != COMPONENT_REF
5496 && TREE_CODE (exp) != BIT_FIELD_REF
5497 && TREE_CODE (exp) != INDIRECT_REF
ce3c0b53 5498 && TREE_CODE (exp) != VAR_DECL
71bca506 5499 && GET_MODE_CLASS (tmode) == MODE_INT
5500 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5501 fatal ("unsupported wide integer operation");
5502
5503 check_max_integer_computation_mode (exp);
5504#endif
5505
5506 /* If we will do cse, generate all results into pseudo registers
5507 since 1) that allows cse to find more things
5508 and 2) otherwise cse could produce an insn the machine
5509 cannot support. */
5510
5511 if (! cse_not_expected && mode != BLKmode && target
5512 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5513 target = subtarget;
5514
5515 switch (code)
5516 {
5517 case LABEL_DECL:
5518 {
5519 tree function = decl_function_context (exp);
5520 /* Handle using a label in a containing function. */
5521 if (function != current_function_decl
5522 && function != inline_function_decl && function != 0)
5523 {
5524 struct function *p = find_function_data (function);
5525 /* Allocate in the memory associated with the function
5526 that the label is in. */
5527 push_obstacks (p->function_obstack,
5528 p->function_maybepermanent_obstack);
5529
5530 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5531 label_rtx (exp),
5532 p->forced_labels);
5533 pop_obstacks ();
5534 }
5535 else if (modifier == EXPAND_INITIALIZER)
5536 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5537 label_rtx (exp), forced_labels);
5538 temp = gen_rtx_MEM (FUNCTION_MODE,
5539 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5540 if (function != current_function_decl
5541 && function != inline_function_decl && function != 0)
5542 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5543 return temp;
b552441b 5544 }
5545
5546 case PARM_DECL:
5547 if (DECL_RTL (exp) == 0)
5548 {
5549 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 5550 return CONST0_RTX (mode);
5551 }
5552
0f41302f 5553 /* ... fall through ... */
d6a5ac33 5554
bbf6f052 5555 case VAR_DECL:
5556 /* If a static var's type was incomplete when the decl was written,
5557 but the type is complete now, lay out the decl now. */
5558 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5559 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5560 {
5561 push_obstacks_nochange ();
5562 end_temporary_allocation ();
5563 layout_decl (exp, 0);
5564 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5565 pop_obstacks ();
5566 }
d6a5ac33 5567
5568 /* Only check automatic variables. Currently, function arguments are
5569 not checked (this can be done at compile-time with prototypes).
5570 Aggregates are not checked. */
5571 if (flag_check_memory_usage && code == VAR_DECL
5572 && GET_CODE (DECL_RTL (exp)) == MEM
5573 && DECL_CONTEXT (exp) != NULL_TREE
5574 && ! TREE_STATIC (exp)
5575 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5576 {
5577 enum memory_use_mode memory_usage;
5578 memory_usage = get_memory_usage_from_modifier (modifier);
5579
5580 if (memory_usage != MEMORY_USE_DONT)
5581 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5582 XEXP (DECL_RTL (exp), 0), ptr_mode,
5583 GEN_INT (int_size_in_bytes (type)),
5584 TYPE_MODE (sizetype),
5585 GEN_INT (memory_usage),
5586 TYPE_MODE (integer_type_node));
5587 }
5588
0f41302f 5589 /* ... fall through ... */
d6a5ac33 5590
2dca20cd 5591 case FUNCTION_DECL:
5592 case RESULT_DECL:
5593 if (DECL_RTL (exp) == 0)
5594 abort ();
d6a5ac33 5595
5596 /* Ensure variable marked as used even if it doesn't go through
5597 a parser. If it hasn't been used yet, write out an external
5598 definition. */
5599 if (! TREE_USED (exp))
5600 {
5601 assemble_external (exp);
5602 TREE_USED (exp) = 1;
5603 }
5604
5605 /* Show we haven't gotten RTL for this yet. */
5606 temp = 0;
5607
5608 /* Handle variables inherited from containing functions. */
5609 context = decl_function_context (exp);
5610
5611 /* We treat inline_function_decl as an alias for the current function
5612 because that is the inline function whose vars, types, etc.
5613 are being merged into the current function.
5614 See expand_inline_function. */
d6a5ac33 5615
5616 if (context != 0 && context != current_function_decl
5617 && context != inline_function_decl
5618 /* If var is static, we don't need a static chain to access it. */
5619 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5620 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5621 {
5622 rtx addr;
5623
5624 /* Mark as non-local and addressable. */
81feeecb 5625 DECL_NONLOCAL (exp) = 1;
5626 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5627 abort ();
5628 mark_addressable (exp);
5629 if (GET_CODE (DECL_RTL (exp)) != MEM)
5630 abort ();
5631 addr = XEXP (DECL_RTL (exp), 0);
5632 if (GET_CODE (addr) == MEM)
5633 addr = gen_rtx_MEM (Pmode,
5634 fix_lexical_addr (XEXP (addr, 0), exp));
5635 else
5636 addr = fix_lexical_addr (addr, exp);
dc6d66b3 5637 temp = change_address (DECL_RTL (exp), mode, addr);
bbf6f052 5638 }
4af3895e 5639
5640 /* This is the case of an array whose size is to be determined
5641 from its initializer, while the initializer is still being parsed.
5642 See expand_decl. */
d6a5ac33 5643
5644 else if (GET_CODE (DECL_RTL (exp)) == MEM
5645 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5646 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
bbf6f052 5647 XEXP (DECL_RTL (exp), 0));
5648
5649 /* If DECL_RTL is memory, we are in the normal case and either
5650 the address is not valid or it is not a register and -fforce-addr
5651 is specified, get the address into a register. */
5652
5653 else if (GET_CODE (DECL_RTL (exp)) == MEM
5654 && modifier != EXPAND_CONST_ADDRESS
5655 && modifier != EXPAND_SUM
5656 && modifier != EXPAND_INITIALIZER
5657 && (! memory_address_p (DECL_MODE (exp),
5658 XEXP (DECL_RTL (exp), 0))
5659 || (flag_force_addr
5660 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5661 temp = change_address (DECL_RTL (exp), VOIDmode,
d6a5ac33 5662 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 5663
5664 /* If we got something, return it. But first, set the alignment
5665 if the address is a register. */
5666 if (temp != 0)
5667 {
5668 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5669 mark_reg_pointer (XEXP (temp, 0),
5670 DECL_ALIGN (exp) / BITS_PER_UNIT);
5671
5672 return temp;
5673 }
5674
5675 /* If the mode of DECL_RTL does not match that of the decl, it
5676 must be a promoted value. We return a SUBREG of the wanted mode,
5677 but mark it so that we know that it was already extended. */
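 /* Concretely (illustrative): on a machine whose PROMOTE_MODE widens
    QImode locals to SImode registers, DECL_RTL is an SImode REG while
    the decl's mode is QImode; the code below hands back
    (subreg:QI (reg:SI n) 0) with SUBREG_PROMOTED_VAR_P set so later
    passes know the upper bits already hold a valid extension.  */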
5678
5679 if (GET_CODE (DECL_RTL (exp)) == REG
5680 && GET_MODE (DECL_RTL (exp)) != mode)
5681 {
5682 /* Get the signedness used for this variable. Ensure we get the
5683 same mode we got when the variable was declared. */
5684 if (GET_MODE (DECL_RTL (exp))
5685 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5686 abort ();
5687
38a448ca 5688 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5689 SUBREG_PROMOTED_VAR_P (temp) = 1;
5690 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5691 return temp;
5692 }
5693
5694 return DECL_RTL (exp);
5695
5696 case INTEGER_CST:
5697 return immed_double_const (TREE_INT_CST_LOW (exp),
5698 TREE_INT_CST_HIGH (exp),
5699 mode);
5700
5701 case CONST_DECL:
5702 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5703 EXPAND_MEMORY_USE_BAD);
5704
5705 case REAL_CST:
5706 /* If optimized, generate immediate CONST_DOUBLE
5707 which will be turned into memory by reload if necessary.
5708
5709 We used to force a register so that loop.c could see it. But
5710 this does not allow gen_* patterns to perform optimizations with
5711 the constants. It also produces two insns in cases like "x = 1.0;".
5712 On most machines, floating-point constants are not permitted in
5713 many insns, so we'd end up copying it to a register in any case.
5714
5715 Now, we do the copying in expand_binop, if appropriate. */
5716 return immed_real_const (exp);
5717
5718 case COMPLEX_CST:
5719 case STRING_CST:
5720 if (! TREE_CST_RTL (exp))
5721 output_constant_def (exp);
5722
5723 /* TREE_CST_RTL probably contains a constant address.
5724 On RISC machines where a constant address isn't valid,
5725 make some insns to get that address into a register. */
5726 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5727 && modifier != EXPAND_CONST_ADDRESS
5728 && modifier != EXPAND_INITIALIZER
5729 && modifier != EXPAND_SUM
5730 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5731 || (flag_force_addr
5732 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5733 return change_address (TREE_CST_RTL (exp), VOIDmode,
5734 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5735 return TREE_CST_RTL (exp);
5736
bf1e5319 5737 case EXPR_WITH_FILE_LOCATION:
5738 {
5739 rtx to_return;
5740 char *saved_input_filename = input_filename;
5741 int saved_lineno = lineno;
5742 input_filename = EXPR_WFL_FILENAME (exp);
5743 lineno = EXPR_WFL_LINENO (exp);
5744 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5745 emit_line_note (input_filename, lineno);
5746 /* Possibly avoid switching back and forth here. */
5747 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5748 input_filename = saved_input_filename;
5749 lineno = saved_lineno;
5750 return to_return;
5751 }
bf1e5319 5752
5753 case SAVE_EXPR:
5754 context = decl_function_context (exp);
d6a5ac33 5755
5756 /* If this SAVE_EXPR was at global context, assume we are an
5757 initialization function and move it into our context. */
5758 if (context == 0)
5759 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5760
5761 /* We treat inline_function_decl as an alias for the current function
5762 because that is the inline function whose vars, types, etc.
5763 are being merged into the current function.
5764 See expand_inline_function. */
5765 if (context == current_function_decl || context == inline_function_decl)
5766 context = 0;
5767
5768 /* If this is non-local, handle it. */
5769 if (context)
5770 {
5771 /* The following call just exists to abort if the context is
5772 not of a containing function. */
5773 find_function_data (context);
5774
5775 temp = SAVE_EXPR_RTL (exp);
5776 if (temp && GET_CODE (temp) == REG)
5777 {
5778 put_var_into_stack (exp);
5779 temp = SAVE_EXPR_RTL (exp);
5780 }
5781 if (temp == 0 || GET_CODE (temp) != MEM)
5782 abort ();
5783 return change_address (temp, mode,
5784 fix_lexical_addr (XEXP (temp, 0), exp));
5785 }
5786 if (SAVE_EXPR_RTL (exp) == 0)
5787 {
5788 if (mode == VOIDmode)
5789 temp = const0_rtx;
5790 else
e5e809f4 5791 temp = assign_temp (type, 3, 0, 0);
1499e0a8 5792
bbf6f052 5793 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 5794 if (!optimize && GET_CODE (temp) == REG)
5795 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5796 save_expr_regs);
5797
5798 /* If the mode of TEMP does not match that of the expression, it
5799 must be a promoted value. We pass store_expr a SUBREG of the
5800 wanted mode but mark it so that we know that it was already
5801 extended. Note that `unsignedp' was modified above in
5802 this case. */
5803
5804 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5805 {
38a448ca 5806 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5807 SUBREG_PROMOTED_VAR_P (temp) = 1;
5808 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5809 }
5810
4c7a0be9 5811 if (temp == const0_rtx)
5812 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5813 EXPAND_MEMORY_USE_BAD);
5814 else
5815 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5816
5817 TREE_USED (exp) = 1;
bbf6f052 5818 }
5819
5820 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5821 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 5822 but mark it so that we know that it was already extended. */
5823
5824 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5825 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5826 {
5827 /* Compute the signedness and make the proper SUBREG. */
5828 promote_mode (type, mode, &unsignedp, 0);
38a448ca 5829 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5830 SUBREG_PROMOTED_VAR_P (temp) = 1;
5831 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5832 return temp;
5833 }
5834
5835 return SAVE_EXPR_RTL (exp);
5836
5837 case UNSAVE_EXPR:
5838 {
5839 rtx temp;
5840 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5841 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5842 return temp;
5843 }
5844
b50d17a1 5845 case PLACEHOLDER_EXPR:
5846 {
5847 tree placeholder_expr;
5848
5849 /* If there is an object on the head of the placeholder list,
e5e809f4 5850 see if some object in it is of type TYPE or a pointer to it. For
5851 further information, see tree.def. */
5852 for (placeholder_expr = placeholder_list;
5853 placeholder_expr != 0;
5854 placeholder_expr = TREE_CHAIN (placeholder_expr))
5855 {
5856 tree need_type = TYPE_MAIN_VARIANT (type);
5857 tree object = 0;
5858 tree old_list = placeholder_list;
5859 tree elt;
5860
5861 /* Find the outermost reference that is of the type we want.
5862 If none, see if any object has a type that is a pointer to
5863 the type we want. */
5864 for (elt = TREE_PURPOSE (placeholder_expr);
5865 elt != 0 && object == 0;
5866 elt
5867 = ((TREE_CODE (elt) == COMPOUND_EXPR
5868 || TREE_CODE (elt) == COND_EXPR)
5869 ? TREE_OPERAND (elt, 1)
5870 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5871 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5872 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5873 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5874 ? TREE_OPERAND (elt, 0) : 0))
5875 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5876 object = elt;
e9a25f70 5877
e9a25f70 5878 for (elt = TREE_PURPOSE (placeholder_expr);
5879 elt != 0 && object == 0;
5880 elt
5881 = ((TREE_CODE (elt) == COMPOUND_EXPR
5882 || TREE_CODE (elt) == COND_EXPR)
5883 ? TREE_OPERAND (elt, 1)
5884 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5885 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5886 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5887 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5888 ? TREE_OPERAND (elt, 0) : 0))
5889 if (POINTER_TYPE_P (TREE_TYPE (elt))
5890 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
e9a25f70 5891 == need_type))
e5e809f4 5892 object = build1 (INDIRECT_REF, need_type, elt);
dc500fa1 5893
e9a25f70 5894 if (object != 0)
2cde2255 5895 {
5896 /* Expand this object skipping the list entries before
5897 it was found in case it is also a PLACEHOLDER_EXPR.
5898 In that case, we want to translate it using subsequent
5899 entries. */
5900 placeholder_list = TREE_CHAIN (placeholder_expr);
5901 temp = expand_expr (object, original_target, tmode,
5902 ro_modifier);
5903 placeholder_list = old_list;
5904 return temp;
2cde2255 5905 }
5906 }
5907 }
5908
5909 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5910 abort ();
5911
5912 case WITH_RECORD_EXPR:
5913 /* Put the object on the placeholder list, expand our first operand,
5914 and pop the list. */
5915 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5916 placeholder_list);
5917 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
921b3427 5918 tmode, ro_modifier);
5919 placeholder_list = TREE_CHAIN (placeholder_list);
5920 return target;
5921
5922 case GOTO_EXPR:
5923 if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
5924 expand_goto (TREE_OPERAND (exp, 0));
5925 else
5926 expand_computed_goto (TREE_OPERAND (exp, 0));
5927 return const0_rtx;
5928
bbf6f052 5929 case EXIT_EXPR:
5930 expand_exit_loop_if_false (NULL_PTR,
5931 invert_truthvalue (TREE_OPERAND (exp, 0)));
5932 return const0_rtx;
5933
5934 case LABELED_BLOCK_EXPR:
5935 if (LABELED_BLOCK_BODY (exp))
5936 expand_expr_stmt (LABELED_BLOCK_BODY (exp));
5937 emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
5938 return const0_rtx;
5939
5940 case EXIT_BLOCK_EXPR:
5941 if (EXIT_BLOCK_RETURN (exp))
5942 really_sorry ("returned value in block_exit_expr");
5943 expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
5944 return const0_rtx;
5945
bbf6f052 5946 case LOOP_EXPR:
0088fcb1 5947 push_temp_slots ();
5948 expand_start_loop (1);
5949 expand_expr_stmt (TREE_OPERAND (exp, 0));
5950 expand_end_loop ();
0088fcb1 5951 pop_temp_slots ();
5952
5953 return const0_rtx;
5954
5955 case BIND_EXPR:
5956 {
5957 tree vars = TREE_OPERAND (exp, 0);
5958 int vars_need_expansion = 0;
5959
5960 /* Need to open a binding contour here because
e976b8b2 5961 if there are any cleanups they must be contained here. */
5962 expand_start_bindings (0);
5963
5964 /* Mark the corresponding BLOCK for output in its proper place. */
5965 if (TREE_OPERAND (exp, 2) != 0
5966 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5967 insert_block (TREE_OPERAND (exp, 2));
5968
5969 /* If VARS have not yet been expanded, expand them now. */
5970 while (vars)
5971 {
5972 if (DECL_RTL (vars) == 0)
5973 {
5974 vars_need_expansion = 1;
5975 expand_decl (vars);
5976 }
5977 expand_decl_init (vars);
5978 vars = TREE_CHAIN (vars);
5979 }
5980
921b3427 5981 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
5982
5983 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5984
5985 return temp;
5986 }
5987
5988 case RTL_EXPR:
5989 if (RTL_EXPR_SEQUENCE (exp))
5990 {
5991 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5992 abort ();
5993 emit_insns (RTL_EXPR_SEQUENCE (exp));
5994 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5995 }
99310285 5996 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
ca814259 5997 free_temps_for_rtl_expr (exp);
5998 return RTL_EXPR_RTL (exp);
5999
6000 case CONSTRUCTOR:
6001 /* If we don't need the result, just ensure we evaluate any
6002 subexpressions. */
6003 if (ignore)
6004 {
6005 tree elt;
6006 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
6007 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
6008 EXPAND_MEMORY_USE_BAD);
6009 return const0_rtx;
6010 }
3207b172 6011
6012 /* All elts simple constants => refer to a constant in memory. But
6013 if this is a non-BLKmode mode, let it store a field at a time
6014 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 6015 fold. Likewise, if we have a target we can use, it is best to
6016 store directly into the target unless the type is large enough
6017 that memcpy will be used. If we are making an initializer and
3207b172 6018 all operands are constant, put it in memory as well. */
dd27116b 6019 else if ((TREE_STATIC (exp)
3207b172 6020 && ((mode == BLKmode
e5e809f4 6021 && ! (target != 0 && safe_from_p (target, exp, 1)))
6022 || TREE_ADDRESSABLE (exp)
6023 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
6024 && (move_by_pieces_ninsns
6025 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
6026 TYPE_ALIGN (type) / BITS_PER_UNIT)
9862dea9 6027 >= MOVE_RATIO)
9de08200 6028 && ! mostly_zeros_p (exp))))
dd27116b 6029 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
6030 {
6031 rtx constructor = output_constant_def (exp);
6032 if (modifier != EXPAND_CONST_ADDRESS
6033 && modifier != EXPAND_INITIALIZER
6034 && modifier != EXPAND_SUM
6035 && (! memory_address_p (GET_MODE (constructor),
6036 XEXP (constructor, 0))
6037 || (flag_force_addr
6038 && GET_CODE (XEXP (constructor, 0)) != REG)))
bbf6f052
RK
6039 constructor = change_address (constructor, VOIDmode,
6040 XEXP (constructor, 0));
6041 return constructor;
6042 }
6043
6044 else
6045 {
6046 /* Handle calls that pass values in multiple non-contiguous
6047 locations. The Irix 6 ABI has examples of this. */
e5e809f4 6048 if (target == 0 || ! safe_from_p (target, exp, 1)
e9ac02a6 6049 || GET_CODE (target) == PARALLEL)
6050 {
6051 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
6052 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6053 else
6054 target = assign_temp (type, 0, 1, 1);
6055 }
6056
6057 if (TREE_READONLY (exp))
6058 {
9151b3bf 6059 if (GET_CODE (target) == MEM)
6060 target = copy_rtx (target);
6061
6062 RTX_UNCHANGING_P (target) = 1;
6063 }
6064
e1a43f73 6065 store_constructor (exp, target, 0);
6066 return target;
6067 }
6068
6069 case INDIRECT_REF:
6070 {
6071 tree exp1 = TREE_OPERAND (exp, 0);
6072 tree exp2;
6073 tree index;
6074 tree string = string_constant (exp1, &index);
6075 int i;
6076
06eaa86f 6077 /* Try to optimize reads from const strings. */
6078 if (string
6079 && TREE_CODE (string) == STRING_CST
6080 && TREE_CODE (index) == INTEGER_CST
6081 && !TREE_INT_CST_HIGH (index)
6082 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
6083 && GET_MODE_CLASS (mode) == MODE_INT
6084 && GET_MODE_SIZE (mode) == 1
6085 && modifier != EXPAND_MEMORY_USE_WO)
7581a30f 6086 return GEN_INT (TREE_STRING_POINTER (string)[i]);
bbf6f052 6087
6088 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
6089 op0 = memory_address (mode, op0);
8c8a8e34 6090
6091 if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
6092 {
6093 enum memory_use_mode memory_usage;
6094 memory_usage = get_memory_usage_from_modifier (modifier);
6095
6096 if (memory_usage != MEMORY_USE_DONT)
6097 {
6098 in_check_memory_usage = 1;
6099 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6100 op0, ptr_mode,
6101 GEN_INT (int_size_in_bytes (type)),
6102 TYPE_MODE (sizetype),
6103 GEN_INT (memory_usage),
6104 TYPE_MODE (integer_type_node));
6105 in_check_memory_usage = 0;
6106 }
6107 }
6108
38a448ca 6109 temp = gen_rtx_MEM (mode, op0);
6110 /* If address was computed by addition,
6111 mark this as an element of an aggregate. */
6112 if (TREE_CODE (exp1) == PLUS_EXPR
6113 || (TREE_CODE (exp1) == SAVE_EXPR
6114 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
05e3bdb9 6115 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
6116 || (TREE_CODE (exp1) == ADDR_EXPR
6117 && (exp2 = TREE_OPERAND (exp1, 0))
b5f88157 6118 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
8c8a8e34 6119 MEM_IN_STRUCT_P (temp) = 1;
6120
6121 /* If the pointer is actually a REFERENCE_TYPE, this could be pointing
6122 into some aggregate too. In theory we could fold this into the
6123 previous check and use rtx_addr_varies_p there too.
6124
6125 However, this seems safer. */
6126 if (!MEM_IN_STRUCT_P (temp)
6127 && (TREE_CODE (TREE_TYPE (exp1)) == REFERENCE_TYPE
6128 /* This may have been an array reference to the first element
6129 that was optimized away from being an addition. */
6130 || (TREE_CODE (exp1) == NOP_EXPR
6131 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
6132 == REFERENCE_TYPE)
6133 || ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
6134 == POINTER_TYPE)
6135 && (AGGREGATE_TYPE_P
6136 (TREE_TYPE (TREE_TYPE
6137 (TREE_OPERAND (exp1, 0))))))))))
6138 MEM_IN_STRUCT_P (temp) = ! rtx_addr_varies_p (temp);
6139
2c4c436a 6140 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
41472af8 6141 MEM_ALIAS_SET (temp) = get_alias_set (exp);
6142
6143 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
6144 here, because, in C and C++, the fact that a location is accessed
6145 through a pointer to const does not mean that the value there can
6146 never change. Languages where it can never change should
6147 also set TREE_STATIC. */
5cb7a25a 6148 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6149 return temp;
6150 }
6151
6152 case ARRAY_REF:
6153 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6154 abort ();
bbf6f052 6155
bbf6f052 6156 {
6157 tree array = TREE_OPERAND (exp, 0);
6158 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6159 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6160 tree index = TREE_OPERAND (exp, 1);
6161 tree index_type = TREE_TYPE (index);
08293add 6162 HOST_WIDE_INT i;
b50d17a1 6163
d4c89139
PB
6164 /* Optimize the special-case of a zero lower bound.
6165
6166 We convert the low_bound to sizetype to avoid some problems
6167 with constant folding. (E.g. suppose the lower bound is 1,
6168 and its mode is QI. Without the conversion, (ARRAY
6169 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6170 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6171
6172 But sizetype isn't quite right either (especially if
6173 the lowbound is negative). FIXME */
6174
742920c7 6175 if (! integer_zerop (low_bound))
6176 index = fold (build (MINUS_EXPR, index_type, index,
6177 convert (sizetype, low_bound)));
742920c7 6178
742920c7 6179 /* Fold an expression like: "foo"[2].
6180 This is not done in fold so it won't happen inside &.
6181 Don't fold if this is for wide characters since it's too
6182 difficult to do correctly and this is a very rare case. */
6183
6184 if (TREE_CODE (array) == STRING_CST
6185 && TREE_CODE (index) == INTEGER_CST
6186 && !TREE_INT_CST_HIGH (index)
307b821c 6187 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6188 && GET_MODE_CLASS (mode) == MODE_INT
6189 && GET_MODE_SIZE (mode) == 1)
307b821c 6190 return GEN_INT (TREE_STRING_POINTER (array)[i]);
bbf6f052 6191
6192 /* If this is a constant index into a constant array,
6193 just get the value from the array. Handle both the cases when
6194 we have an explicit constructor and when our operand is a variable
6195 that was declared const. */
4af3895e 6196
6197 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6198 {
6199 if (TREE_CODE (index) == INTEGER_CST
6200 && TREE_INT_CST_HIGH (index) == 0)
6201 {
6202 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6203
6204 i = TREE_INT_CST_LOW (index);
6205 while (elem && i--)
6206 elem = TREE_CHAIN (elem);
6207 if (elem)
6208 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 6209 tmode, ro_modifier);
6210 }
6211 }
4af3895e 6212
6213 else if (optimize >= 1
6214 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6215 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6216 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6217 {
08293add 6218 if (TREE_CODE (index) == INTEGER_CST)
6219 {
6220 tree init = DECL_INITIAL (array);
6221
6222 i = TREE_INT_CST_LOW (index);
6223 if (TREE_CODE (init) == CONSTRUCTOR)
6224 {
6225 tree elem = CONSTRUCTOR_ELTS (init);
6226
6227 while (elem
6228 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6229 elem = TREE_CHAIN (elem);
6230 if (elem)
6231 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 6232 tmode, ro_modifier);
6233 }
6234 else if (TREE_CODE (init) == STRING_CST
6235 && TREE_INT_CST_HIGH (index) == 0
6236 && (TREE_INT_CST_LOW (index)
6237 < TREE_STRING_LENGTH (init)))
6238 return (GEN_INT
6239 (TREE_STRING_POINTER
6240 (init)[TREE_INT_CST_LOW (index)]));
6241 }
6242 }
6243 }
8c8a8e34 6244
08293add 6245 /* ... fall through ... */
6246
6247 case COMPONENT_REF:
6248 case BIT_FIELD_REF:
4af3895e 6249 /* If the operand is a CONSTRUCTOR, we can just extract the
6250 appropriate field if it is present. Don't do this if we have
6251 already written the data since we want to refer to that copy
6252 and varasm.c assumes that's what we'll do. */
4af3895e 6253 if (code != ARRAY_REF
6254 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6255 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6256 {
6257 tree elt;
6258
6259 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6260 elt = TREE_CHAIN (elt))
6261 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6262 /* We can normally use the value of the field in the
6263 CONSTRUCTOR. However, if this is a bitfield in
6264 an integral mode that we can fit in a HOST_WIDE_INT,
6265 we must mask only the number of bits in the bitfield,
6266 since this is done implicitly by the constructor. If
6267 the bitfield does not meet either of those conditions,
6268 we can't do this optimization. */
6269 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6270 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6271 == MODE_INT)
6272 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6273 <= HOST_BITS_PER_WIDE_INT))))
6274 {
6275 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6276 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6277 {
6278 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6279
6280 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6281 {
6282 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6283 op0 = expand_and (op0, op1, target);
6284 }
6285 else
6286 {
6287 enum machine_mode imode
6288 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c 6289 tree count
6290 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6291 0);
6292
6293 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6294 target, 0);
6295 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6296 target, 0);
6297 }
6298 }
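 /* Worked example of the signed branch above (illustrative): for a
    5-bit signed bitfield in SImode, count = 32 - 5 = 27; shifting
    left by 27 and then arithmetically right by 27 replicates the
    field's sign bit through the upper bits, i.e. sign-extends the
    stored value.  The unsigned branch instead masks with
    (1 << 5) - 1 = 0x1f.  */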
6299
6300 return op0;
6301 }
6302 }
6303
6304 {
6305 enum machine_mode mode1;
6306 int bitsize;
6307 int bitpos;
7bb0943f 6308 tree offset;
bbf6f052 6309 int volatilep = 0;
034f9101 6310 int alignment;
6311 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6312 &mode1, &unsignedp, &volatilep,
6313 &alignment);
bbf6f052 6314
6315 /* If we got back the original object, something is wrong. Perhaps
6316 we are evaluating an expression too early. In any event, don't
6317 infinitely recurse. */
6318 if (tem == exp)
6319 abort ();
6320
3d27140a 6321 /* If TEM's type is a union of variable size, pass TARGET to the inner
6322 computation, since it will need a temporary and TARGET is known
6323 to suffice. This occurs in unchecked conversion in Ada. */
6324
6325 op0 = expand_expr (tem,
6326 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6327 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6328 != INTEGER_CST)
6329 ? target : NULL_RTX),
4ed67205 6330 VOIDmode,
6331 modifier == EXPAND_INITIALIZER
6332 ? modifier : EXPAND_NORMAL);
bbf6f052 6333
8c8a8e34 6334 /* If this is a constant, put it into a register if it is a
8008b228 6335 legitimate constant and memory if it isn't. */
6336 if (CONSTANT_P (op0))
6337 {
6338 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
f2878c6b 6339 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6340 op0 = force_reg (mode, op0);
6341 else
6342 op0 = validize_mem (force_const_mem (mode, op0));
6343 }
6344
7bb0943f
RS
6345 if (offset != 0)
6346 {
906c4e36 6347 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6348
6349 if (GET_CODE (op0) != MEM)
6350 abort ();
6351
6352 if (GET_MODE (offset_rtx) != ptr_mode)
bd070e1a 6353 {
2d48c13d 6354#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 6355 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
2d48c13d 6356#else
bd070e1a 6357 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d 6358#endif
bd070e1a 6359 }
2d48c13d 6360
6361 if (GET_CODE (op0) == MEM
6362 && GET_MODE (op0) == BLKmode
6363 && bitsize
6364 && (bitpos % bitsize) == 0
6365 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6366 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6367 {
6368 rtx temp = change_address (op0, mode1,
6369 plus_constant (XEXP (op0, 0),
6370 (bitpos /
6371 BITS_PER_UNIT)));
6372 if (GET_CODE (XEXP (temp, 0)) == REG)
6373 op0 = temp;
6374 else
6375 op0 = change_address (op0, mode1,
6376 force_reg (GET_MODE (XEXP (temp, 0)),
6377 XEXP (temp, 0)));
6378 bitpos = 0;
6379 }
6380
6381
7bb0943f 6382 op0 = change_address (op0, VOIDmode,
6383 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6384 force_reg (ptr_mode, offset_rtx)));
6385 }
6386
6387 /* Don't forget about volatility even if this is a bitfield. */
6388 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6389 {
6390 op0 = copy_rtx (op0);
6391 MEM_VOLATILE_P (op0) = 1;
6392 }
6393
6394 /* Check the access. */
6395 if (flag_check_memory_usage && GET_CODE (op0) == MEM)
6396 {
6397 enum memory_use_mode memory_usage;
6398 memory_usage = get_memory_usage_from_modifier (modifier);
6399
6400 if (memory_usage != MEMORY_USE_DONT)
6401 {
6402 rtx to;
6403 int size;
6404
6405 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6406 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6407
6408 /* Check the access right of the pointer. */
6409 if (size > BITS_PER_UNIT)
6410 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6411 to, ptr_mode,
6412 GEN_INT (size / BITS_PER_UNIT),
6413 TYPE_MODE (sizetype),
6414 GEN_INT (memory_usage),
6415 TYPE_MODE (integer_type_node));
6416 }
6417 }
6418
6419 /* In cases where an aligned union has an unaligned object
6420 as a field, we might be extracting a BLKmode value from
6421 an integer-mode (e.g., SImode) object. Handle this case
6422 by doing the extract into an object as wide as the field
6423 (which we know to be the width of a basic mode), then
6424 storing into memory, and changing the mode to BLKmode.
6425 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6426 EXPAND_INITIALIZER), then we must not copy to a temporary. */
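 /* For instance (illustrative): a 4-byte BLKmode record living in a
    union that sits in an SImode register: the code below pulls the
    field out with extract_bit_field into an SImode temporary, spills
    it to a fresh stack slot, and returns that slot recast as a
    BLKmode MEM.  */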
bbf6f052 6427 if (mode1 == VOIDmode
ccc98036 6428 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
f9409c3a 6429 || (modifier != EXPAND_CONST_ADDRESS
f9409c3a 6430 && modifier != EXPAND_INITIALIZER
6431 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6432 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6433 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6434 /* If the field isn't aligned enough to fetch as a memref,
6435 fetch it as a bit field. */
6436 || (SLOW_UNALIGNED_ACCESS
6437 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
6438 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
bbf6f052 6439 {
6440 enum machine_mode ext_mode = mode;
6441
6442 if (ext_mode == BLKmode)
6443 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6444
6445 if (ext_mode == BLKmode)
6446 {
6447 /* In this case, BITPOS must start at a byte boundary and
6448 TARGET, if specified, must be a MEM. */
6449 if (GET_CODE (op0) != MEM
6450 || (target != 0 && GET_CODE (target) != MEM)
6451 || bitpos % BITS_PER_UNIT != 0)
6452 abort ();
6453
6454 op0 = change_address (op0, VOIDmode,
6455 plus_constant (XEXP (op0, 0),
6456 bitpos / BITS_PER_UNIT));
6457 if (target == 0)
6458 target = assign_temp (type, 0, 1, 1);
6459
6460 emit_block_move (target, op0,
6461 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6462 / BITS_PER_UNIT),
6463 1);
6464
6465 return target;
6466 }
bbf6f052 6467
6468 op0 = validize_mem (op0);
6469
6470 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6471 mark_reg_pointer (XEXP (op0, 0), alignment);
6472
6473 op0 = extract_bit_field (op0, bitsize, bitpos,
bbf6f052 6474 unsignedp, target, ext_mode, ext_mode,
034f9101 6475 alignment,
bbf6f052 6476 int_size_in_bytes (TREE_TYPE (tem)));
6477
6478 /* If the result is a record type and BITSIZE is narrower than
6479 the mode of OP0, an integral mode, and this is a big endian
6480 machine, we must put the field into the high-order bits. */
6481 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6482 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6483 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6484 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6485 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6486 - bitsize),
6487 op0, 1);
6488
6489 if (mode == BLKmode)
6490 {
6491 rtx new = assign_stack_temp (ext_mode,
6492 bitsize / BITS_PER_UNIT, 0);
6493
6494 emit_move_insn (new, op0);
6495 op0 = copy_rtx (new);
6496 PUT_MODE (op0, BLKmode);
092dded9 6497 MEM_IN_STRUCT_P (op0) = 1;
6498 }
6499
6500 return op0;
6501 }
6502
6503 /* If the result is BLKmode, use that to access the object
6504 now as well. */
6505 if (mode == BLKmode)
6506 mode1 = BLKmode;
6507
6508 /* Get a reference to just this component. */
6509 if (modifier == EXPAND_CONST_ADDRESS
6510 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6511 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6512 (bitpos / BITS_PER_UNIT)));
6513 else
6514 op0 = change_address (op0, mode1,
6515 plus_constant (XEXP (op0, 0),
6516 (bitpos / BITS_PER_UNIT)));
6517
6518 if (GET_CODE (op0) == MEM)
6519 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6520
6521 if (GET_CODE (XEXP (op0, 0)) == REG)
6522 mark_reg_pointer (XEXP (op0, 0), alignment);
6523
6524 MEM_IN_STRUCT_P (op0) = 1;
6525 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 6526 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 6527 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 6528 || modifier == EXPAND_INITIALIZER)
bbf6f052 6529 return op0;
0d15e60c 6530 else if (target == 0)
bbf6f052 6531 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 6532
6533 convert_move (target, op0, unsignedp);
6534 return target;
6535 }
6536
6537 /* Intended for a reference to a buffer of a file-object in Pascal.
6538 But it's not certain that a special tree code will really be
6539 necessary for these. INDIRECT_REF might work for them. */
6540 case BUFFER_REF:
6541 abort ();
6542
7308a047 6543 case IN_EXPR:
7308a047 6544 {
6545 /* Pascal set IN expression.
6546
6547 Algorithm:
6548 rlo = set_low - (set_low%bits_per_word);
6549 the_word = set [ (index - rlo)/bits_per_word ];
6550 bit_index = index % bits_per_word;
6551 bitmask = 1 << bit_index;
6552 return !!(the_word & bitmask); */
6553
6554 tree set = TREE_OPERAND (exp, 0);
6555 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 6556 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 6557 tree set_type = TREE_TYPE (set);
6558 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6559 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6560 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6561 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6562 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6563 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6564 rtx setaddr = XEXP (setval, 0);
6565 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6566 rtx rlow;
6567 rtx diff, quo, rem, addr, bit, result;
7308a047 6568
6569 preexpand_calls (exp);
6570
6571 /* If domain is empty, answer is no. Likewise if index is constant
6572 and out of bounds. */
51723711 6573 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 6574 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 6575 && tree_int_cst_lt (set_high_bound, set_low_bound))
6576 || (TREE_CODE (index) == INTEGER_CST
6577 && TREE_CODE (set_low_bound) == INTEGER_CST
6578 && tree_int_cst_lt (index, set_low_bound))
6579 || (TREE_CODE (set_high_bound) == INTEGER_CST
6580 && TREE_CODE (index) == INTEGER_CST
6581 && tree_int_cst_lt (set_high_bound, index))))
6582 return const0_rtx;
6583
6584 if (target == 0)
6585 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6586
6587 /* If we get here, we have to generate the code for both cases
6588 (in range and out of range). */
6589
6590 op0 = gen_label_rtx ();
6591 op1 = gen_label_rtx ();
6592
6593 if (! (GET_CODE (index_val) == CONST_INT
6594 && GET_CODE (lo_r) == CONST_INT))
6595 {
17938e57 6596 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
d6a5ac33 6597 GET_MODE (index_val), iunsignedp, 0);
6598 emit_jump_insn (gen_blt (op1));
6599 }
6600
6601 if (! (GET_CODE (index_val) == CONST_INT
6602 && GET_CODE (hi_r) == CONST_INT))
6603 {
17938e57 6604 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
d6a5ac33 6605 GET_MODE (index_val), iunsignedp, 0);
6606 emit_jump_insn (gen_bgt (op1));
6607 }
6608
6609 /* Calculate the element number of bit zero in the first word
6610 of the set. */
6611 if (GET_CODE (lo_r) == CONST_INT)
6612 rlow = GEN_INT (INTVAL (lo_r)
6613 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 6614 else
6615 rlow = expand_binop (index_mode, and_optab, lo_r,
6616 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 6617 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 6618
6619 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6620 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6621
6622 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 6623 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 6624 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6625 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6626
7308a047 6627 addr = memory_address (byte_mode,
6628 expand_binop (index_mode, add_optab, diff,
6629 setaddr, NULL_RTX, iunsignedp,
17938e57 6630 OPTAB_LIB_WIDEN));
d6a5ac33 6631
6632 	    /* Extract the bit we want to examine.  */
6633 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 6634 gen_rtx_MEM (byte_mode, addr),
6635 make_tree (TREE_TYPE (index), rem),
6636 NULL_RTX, 1);
6637 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6638 GET_MODE (target) == byte_mode ? target : 0,
7308a047 6639 1, OPTAB_LIB_WIDEN);
6640
6641 if (result != target)
6642 convert_move (target, result, 1);
6643
6644 /* Output the code to handle the out-of-range case. */
6645 emit_jump (op0);
6646 emit_label (op1);
6647 emit_move_insn (target, const0_rtx);
6648 emit_label (op0);
6649 return target;
6650 }
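/* Editorial sketch, not part of expr.c: the algorithm quoted in the
   comment above, written out over a byte array, with the word size
   taken to be one byte (BITS_PER_UNIT == 8) as in the expansion.  The
   two range checks correspond to the conditional jumps to the
   out-of-range label OP1.  */
static int
model_set_in (unsigned char *set, int set_low, int set_high, int index)
{
  int rlo, bit_index;
  unsigned char the_word, bitmask;

  if (index < set_low || index > set_high)	/* out of range: answer is 0 */
    return 0;

  rlo = set_low - (set_low % 8);
  the_word = set[(index - rlo) / 8];
  bit_index = index % 8;
  bitmask = 1 << bit_index;
  return (the_word & bitmask) != 0;
}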
6651
6652 case WITH_CLEANUP_EXPR:
6653 if (RTL_EXPR_RTL (exp) == 0)
6654 {
6655 RTL_EXPR_RTL (exp)
921b3427 6656 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6657 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6658
6659 /* That's it for this cleanup. */
6660 TREE_OPERAND (exp, 2) = 0;
6661 }
6662 return RTL_EXPR_RTL (exp);
6663
6664 case CLEANUP_POINT_EXPR:
6665 {
d93d4205 6666 extern int temp_slot_level;
6667 /* Start a new binding layer that will keep track of all cleanup
6668 actions to be performed. */
6669 expand_start_bindings (0);
6670
d93d4205 6671 target_temp_slot_level = temp_slot_level;
e976b8b2 6672
921b3427 6673 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6674 /* If we're going to use this value, load it up now. */
6675 if (! ignore)
6676 op0 = force_not_mem (op0);
d93d4205 6677 preserve_temp_slots (op0);
e976b8b2 6678 expand_end_bindings (NULL_TREE, 0, 0);
6679 }
6680 return op0;
6681
6682 case CALL_EXPR:
6683 /* Check for a built-in function. */
6684 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6685 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6686 == FUNCTION_DECL)
6687 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6688 return expand_builtin (exp, target, subtarget, tmode, ignore);
d6a5ac33 6689
6690 /* If this call was expanded already by preexpand_calls,
6691 just return the result we got. */
6692 if (CALL_EXPR_RTL (exp) != 0)
6693 return CALL_EXPR_RTL (exp);
d6a5ac33 6694
8129842c 6695 return expand_call (exp, target, ignore);
6696
6697 case NON_LVALUE_EXPR:
6698 case NOP_EXPR:
6699 case CONVERT_EXPR:
6700 case REFERENCE_EXPR:
6701 if (TREE_CODE (type) == UNION_TYPE)
6702 {
6703 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6704 if (target == 0)
6705 {
6706 if (mode != BLKmode)
6707 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6708 else
6709 target = assign_temp (type, 0, 1, 1);
6710 }
d6a5ac33 6711
6712 if (GET_CODE (target) == MEM)
6713 /* Store data into beginning of memory target. */
6714 store_expr (TREE_OPERAND (exp, 0),
6715 change_address (target, TYPE_MODE (valtype), 0), 0);
6716
6717 else if (GET_CODE (target) == REG)
6718 /* Store this field into a union of the proper type. */
6719 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6720 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6721 VOIDmode, 0, 1,
6722 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
6723 0);
6724 else
6725 abort ();
6726
6727 /* Return the entire union. */
6728 return target;
6729 }
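/* Editorial sketch with hypothetical types, not from expr.c: at the
   source level the UNION_TYPE conversion above is simply a store into
   the first bytes of the union -- store_expr into the re-addressed MEM
   when the target is memory, store_field at bit offset 0 when it is a
   register.  */
union model_u { int i; float f; };

static union model_u
model_union_convert (int v)
{
  union model_u x;
  x.i = v;			/* data lands at offset 0 of the union */
  return x;
}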
d6a5ac33 6730
6731 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6732 {
6733 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
921b3427 6734 ro_modifier);
6735
6736 /* If the signedness of the conversion differs and OP0 is
6737 a promoted SUBREG, clear that indication since we now
6738 have to do the proper extension. */
6739 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6740 && GET_CODE (op0) == SUBREG)
6741 SUBREG_PROMOTED_VAR_P (op0) = 0;
6742
6743 return op0;
6744 }
6745
1499e0a8 6746 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
6747 if (GET_MODE (op0) == mode)
6748 return op0;
12342f90 6749
6750 /* If OP0 is a constant, just convert it into the proper mode. */
6751 if (CONSTANT_P (op0))
6752 return
6753 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6754 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
12342f90 6755
26fcb35a 6756 if (modifier == EXPAND_INITIALIZER)
38a448ca 6757 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 6758
bbf6f052 6759 if (target == 0)
6760 return
6761 convert_to_mode (mode, op0,
6762 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 6763 else
6764 convert_move (target, op0,
6765 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6766 return target;
6767
6768 case PLUS_EXPR:
6769 /* We come here from MINUS_EXPR when the second operand is a
6770 constant. */
6771 plus_expr:
6772 this_optab = add_optab;
6773
6774 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6775 something else, make sure we add the register to the constant and
6776 then to the other thing. This case can occur during strength
6777 reduction and doing it this way will produce better code if the
6778 frame pointer or argument pointer is eliminated.
6779
6780 fold-const.c will ensure that the constant is always in the inner
6781 PLUS_EXPR, so the only case we need to do anything about is if
6782 sp, ap, or fp is our second argument, in which case we must swap
6783 the innermost first argument and our second argument. */
6784
6785 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6786 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6787 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6788 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6789 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6790 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6791 {
6792 tree t = TREE_OPERAND (exp, 1);
6793
6794 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6795 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6796 }
6797
88f63c77 6798 /* If the result is to be ptr_mode and we are adding an integer to
6799 something, we might be forming a constant. So try to use
6800 plus_constant. If it produces a sum and we can't accept it,
6801 use force_operand. This allows P = &ARR[const] to generate
6802 efficient code on machines where a SYMBOL_REF is not a valid
6803 address.
6804
6805 If this is an EXPAND_SUM call, always return the sum. */
c980ac49 6806 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
88f63c77 6807 || mode == ptr_mode)
bbf6f052 6808 {
6809 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6810 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6811 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6812 {
6813 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6814 EXPAND_SUM);
6815 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6816 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6817 op1 = force_operand (op1, target);
6818 return op1;
6819 }
bbf6f052 6820
6821 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6822 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
6823 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6824 {
6825 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6826 EXPAND_SUM);
6827 if (! CONSTANT_P (op0))
6828 {
6829 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6830 VOIDmode, modifier);
6831 /* Don't go to both_summands if modifier
6832 says it's not right to return a PLUS. */
6833 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6834 goto binop2;
6835 goto both_summands;
6836 }
6837 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6838 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6839 op0 = force_operand (op0, target);
6840 return op0;
6841 }
6842 }
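/* Editorial sketch with hypothetical declarations: the plus_constant
   path above is what lets P = &ARR[const] fold into a single
   symbol-plus-offset operand.  A file-scope initializer needs exactly
   this (the EXPAND_INITIALIZER case): the address below must become
   one relocation, arr + 40, with no run-time addition.  */
extern int arr[100];
static int *model_p = &arr[10];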
6843
6844 /* No sense saving up arithmetic to be done
6845 if it's all in the wrong mode to form part of an address.
6846 And force_operand won't know whether to sign-extend or
6847 zero-extend. */
6848 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 6849 || mode != ptr_mode)
c980ac49 6850 goto binop;
6851
6852 preexpand_calls (exp);
e5e809f4 6853 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6854 subtarget = 0;
6855
6856 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6857 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
bbf6f052 6858
c980ac49 6859 both_summands:
6860 /* Make sure any term that's a sum with a constant comes last. */
6861 if (GET_CODE (op0) == PLUS
6862 && CONSTANT_P (XEXP (op0, 1)))
6863 {
6864 temp = op0;
6865 op0 = op1;
6866 op1 = temp;
6867 }
6868 /* If adding to a sum including a constant,
6869 associate it to put the constant outside. */
6870 if (GET_CODE (op1) == PLUS
6871 && CONSTANT_P (XEXP (op1, 1)))
6872 {
6873 rtx constant_term = const0_rtx;
6874
6875 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6876 if (temp != 0)
6877 op0 = temp;
6878 /* Ensure that MULT comes first if there is one. */
6879 else if (GET_CODE (op0) == MULT)
38a448ca 6880 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
bbf6f052 6881 else
38a448ca 6882 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
6883
6884 /* Let's also eliminate constants from op0 if possible. */
6885 op0 = eliminate_constant_term (op0, &constant_term);
6886
6887 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6888 their sum should be a constant. Form it into OP1, since the
6889 result we want will then be OP0 + OP1. */
6890
6891 temp = simplify_binary_operation (PLUS, mode, constant_term,
6892 XEXP (op1, 1));
6893 if (temp != 0)
6894 op1 = temp;
6895 else
38a448ca 6896 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
6897 }
6898
6899 /* Put a constant term last and put a multiplication first. */
6900 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6901 temp = op1, op1 = op0, op0 = temp;
6902
6903 temp = simplify_binary_operation (PLUS, mode, op0, op1);
38a448ca 6904 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
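/* Editorial note: a concrete instance of the canonicalisation above.
   For (x*4 + 8) + (y + 12), eliminate_constant_term pulls the 8 out of
   the left summand and simplify_binary_operation folds 8 + 12, so the
   result is (up to operand order) (x*4 + y) + 20 -- a single trailing
   constant, with any MULT kept in front.  */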
6905
6906 case MINUS_EXPR:
6907 /* For initializers, we are allowed to return a MINUS of two
6908 symbolic constants. Here we handle all cases when both operands
6909 are constant. */
6910 /* Handle difference of two symbolic constants,
6911 for the sake of an initializer. */
6912 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6913 && really_constant_p (TREE_OPERAND (exp, 0))
6914 && really_constant_p (TREE_OPERAND (exp, 1)))
6915 {
906c4e36 6916 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
921b3427 6917 VOIDmode, ro_modifier);
906c4e36 6918 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
921b3427 6919 VOIDmode, ro_modifier);
ea87523e 6920
6921 /* If the last operand is a CONST_INT, use plus_constant of
6922 the negated constant. Else make the MINUS. */
6923 if (GET_CODE (op1) == CONST_INT)
6924 return plus_constant (op0, - INTVAL (op1));
6925 else
38a448ca 6926 return gen_rtx_MINUS (mode, op0, op1);
6927 }
6928 /* Convert A - const to A + (-const). */
6929 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6930 {
6931 tree negated = fold (build1 (NEGATE_EXPR, type,
6932 TREE_OPERAND (exp, 1)));
6933
6934 /* Deal with the case where we can't negate the constant
6935 in TYPE. */
6936 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6937 {
6938 tree newtype = signed_type (type);
6939 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6940 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6941 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6942
6943 if (! TREE_OVERFLOW (newneg))
6944 return expand_expr (convert (type,
6945 build (PLUS_EXPR, newtype,
6946 newop0, newneg)),
921b3427 6947 target, tmode, ro_modifier);
6948 }
6949 else
6950 {
6951 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6952 goto plus_expr;
6953 }
6954 }
6955 this_optab = sub_optab;
6956 goto binop;
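/* Editorial note: the rewrite above turns A - 5 into A + (-5) so the
   PLUS_EXPR machinery (and plus_constant) can fold it.  The guarded
   path exists because negating the constant can overflow in TYPE --
   for unsigned types, or for A - INT_MIN -- in which case the sum is
   retried in the corresponding signed type; if the negation still
   overflows there, control falls through to an ordinary subtraction.  */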
6957
6958 case MULT_EXPR:
6959 preexpand_calls (exp);
6960 /* If first operand is constant, swap them.
6961 Thus the following special case checks need only
6962 check the second operand. */
6963 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6964 {
6965 register tree t1 = TREE_OPERAND (exp, 0);
6966 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6967 TREE_OPERAND (exp, 1) = t1;
6968 }
6969
6970 /* Attempt to return something suitable for generating an
6971 indexed address, for machines that support that. */
6972
88f63c77 6973 if (modifier == EXPAND_SUM && mode == ptr_mode
bbf6f052 6974 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 6975 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 6976 {
6977 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6978 EXPAND_SUM);
6979
6980 /* Apply distributive law if OP0 is x+c. */
6981 if (GET_CODE (op0) == PLUS
6982 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
6983 return gen_rtx_PLUS (mode,
6984 gen_rtx_MULT (mode, XEXP (op0, 0),
6985 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
6986 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6987 * INTVAL (XEXP (op0, 1))));
6988
6989 if (GET_CODE (op0) != REG)
906c4e36 6990 op0 = force_operand (op0, NULL_RTX);
6991 if (GET_CODE (op0) != REG)
6992 op0 = copy_to_mode_reg (mode, op0);
6993
6994 return gen_rtx_MULT (mode, op0,
6995 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
6996 }
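/* Editorial sketch: the distributive rewrite just above --
   (x + c) * d  ==>  x*d + c*d -- keeps the constant product separate
   so it can later fold into an address displacement.  Numerically: */
static long
model_distribute (long x, long c, long d)
{
  return x * d + c * d;		/* same value as (x + c) * d */
}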
6997
e5e809f4 6998 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
6999 subtarget = 0;
7000
7001 /* Check for multiplying things that have been extended
7002 from a narrower type. If this machine supports multiplying
7003 in that narrower type with a result in the desired type,
7004 do it that way, and avoid the explicit type-conversion. */
7005 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
7006 && TREE_CODE (type) == INTEGER_TYPE
7007 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7008 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
7009 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
7010 && int_fits_type_p (TREE_OPERAND (exp, 1),
7011 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7012 /* Don't use a widening multiply if a shift will do. */
7013 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 7014 > HOST_BITS_PER_WIDE_INT)
7015 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
7016 ||
7017 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7018 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7019 ==
7020 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
7021 /* If both operands are extended, they must either both
7022 be zero-extended or both be sign-extended. */
7023 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
7024 ==
7025 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
7026 {
7027 enum machine_mode innermode
7028 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
7029 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7030 ? smul_widen_optab : umul_widen_optab);
7031 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
7032 ? umul_widen_optab : smul_widen_optab);
b10af0c8 7033 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 7034 {
7035 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
7036 {
7037 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7038 NULL_RTX, VOIDmode, 0);
7039 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7040 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7041 VOIDmode, 0);
7042 else
7043 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7044 NULL_RTX, VOIDmode, 0);
7045 goto binop2;
7046 }
7047 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
7048 && innermode == word_mode)
7049 {
7050 rtx htem;
7051 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7052 NULL_RTX, VOIDmode, 0);
7053 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
7054 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
7055 VOIDmode, 0);
7056 else
7057 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
7058 NULL_RTX, VOIDmode, 0);
7059 temp = expand_binop (mode, other_optab, op0, op1, target,
7060 unsignedp, OPTAB_LIB_WIDEN);
7061 htem = expand_mult_highpart_adjust (innermode,
7062 gen_highpart (innermode, temp),
7063 op0, op1,
7064 gen_highpart (innermode, temp),
7065 unsignedp);
7066 emit_move_insn (gen_highpart (innermode, temp), htem);
7067 return temp;
7068 }
7069 }
7070 }
7071 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7072 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7073 return expand_mult (mode, op0, op1, target, unsignedp);
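/* Editorial sketch, assuming 32-bit int and 64-bit long long: the
   widening-multiply case above recognises a product of two extended
   operands, so one narrow-by-narrow-to-wide multiply replaces the
   extend-then-multiply sequence.  */
static long long
model_widen_mul (int a, int b)
{
  return (long long) a * (long long) b;	/* one 32x32->64 multiply */
}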
7074
7075 case TRUNC_DIV_EXPR:
7076 case FLOOR_DIV_EXPR:
7077 case CEIL_DIV_EXPR:
7078 case ROUND_DIV_EXPR:
7079 case EXACT_DIV_EXPR:
7080 preexpand_calls (exp);
e5e809f4 7081 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7082 subtarget = 0;
7083 /* Possible optimization: compute the dividend with EXPAND_SUM
7084 then if the divisor is constant can optimize the case
7085 where some terms of the dividend have coeffs divisible by it. */
7086 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7087 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7088 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7089
7090 case RDIV_EXPR:
7091 this_optab = flodiv_optab;
7092 goto binop;
7093
7094 case TRUNC_MOD_EXPR:
7095 case FLOOR_MOD_EXPR:
7096 case CEIL_MOD_EXPR:
7097 case ROUND_MOD_EXPR:
7098 preexpand_calls (exp);
e5e809f4 7099 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7100 subtarget = 0;
7101 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7102 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7103 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7104
7105 case FIX_ROUND_EXPR:
7106 case FIX_FLOOR_EXPR:
7107 case FIX_CEIL_EXPR:
7108 abort (); /* Not used for C. */
7109
7110 case FIX_TRUNC_EXPR:
906c4e36 7111 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7112 if (target == 0)
7113 target = gen_reg_rtx (mode);
7114 expand_fix (target, op0, unsignedp);
7115 return target;
7116
7117 case FLOAT_EXPR:
906c4e36 7118 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
7119 if (target == 0)
7120 target = gen_reg_rtx (mode);
7121 /* expand_float can't figure out what to do if FROM has VOIDmode.
7122 So give it the correct mode. With -O, cse will optimize this. */
7123 if (GET_MODE (op0) == VOIDmode)
7124 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7125 op0);
7126 expand_float (target, op0,
7127 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7128 return target;
7129
7130 case NEGATE_EXPR:
5b22bee8 7131 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7132 temp = expand_unop (mode, neg_optab, op0, target, 0);
7133 if (temp == 0)
7134 abort ();
7135 return temp;
7136
7137 case ABS_EXPR:
7138 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7139
2d7050fd 7140 /* Handle complex values specially. */
7141 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
7142 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
7143 return expand_complex_abs (mode, op0, target, unsignedp);
2d7050fd 7144
7145 /* Unsigned abs is simply the operand. Testing here means we don't
7146 risk generating incorrect code below. */
7147 if (TREE_UNSIGNED (type))
7148 return op0;
7149
2e5ec6cf 7150 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 7151 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
7152
7153 case MAX_EXPR:
7154 case MIN_EXPR:
7155 target = original_target;
e5e809f4 7156 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
fc155707 7157 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 7158 || GET_MODE (target) != mode
7159 || (GET_CODE (target) == REG
7160 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7161 target = gen_reg_rtx (mode);
906c4e36 7162 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
7163 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7164
7165 /* First try to do it with a special MIN or MAX instruction.
7166 If that does not win, use a conditional jump to select the proper
7167 value. */
7168 this_optab = (TREE_UNSIGNED (type)
7169 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7170 : (code == MIN_EXPR ? smin_optab : smax_optab));
7171
7172 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7173 OPTAB_WIDEN);
7174 if (temp != 0)
7175 return temp;
7176
7177 /* At this point, a MEM target is no longer useful; we will get better
7178 code without it. */
7179
7180 if (GET_CODE (target) == MEM)
7181 target = gen_reg_rtx (mode);
7182
7183 if (target != op0)
7184 emit_move_insn (target, op0);
d6a5ac33 7185
bbf6f052 7186 op0 = gen_label_rtx ();
d6a5ac33 7187
7188 /* If this mode is an integer too wide to compare properly,
7189 compare word by word. Rely on cse to optimize constant cases. */
d6a5ac33 7190 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
bbf6f052 7191 {
f81497d9 7192 if (code == MAX_EXPR)
7193 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7194 target, op1, NULL_RTX, op0);
bbf6f052 7195 else
7196 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7197 op1, target, NULL_RTX, op0);
ee456b1c 7198 emit_move_insn (target, op1);
bbf6f052 7199 }
7200 else
7201 {
7202 if (code == MAX_EXPR)
7203 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7204 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7205 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
7206 else
7207 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
7208 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7209 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
f81497d9 7210 if (temp == const0_rtx)
ee456b1c 7211 emit_move_insn (target, op1);
7212 else if (temp != const_true_rtx)
7213 {
7214 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7215 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7216 else
7217 abort ();
ee456b1c 7218 emit_move_insn (target, op1);
7219 }
7220 }
7221 emit_label (op0);
7222 return target;
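/* Editorial sketch: the branch-based fallback above for MAX_EXPR in
   plain C -- copy OP0 into the target, then branch around the store of
   OP1 whenever the target already holds the right value.  */
static int
model_max (int a, int b)
{
  int t = a;			/* emit_move_insn (target, op0) */
  if (! (t >= b))		/* compare target GE op1, branch if true */
    t = b;			/* emit_move_insn (target, op1) */
  return t;
}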
7223
7224 case BIT_NOT_EXPR:
7225 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7226 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7227 if (temp == 0)
7228 abort ();
7229 return temp;
7230
7231 case FFS_EXPR:
7232 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7233 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7234 if (temp == 0)
7235 abort ();
7236 return temp;
7237
7238 /* ??? Can optimize bitwise operations with one arg constant.
7239 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7240 and (a bitwise1 b) bitwise2 b (etc)
7241 	     but that is probably not worthwhile.  */
7242
7243 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7244 boolean values when we want in all cases to compute both of them. In
7245 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7246 as actual zero-or-1 values and then bitwise anding. In cases where
7247 there cannot be any side effects, better code would be made by
7248 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7249 how to recognize those cases. */
7250
7251 case TRUTH_AND_EXPR:
7252 case BIT_AND_EXPR:
7253 this_optab = and_optab;
7254 goto binop;
7255
7256 case TRUTH_OR_EXPR:
7257 case BIT_IOR_EXPR:
7258 this_optab = ior_optab;
7259 goto binop;
7260
874726a8 7261 case TRUTH_XOR_EXPR:
7262 case BIT_XOR_EXPR:
7263 this_optab = xor_optab;
7264 goto binop;
7265
7266 case LSHIFT_EXPR:
7267 case RSHIFT_EXPR:
7268 case LROTATE_EXPR:
7269 case RROTATE_EXPR:
7270 preexpand_calls (exp);
e5e809f4 7271 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
7272 subtarget = 0;
7273 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7274 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7275 unsignedp);
7276
7277 /* Could determine the answer when only additive constants differ. Also,
7278 the addition of one can be handled by changing the condition. */
7279 case LT_EXPR:
7280 case LE_EXPR:
7281 case GT_EXPR:
7282 case GE_EXPR:
7283 case EQ_EXPR:
7284 case NE_EXPR:
7285 preexpand_calls (exp);
7286 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7287 if (temp != 0)
7288 return temp;
d6a5ac33 7289
0f41302f 7290 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
7291 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7292 && original_target
7293 && GET_CODE (original_target) == REG
7294 && (GET_MODE (original_target)
7295 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7296 {
7297 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7298 VOIDmode, 0);
7299
7300 if (temp != original_target)
7301 temp = copy_to_reg (temp);
d6a5ac33 7302
bbf6f052 7303 op1 = gen_label_rtx ();
906c4e36 7304 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
7305 GET_MODE (temp), unsignedp, 0);
7306 emit_jump_insn (gen_beq (op1));
7307 emit_move_insn (temp, const1_rtx);
7308 emit_label (op1);
7309 return temp;
7310 }
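/* Editorial sketch: the sequence above for FOO != 0 when do_store_flag
   fails -- reuse the loaded value, skipping the store when it is
   already zero, otherwise overwriting it with 1.  */
static int
model_ne_zero (int foo)
{
  int t = foo;			/* load foo into the target register */
  if (t != 0)			/* gen_beq branches around the store */
    t = 1;
  return t;
}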
d6a5ac33 7311
7312 /* If no set-flag instruction, must generate a conditional
7313 store into a temporary variable. Drop through
7314 and handle this like && and ||. */
7315
7316 case TRUTH_ANDIF_EXPR:
7317 case TRUTH_ORIF_EXPR:
e44842fe 7318 if (! ignore
e5e809f4 7319 && (target == 0 || ! safe_from_p (target, exp, 1)
7320 /* Make sure we don't have a hard reg (such as function's return
7321 value) live across basic blocks, if not optimizing. */
7322 || (!optimize && GET_CODE (target) == REG
7323 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 7324 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
7325
7326 if (target)
7327 emit_clr_insn (target);
7328
7329 op1 = gen_label_rtx ();
7330 jumpifnot (exp, op1);
7331
7332 if (target)
7333 emit_0_to_1_insn (target);
7334
bbf6f052 7335 emit_label (op1);
e44842fe 7336 return ignore ? const0_rtx : target;
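/* Editorial sketch: the clear/jump/set pattern above, shown for
   TRUTH_ANDIF_EXPR.  */
static int
model_andif (int a, int b)
{
  int t = 0;			/* emit_clr_insn (target) */
  if (a && b)			/* jumpifnot (exp, op1) */
    t = 1;			/* emit_0_to_1_insn (target) */
  return t;
}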
7337
7338 case TRUTH_NOT_EXPR:
7339 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7340 /* The parser is careful to generate TRUTH_NOT_EXPR
7341 only with operands that are always zero or one. */
906c4e36 7342 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
7343 target, 1, OPTAB_LIB_WIDEN);
7344 if (temp == 0)
7345 abort ();
7346 return temp;
7347
7348 case COMPOUND_EXPR:
7349 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7350 emit_queue ();
7351 return expand_expr (TREE_OPERAND (exp, 1),
7352 (ignore ? const0_rtx : target),
7353 VOIDmode, 0);
7354
7355 case COND_EXPR:
7356 /* If we would have a "singleton" (see below) were it not for a
7357 conversion in each arm, bring that conversion back out. */
7358 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7359 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7360 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7361 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7362 {
7363 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7364 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7365
7366 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7367 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7368 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7369 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7370 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7371 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7372 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7373 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7374 return expand_expr (build1 (NOP_EXPR, type,
7375 build (COND_EXPR, TREE_TYPE (true),
7376 TREE_OPERAND (exp, 0),
7377 true, false)),
7378 target, tmode, modifier);
7379 }
7380
7381 {
7382 /* Note that COND_EXPRs whose type is a structure or union
7383 are required to be constructed to contain assignments of
7384 a temporary variable, so that we can evaluate them here
7385 for side effect only. If type is void, we must do likewise. */
7386
7387 /* If an arm of the branch requires a cleanup,
7388 only that cleanup is performed. */
7389
7390 tree singleton = 0;
7391 tree binary_op = 0, unary_op = 0;
7392
7393 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7394 convert it to our mode, if necessary. */
7395 if (integer_onep (TREE_OPERAND (exp, 1))
7396 && integer_zerop (TREE_OPERAND (exp, 2))
7397 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7398 {
7399 if (ignore)
7400 {
7401 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
921b3427 7402 ro_modifier);
7403 return const0_rtx;
7404 }
7405
921b3427 7406 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
7407 if (GET_MODE (op0) == mode)
7408 return op0;
d6a5ac33 7409
7410 if (target == 0)
7411 target = gen_reg_rtx (mode);
7412 convert_move (target, op0, unsignedp);
7413 return target;
7414 }
7415
7416 /* Check for X ? A + B : A. If we have this, we can copy A to the
7417 output and conditionally add B. Similarly for unary operations.
7418 Don't do this if X has side-effects because those side effects
7419 might affect A or B and the "?" operation is a sequence point in
7420 ANSI. (operand_equal_p tests for side effects.) */
7421
7422 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7423 && operand_equal_p (TREE_OPERAND (exp, 2),
7424 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7425 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7426 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7427 && operand_equal_p (TREE_OPERAND (exp, 1),
7428 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7429 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7430 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7431 && operand_equal_p (TREE_OPERAND (exp, 2),
7432 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7433 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7434 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7435 && operand_equal_p (TREE_OPERAND (exp, 1),
7436 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7437 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7438
7439 /* If we are not to produce a result, we have no target. Otherwise,
7440 if a target was specified use it; it will not be used as an
7441 intermediate target unless it is safe. If no target, use a
7442 temporary. */
7443
7444 if (ignore)
7445 temp = 0;
7446 else if (original_target
e5e809f4 7447 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
7448 || (singleton && GET_CODE (original_target) == REG
7449 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7450 && original_target == var_rtx (singleton)))
7451 && GET_MODE (original_target) == mode
7452#ifdef HAVE_conditional_move
7453 && (! can_conditionally_move_p (mode)
7454 || GET_CODE (original_target) == REG
7455 || TREE_ADDRESSABLE (type))
7456#endif
7457 && ! (GET_CODE (original_target) == MEM
7458 && MEM_VOLATILE_P (original_target)))
7459 temp = original_target;
7460 else if (TREE_ADDRESSABLE (type))
7461 abort ();
7462 else
7463 temp = assign_temp (type, 0, 0, 1);
7464
7465 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7466 do the test of X as a store-flag operation, do this as
7467 A + ((X != 0) << log C). Similarly for other simple binary
7468 operators. Only do for C == 1 if BRANCH_COST is low. */
dd27116b 7469 if (temp && singleton && binary_op
7470 && (TREE_CODE (binary_op) == PLUS_EXPR
7471 || TREE_CODE (binary_op) == MINUS_EXPR
7472 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 7473 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
7474 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7475 : integer_onep (TREE_OPERAND (binary_op, 1)))
7476 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7477 {
7478 rtx result;
7479 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7480 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7481 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
2d444001 7482 : xor_optab);
7483
7484 /* If we had X ? A : A + 1, do this as A + (X == 0).
7485
7486 We have to invert the truth value here and then put it
7487 back later if do_store_flag fails. We cannot simply copy
7488 TREE_OPERAND (exp, 0) to another variable and modify that
7489 because invert_truthvalue can modify the tree pointed to
7490 by its argument. */
7491 if (singleton == TREE_OPERAND (exp, 1))
7492 TREE_OPERAND (exp, 0)
7493 = invert_truthvalue (TREE_OPERAND (exp, 0));
7494
7495 result = do_store_flag (TREE_OPERAND (exp, 0),
e5e809f4 7496 (safe_from_p (temp, singleton, 1)
906c4e36 7497 ? temp : NULL_RTX),
7498 mode, BRANCH_COST <= 1);
7499
7500 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7501 result = expand_shift (LSHIFT_EXPR, mode, result,
7502 build_int_2 (tree_log2
7503 (TREE_OPERAND
7504 (binary_op, 1)),
7505 0),
e5e809f4 7506 (safe_from_p (temp, singleton, 1)
7507 ? temp : NULL_RTX), 0);
7508
7509 if (result)
7510 {
906c4e36 7511 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
7512 return expand_binop (mode, boptab, op1, result, temp,
7513 unsignedp, OPTAB_LIB_WIDEN);
7514 }
7515 else if (singleton == TREE_OPERAND (exp, 1))
7516 TREE_OPERAND (exp, 0)
7517 = invert_truthvalue (TREE_OPERAND (exp, 0));
7518 }
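/* Editorial sketch: the store-flag rewrite described above --
   X ? A + 4 : A, with 4 == 1 << 2, becomes a branch-free
   A + ((X != 0) << 2).  */
static int
model_cond_add (int x, int a)
{
  return a + ((x != 0) << 2);	/* equals x ? a + 4 : a */
}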
7519
dabf8373 7520 do_pending_stack_adjust ();
7521 NO_DEFER_POP;
7522 op0 = gen_label_rtx ();
7523
7524 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7525 {
7526 if (temp != 0)
7527 {
7528 /* If the target conflicts with the other operand of the
7529 binary op, we can't use it. Also, we can't use the target
7530 if it is a hard register, because evaluating the condition
7531 might clobber it. */
7532 if ((binary_op
e5e809f4 7533 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
7534 || (GET_CODE (temp) == REG
7535 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7536 temp = gen_reg_rtx (mode);
7537 store_expr (singleton, temp, 0);
7538 }
7539 else
906c4e36 7540 expand_expr (singleton,
2937cf87 7541 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7542 if (singleton == TREE_OPERAND (exp, 1))
7543 jumpif (TREE_OPERAND (exp, 0), op0);
7544 else
7545 jumpifnot (TREE_OPERAND (exp, 0), op0);
7546
956d6950 7547 start_cleanup_deferral ();
7548 if (binary_op && temp == 0)
7549 /* Just touch the other operand. */
7550 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 7551 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7552 else if (binary_op)
7553 store_expr (build (TREE_CODE (binary_op), type,
7554 make_tree (type, temp),
7555 TREE_OPERAND (binary_op, 1)),
7556 temp, 0);
7557 else
7558 store_expr (build1 (TREE_CODE (unary_op), type,
7559 make_tree (type, temp)),
7560 temp, 0);
7561 op1 = op0;
bbf6f052 7562 }
7563 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7564 comparison operator. If we have one of these cases, set the
7565 output to A, branch on A (cse will merge these two references),
7566 then set the output to FOO. */
7567 else if (temp
7568 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7569 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7570 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7571 TREE_OPERAND (exp, 1), 0)
7572 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7573 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 7574 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
7575 {
7576 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7577 temp = gen_reg_rtx (mode);
7578 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7579 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 7580
956d6950 7581 start_cleanup_deferral ();
7582 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7583 op1 = op0;
7584 }
7585 else if (temp
7586 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7587 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7588 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7589 TREE_OPERAND (exp, 2), 0)
7590 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7591 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 7592 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
7593 {
7594 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7595 temp = gen_reg_rtx (mode);
7596 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7597 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 7598
956d6950 7599 start_cleanup_deferral ();
7600 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7601 op1 = op0;
7602 }
7603 else
7604 {
7605 op1 = gen_label_rtx ();
7606 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 7607
956d6950 7608 start_cleanup_deferral ();
7609 if (temp != 0)
7610 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7611 else
7612 expand_expr (TREE_OPERAND (exp, 1),
7613 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 7614 end_cleanup_deferral ();
7615 emit_queue ();
7616 emit_jump_insn (gen_jump (op1));
7617 emit_barrier ();
7618 emit_label (op0);
956d6950 7619 start_cleanup_deferral ();
7620 if (temp != 0)
7621 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7622 else
7623 expand_expr (TREE_OPERAND (exp, 2),
7624 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
7625 }
7626
956d6950 7627 end_cleanup_deferral ();
7628
7629 emit_queue ();
7630 emit_label (op1);
7631 OK_DEFER_POP;
5dab5552 7632
7633 return temp;
7634 }
7635
7636 case TARGET_EXPR:
7637 {
7638 /* Something needs to be initialized, but we didn't know
7639 where that thing was when building the tree. For example,
7640 it could be the return value of a function, or a parameter
7641 		 to a function which is laid out on the stack, or a temporary
7642 variable which must be passed by reference.
7643
7644 We guarantee that the expression will either be constructed
7645 or copied into our original target. */
7646
7647 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 7648 tree cleanups = NULL_TREE;
5c062816 7649 tree exp1;
7650
7651 if (TREE_CODE (slot) != VAR_DECL)
7652 abort ();
7653
7654 if (! ignore)
7655 target = original_target;
7656
7657 if (target == 0)
7658 {
7659 if (DECL_RTL (slot) != 0)
7660 {
7661 target = DECL_RTL (slot);
5c062816 7662 		  /* If we have already expanded the slot, don't do
ac993f4f 7663 		     it again.  (mrs)  */
7664 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7665 return target;
ac993f4f 7666 }
7667 else
7668 {
e9a25f70 7669 target = assign_temp (type, 2, 0, 1);
7670 /* All temp slots at this level must not conflict. */
7671 preserve_temp_slots (target);
7672 DECL_RTL (slot) = target;
7673 if (TREE_ADDRESSABLE (slot))
7674 {
7675 TREE_ADDRESSABLE (slot) = 0;
7676 mark_addressable (slot);
7677 }
bbf6f052 7678
7679 /* Since SLOT is not known to the called function
7680 to belong to its stack frame, we must build an explicit
7681 cleanup. This case occurs when we must build up a reference
7682 to pass the reference as an argument. In this case,
7683 it is very likely that such a reference need not be
7684 built here. */
7685
7686 if (TREE_OPERAND (exp, 2) == 0)
7687 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
2a888d4c 7688 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 7689 }
7690 }
7691 else
7692 {
7693 	      /* This case does occur when expanding a parameter which
7694 needs to be constructed on the stack. The target
7695 is the actual stack address that we want to initialize.
7696 The function we call will perform the cleanup in this case. */
7697
7698 /* If we have already assigned it space, use that space,
7699 		 not the target that we were passed in, as our target
7700 parameter is only a hint. */
7701 if (DECL_RTL (slot) != 0)
7702 {
7703 target = DECL_RTL (slot);
7704 		  /* If we have already expanded the slot, don't do
7705 		     it again.  (mrs)  */
7706 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7707 return target;
7708 }
7709 else
7710 {
7711 DECL_RTL (slot) = target;
7712 /* If we must have an addressable slot, then make sure that
7713 the RTL that we just stored in slot is OK. */
7714 if (TREE_ADDRESSABLE (slot))
7715 {
7716 TREE_ADDRESSABLE (slot) = 0;
7717 mark_addressable (slot);
7718 }
7719 }
7720 }
7721
4847c938 7722 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
7723 /* Mark it as expanded. */
7724 TREE_OPERAND (exp, 1) = NULL_TREE;
7725
e5e809f4 7726 TREE_USED (slot) = 1;
41531e5b 7727 store_expr (exp1, target, 0);
61d6b1cc 7728
e976b8b2 7729 expand_decl_cleanup (NULL_TREE, cleanups);
61d6b1cc 7730
41531e5b 7731 return target;
7732 }
7733
7734 case INIT_EXPR:
7735 {
7736 tree lhs = TREE_OPERAND (exp, 0);
7737 tree rhs = TREE_OPERAND (exp, 1);
7738 tree noncopied_parts = 0;
7739 tree lhs_type = TREE_TYPE (lhs);
7740
7741 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7742 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7743 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7744 TYPE_NONCOPIED_PARTS (lhs_type));
7745 while (noncopied_parts != 0)
7746 {
7747 expand_assignment (TREE_VALUE (noncopied_parts),
7748 TREE_PURPOSE (noncopied_parts), 0, 0);
7749 noncopied_parts = TREE_CHAIN (noncopied_parts);
7750 }
7751 return temp;
7752 }
7753
7754 case MODIFY_EXPR:
7755 {
7756 /* If lhs is complex, expand calls in rhs before computing it.
7757 That's so we don't compute a pointer and save it over a call.
7758 If lhs is simple, compute it first so we can give it as a
7759 	     target if the rhs is just a call.  This avoids an extra temp and copy
7760 	     and prevents a partial subsumption that makes bad code.
7761 Actually we could treat component_ref's of vars like vars. */
7762
7763 tree lhs = TREE_OPERAND (exp, 0);
7764 tree rhs = TREE_OPERAND (exp, 1);
7765 tree noncopied_parts = 0;
7766 tree lhs_type = TREE_TYPE (lhs);
7767
7768 temp = 0;
7769
7770 if (TREE_CODE (lhs) != VAR_DECL
7771 && TREE_CODE (lhs) != RESULT_DECL
7772 && TREE_CODE (lhs) != PARM_DECL
7773 && ! (TREE_CODE (lhs) == INDIRECT_REF
7774 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
7775 preexpand_calls (exp);
7776
7777 /* Check for |= or &= of a bitfield of size one into another bitfield
7778 of size 1. In this case, (unless we need the result of the
7779 assignment) we can do this more efficiently with a
7780 test followed by an assignment, if necessary.
7781
7782 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7783 things change so we do, this code should be enhanced to
7784 support it. */
7785 if (ignore
7786 && TREE_CODE (lhs) == COMPONENT_REF
7787 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7788 || TREE_CODE (rhs) == BIT_AND_EXPR)
7789 && TREE_OPERAND (rhs, 0) == lhs
7790 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7791 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7792 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7793 {
7794 rtx label = gen_label_rtx ();
7795
7796 do_jump (TREE_OPERAND (rhs, 1),
7797 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7798 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7799 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7800 (TREE_CODE (rhs) == BIT_IOR_EXPR
7801 ? integer_one_node
7802 : integer_zero_node)),
7803 0, 0);
e7c33f54 7804 do_pending_stack_adjust ();
7805 emit_label (label);
7806 return const0_rtx;
7807 }
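/* Editorial sketch with a hypothetical struct: the special case above
   for 'p->x |= p->y' on one-bit fields -- test the source bit and
   conditionally store a constant, instead of computing the full
   bitwise value.  */
struct model_bits { unsigned int x : 1, y : 1; };

static void
model_or_assign (struct model_bits *p)
{
  if (p->y)			/* do_jump skips the store when the bit is 0 */
    p->x = 1;			/* expand_assignment of integer_one_node */
}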
7808
7809 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7810 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7811 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7812 TYPE_NONCOPIED_PARTS (lhs_type));
7813
7814 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7815 while (noncopied_parts != 0)
7816 {
7817 expand_assignment (TREE_PURPOSE (noncopied_parts),
7818 TREE_VALUE (noncopied_parts), 0, 0);
7819 noncopied_parts = TREE_CHAIN (noncopied_parts);
7820 }
7821 return temp;
7822 }
7823
7824 case RETURN_EXPR:
7825 if (!TREE_OPERAND (exp, 0))
7826 expand_null_return ();
7827 else
7828 expand_return (TREE_OPERAND (exp, 0));
7829 return const0_rtx;
7830
7831 case PREINCREMENT_EXPR:
7832 case PREDECREMENT_EXPR:
7b8b9722 7833 return expand_increment (exp, 0, ignore);
7834
7835 case POSTINCREMENT_EXPR:
7836 case POSTDECREMENT_EXPR:
7837 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 7838 return expand_increment (exp, ! ignore, ignore);
7839
7840 case ADDR_EXPR:
987c71d9 7841 /* If nonzero, TEMP will be set to the address of something that might
0f41302f 7842 be a MEM corresponding to a stack slot. */
7843 temp = 0;
7844
7845 /* Are we taking the address of a nested function? */
7846 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 7847 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
7848 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7849 && ! TREE_STATIC (exp))
7850 {
7851 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7852 op0 = force_operand (op0, target);
7853 }
7854 /* If we are taking the address of something erroneous, just
7855 return a zero. */
7856 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7857 return const0_rtx;
7858 else
7859 {
7860 /* We make sure to pass const0_rtx down if we came in with
7861 ignore set, to avoid doing the cleanups twice for something. */
7862 op0 = expand_expr (TREE_OPERAND (exp, 0),
7863 ignore ? const0_rtx : NULL_RTX, VOIDmode,
7864 (modifier == EXPAND_INITIALIZER
7865 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 7866
7867 /* If we are going to ignore the result, OP0 will have been set
7868 to const0_rtx, so just return it. Don't get confused and
7869 think we are taking the address of the constant. */
7870 if (ignore)
7871 return op0;
7872
7873 op0 = protect_from_queue (op0, 0);
7874
7875 /* We would like the object in memory. If it is a constant,
7876 we can have it be statically allocated into memory. For
682ba3a6 7877 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
896102d0
RK
7878 memory and store the value into it. */
7879
7880 if (CONSTANT_P (op0))
7881 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7882 op0);
987c71d9 7883 else if (GET_CODE (op0) == MEM)
7884 {
7885 mark_temp_addr_taken (op0);
7886 temp = XEXP (op0, 0);
7887 }
896102d0 7888
682ba3a6 7889 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6c8538cc 7890 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
7891 {
7892 	      /* If this object is in a register, it must not
0f41302f 7893 		 be BLKmode.  */
896102d0 7894 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
06089a8b 7895 rtx memloc = assign_temp (inner_type, 1, 1, 1);
896102d0 7896
7a0b7b9a 7897 mark_temp_addr_taken (memloc);
7898 emit_move_insn (memloc, op0);
7899 op0 = memloc;
7900 }
7901
7902 if (GET_CODE (op0) != MEM)
7903 abort ();
7904
7905 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
7906 {
7907 temp = XEXP (op0, 0);
7908#ifdef POINTERS_EXTEND_UNSIGNED
7909 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7910 && mode == ptr_mode)
9fcfcce7 7911 temp = convert_memory_address (ptr_mode, temp);
7912#endif
7913 return temp;
7914 }
987c71d9 7915
7916 op0 = force_operand (XEXP (op0, 0), target);
7917 }
987c71d9 7918
bbf6f052 7919 if (flag_force_addr && GET_CODE (op0) != REG)
7920 op0 = force_reg (Pmode, op0);
7921
7922 if (GET_CODE (op0) == REG
7923 && ! REG_USERVAR_P (op0))
7924 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
7925
7926 /* If we might have had a temp slot, add an equivalent address
7927 for it. */
7928 if (temp != 0)
7929 update_temp_slot_address (temp, op0);
7930
7931#ifdef POINTERS_EXTEND_UNSIGNED
7932 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7933 && mode == ptr_mode)
9fcfcce7 7934 op0 = convert_memory_address (ptr_mode, op0);
7935#endif
7936
7937 return op0;
7938
7939 case ENTRY_VALUE_EXPR:
7940 abort ();
7941
7942 /* COMPLEX type for Extended Pascal & Fortran */
7943 case COMPLEX_EXPR:
7944 {
7945 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 7946 rtx insns;
7947
7948 /* Get the rtx code of the operands. */
7949 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7950 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7951
7952 if (! target)
7953 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7954
6551fa4d 7955 start_sequence ();
7956
7957 /* Move the real (op0) and imaginary (op1) parts to their location. */
7958 emit_move_insn (gen_realpart (mode, target), op0);
7959 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 7960
7961 insns = get_insns ();
7962 end_sequence ();
7963
7308a047 7964 /* Complex construction should appear as a single unit. */
7965 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7966 each with a separate pseudo as destination.
7967 It's not correct for flow to treat them as a unit. */
6d6e61ce 7968 if (GET_CODE (target) != CONCAT)
7969 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7970 else
7971 emit_insns (insns);
7972
7973 return target;
7974 }
7975
7976 case REALPART_EXPR:
7977 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7978 return gen_realpart (mode, op0);
7979
7980 case IMAGPART_EXPR:
7981 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7982 return gen_imagpart (mode, op0);
7983
7984 case CONJ_EXPR:
7985 {
62acb978 7986 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 7987 rtx imag_t;
6551fa4d 7988 rtx insns;
7989
7990 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7991
7992 if (! target)
d6a5ac33 7993 target = gen_reg_rtx (mode);
7308a047 7994
6551fa4d 7995 start_sequence ();
7996
7997 /* Store the realpart and the negated imagpart to target. */
7998 emit_move_insn (gen_realpart (partmode, target),
7999 gen_realpart (partmode, op0));
7308a047 8000
8001 imag_t = gen_imagpart (partmode, target);
8002 temp = expand_unop (partmode, neg_optab,
8003 gen_imagpart (partmode, op0), imag_t, 0);
8004 if (temp != imag_t)
8005 emit_move_insn (imag_t, temp);
8006
8007 insns = get_insns ();
8008 end_sequence ();
8009
8010 	  /* Conjugate should appear as a single unit.
8011 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
8012 each with a separate pseudo as destination.
8013 It's not correct for flow to treat them as a unit. */
6d6e61ce 8014 if (GET_CODE (target) != CONCAT)
8015 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8016 else
8017 emit_insns (insns);
8018
8019 return target;
8020 }
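/* Editorial sketch: the CONJ_EXPR expansion above in scalar form --
   copy the real part, negate the imaginary part.  */
static void
model_conj (double re, double im, double *out_re, double *out_im)
{
  *out_re = re;			/* gen_realpart copy */
  *out_im = -im;		/* expand_unop with neg_optab */
}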
8021
8022 case TRY_CATCH_EXPR:
8023 {
8024 tree handler = TREE_OPERAND (exp, 1);
8025
8026 expand_eh_region_start ();
8027
8028 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8029
8030 expand_eh_region_end (handler);
8031
8032 return op0;
8033 }
8034
8035 case POPDCC_EXPR:
8036 {
8037 rtx dcc = get_dynamic_cleanup_chain ();
38a448ca 8038 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
e976b8b2
MS
8039 return const0_rtx;
8040 }
8041
8042 case POPDHC_EXPR:
8043 {
8044 rtx dhc = get_dynamic_handler_chain ();
38a448ca 8045 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
e976b8b2
MS
8046 return const0_rtx;
8047 }
8048
bbf6f052 8049 case ERROR_MARK:
66538193
RS
8050 op0 = CONST0_RTX (tmode);
8051 if (op0 != 0)
8052 return op0;
bbf6f052
RK
8053 return const0_rtx;
8054
8055 default:
90764a87 8056 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
8057 }
8058
8059 /* Here to do an ordinary binary operator, generating an instruction
8060 from the optab already placed in `this_optab'. */
8061 binop:
8062 preexpand_calls (exp);
e5e809f4 8063 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
8064 subtarget = 0;
8065 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 8066 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8067 binop2:
8068 temp = expand_binop (mode, this_optab, op0, op1, target,
8069 unsignedp, OPTAB_LIB_WIDEN);
8070 if (temp == 0)
8071 abort ();
8072 return temp;
8073}
bbf6f052 8074
bbf6f052 8075
b93a436e
JL
8076\f
8077/* Return the alignment in bits of EXP, a pointer valued expression.
8078 But don't return more than MAX_ALIGN no matter what.
8079 The alignment returned is, by default, the alignment of the thing that
8080 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
8081
8082 Otherwise, look at the expression to see if we can do better, i.e., if the
8083 expression is actually pointing at an object whose alignment is tighter. */
0f41302f 8084
b93a436e
JL
8085static int
8086get_pointer_alignment (exp, max_align)
8087 tree exp;
8088 unsigned max_align;
bbf6f052 8089{
b93a436e
JL
8090 unsigned align, inner;
8091
8092 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8093 return 0;
8094
8095 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8096 align = MIN (align, max_align);
8097
8098 while (1)
bbf6f052 8099 {
b93a436e 8100 switch (TREE_CODE (exp))
bbf6f052 8101 {
b93a436e
JL
8102 case NOP_EXPR:
8103 case CONVERT_EXPR:
8104 case NON_LVALUE_EXPR:
8105 exp = TREE_OPERAND (exp, 0);
8106 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8107 return align;
8108 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8109 align = MIN (inner, max_align);
8110 break;
8111
8112 case PLUS_EXPR:
8113 /* If sum of pointer + int, restrict our maximum alignment to that
8114 imposed by the integer. If not, we can't do any better than
8115 ALIGN. */
8116 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
8117 return align;
8118
8119 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
8120 & (max_align - 1))
8121 != 0)
8122 max_align >>= 1;
8123
8124 exp = TREE_OPERAND (exp, 0);
8125 break;
8126
8127 case ADDR_EXPR:
8128 /* See what we are pointing at and look at its alignment. */
8129 exp = TREE_OPERAND (exp, 0);
8130 if (TREE_CODE (exp) == FUNCTION_DECL)
8131 align = FUNCTION_BOUNDARY;
8132 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8133 align = DECL_ALIGN (exp);
8134#ifdef CONSTANT_ALIGNMENT
8135 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
8136 align = CONSTANT_ALIGNMENT (exp, align);
c02bd5d9 8137#endif
b93a436e 8138 return MIN (align, max_align);
c02bd5d9 8139
b93a436e
JL
8140 default:
8141 return align;
8142 }
8143 }
8144}
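/* Worked example (illustrative): for "(char *) &d + 6" where d has
   DECL_ALIGN of 64 and MAX_ALIGN is 64, the PLUS_EXPR case shrinks
   MAX_ALIGN to 16 (the byte offset 6 is 48 bits, divisible by 16 but
   not by 32), and the ADDR_EXPR case then returns MIN (64, 16) = 16:
   only 2-byte alignment can be promised.  */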
8145\f
8146/* Return the tree node and offset if a given argument corresponds to
8147 a string constant. */
8148
8149static tree
8150string_constant (arg, ptr_offset)
8151 tree arg;
8152 tree *ptr_offset;
8153{
8154 STRIP_NOPS (arg);
8155
8156 if (TREE_CODE (arg) == ADDR_EXPR
8157 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8158 {
8159 *ptr_offset = integer_zero_node;
8160 return TREE_OPERAND (arg, 0);
8161 }
8162 else if (TREE_CODE (arg) == PLUS_EXPR)
8163 {
8164 tree arg0 = TREE_OPERAND (arg, 0);
8165 tree arg1 = TREE_OPERAND (arg, 1);
8166
8167 STRIP_NOPS (arg0);
8168 STRIP_NOPS (arg1);
8169
8170 if (TREE_CODE (arg0) == ADDR_EXPR
8171 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 8172 {
b93a436e
JL
8173 *ptr_offset = arg1;
8174 return TREE_OPERAND (arg0, 0);
bbf6f052 8175 }
b93a436e
JL
8176 else if (TREE_CODE (arg1) == ADDR_EXPR
8177 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 8178 {
b93a436e
JL
8179 *ptr_offset = arg0;
8180 return TREE_OPERAND (arg1, 0);
bbf6f052 8181 }
b93a436e 8182 }
ca695ac9 8183
b93a436e
JL
8184 return 0;
8185}
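/* Example (illustrative): for the tree form of "hello" + 2 -- a
   PLUS_EXPR of an ADDR_EXPR of a STRING_CST and the INTEGER_CST 2 --
   this returns the STRING_CST for "hello" and sets *PTR_OFFSET to
   the INTEGER_CST; any other shape yields 0.  */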
ca695ac9 8186
b93a436e
JL
8187 /* Compute the length of a C string. TREE_STRING_LENGTH is not the right
8188 way to do it, because the string could contain a zero byte in the middle.
8189 TREE_STRING_LENGTH is the size of the character array, not the string.
ca695ac9 8190
b93a436e
JL
8191 Unfortunately, string_constant can't access the values of const char
8192 arrays with initializers, so neither can we do so here. */
e87b4f3f 8193
b93a436e
JL
8194static tree
8195c_strlen (src)
8196 tree src;
8197{
8198 tree offset_node;
8199 int offset, max;
8200 char *ptr;
e7c33f54 8201
b93a436e
JL
8202 src = string_constant (src, &offset_node);
8203 if (src == 0)
8204 return 0;
8205 max = TREE_STRING_LENGTH (src);
8206 ptr = TREE_STRING_POINTER (src);
8207 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8208 {
8209 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8210 compute the offset to the following null if we don't know where to
8211 start searching for it. */
8212 int i;
8213 for (i = 0; i < max; i++)
8214 if (ptr[i] == 0)
8215 return 0;
8216 /* We don't know the starting offset, but we do know that the string
8217 has no internal zero bytes. We can assume that the offset falls
8218 within the bounds of the string; otherwise, the programmer deserves
8219 what he gets. Subtract the offset from the length of the string,
8220 and return that. */
8221 /* This would perhaps not be valid if we were dealing with named
8222 arrays in addition to literal string constants. */
8223 return size_binop (MINUS_EXPR, size_int (max), offset_node);
8224 }
e7c33f54 8225
b93a436e
JL
8226 /* We have a known offset into the string. Start searching there for
8227 a null character. */
8228 if (offset_node == 0)
8229 offset = 0;
8230 else
8231 {
8232 /* Did we get a long long offset? If so, punt. */
8233 if (TREE_INT_CST_HIGH (offset_node) != 0)
8234 return 0;
8235 offset = TREE_INT_CST_LOW (offset_node);
8236 }
8237 /* If the offset is known to be out of bounds, warn, and call strlen at
8238 runtime. */
8239 if (offset < 0 || offset > max)
8240 {
8241 warning ("offset outside bounds of constant string");
8242 return 0;
8243 }
8244 /* Use strlen to search for the first zero byte. Since any strings
8245 constructed with build_string will have nulls appended, we win even
8246 if we get handed something like (char[4])"abcd".
e7c33f54 8247
b93a436e
JL
8248 Since OFFSET is our starting index into the string, no further
8249 calculation is needed. */
8250 return size_int (strlen (ptr + offset));
8251}
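/* Examples (illustrative): c_strlen of "abcd" yields size_int (4);
   of "abcd" + 1, size_int (3); of "ab\0cd" + N for non-constant N,
   0, since the internal zero byte makes the answer depend on N.  */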
1bbddf11 8252
b93a436e
JL
8253rtx
8254expand_builtin_return_addr (fndecl_code, count, tem)
8255 enum built_in_function fndecl_code;
8256 int count;
8257 rtx tem;
8258{
8259 int i;
e7c33f54 8260
b93a436e
JL
8261 /* Some machines need special handling before we can access
8262 arbitrary frames. For example, on the sparc, we must first flush
8263 all register windows to the stack. */
8264#ifdef SETUP_FRAME_ADDRESSES
8265 if (count > 0)
8266 SETUP_FRAME_ADDRESSES ();
8267#endif
e87b4f3f 8268
b93a436e
JL
8269 /* On the sparc, the return address is not in the frame, it is in a
8270 register. There is no way to access it off of the current frame
8271 pointer, but it can be accessed off the previous frame pointer by
8272 reading the value from the register window save area. */
8273#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8274 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8275 count--;
8276#endif
60bac6ea 8277
b93a436e
JL
8278 /* Scan back COUNT frames to the specified frame. */
8279 for (i = 0; i < count; i++)
8280 {
8281 /* Assume the dynamic chain pointer is in the word that the
8282 frame address points to, unless otherwise specified. */
8283#ifdef DYNAMIC_CHAIN_ADDRESS
8284 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8285#endif
8286 tem = memory_address (Pmode, tem);
8287 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8288 }
ca695ac9 8289
b93a436e
JL
8290 /* For __builtin_frame_address, return what we've got. */
8291 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8292 return tem;
e9a25f70 8293
b93a436e
JL
8294 /* For __builtin_return_address, get the return address from that
8295 frame. */
8296#ifdef RETURN_ADDR_RTX
8297 tem = RETURN_ADDR_RTX (count, tem);
8298#else
8299 tem = memory_address (Pmode,
8300 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8301 tem = gen_rtx_MEM (Pmode, tem);
8302#endif
8303 return tem;
8304}
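/* Usage sketch (illustrative): __builtin_return_address (0) takes
   the zero-iteration path through the loop above and reads the
   return address out of the current frame; __builtin_frame_address
   (2) follows two DYNAMIC_CHAIN links before returning TEM.  */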
e9a25f70 8305
b93a436e
JL
8306/* __builtin_setjmp is passed a pointer to an array of five words (not
8307 all will be used on all machines). It operates similarly to the C
8308 library function of the same name, but is more efficient. Much of
8309 the code below (and for longjmp) is copied from the handling of
8310 non-local gotos.
ca695ac9 8311
b93a436e
JL
8312 NOTE: This is intended for use by GNAT and the exception handling
8313 scheme in the compiler and will only work in the method used by
8314 them. */
e9a25f70 8315
b93a436e 8316rtx
6fd1c67b 8317expand_builtin_setjmp (buf_addr, target, first_label, next_label)
b93a436e
JL
8318 rtx buf_addr;
8319 rtx target;
6fd1c67b 8320 rtx first_label, next_label;
b93a436e 8321{
6fd1c67b 8322 rtx lab1 = gen_label_rtx ();
a260abc9
DE
8323 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8324 enum machine_mode value_mode;
b93a436e 8325 rtx stack_save;
e9a25f70 8326
b93a436e 8327 value_mode = TYPE_MODE (integer_type_node);
ca695ac9 8328
b93a436e
JL
8329#ifdef POINTERS_EXTEND_UNSIGNED
8330 buf_addr = convert_memory_address (Pmode, buf_addr);
8331#endif
d7f21d63 8332
b93a436e 8333 buf_addr = force_reg (Pmode, buf_addr);
d7f21d63 8334
b93a436e
JL
8335 if (target == 0 || GET_CODE (target) != REG
8336 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8337 target = gen_reg_rtx (value_mode);
d7f21d63 8338
b93a436e 8339 emit_queue ();
d7f21d63 8340
9ec36da5
JL
8341 /* We store the frame pointer and the address of lab1 in the buffer
8342 and use the rest of it for the stack save area, which is
8343 machine-dependent. */
8344
0bc02db4
MS
8345#ifndef BUILTIN_SETJMP_FRAME_VALUE
8346#define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
8347#endif
8348
b93a436e 8349 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
0bc02db4 8350 BUILTIN_SETJMP_FRAME_VALUE);
6fd1c67b
RH
8351 emit_move_insn (validize_mem
8352 (gen_rtx_MEM (Pmode,
b93a436e
JL
8353 plus_constant (buf_addr,
8354 GET_MODE_SIZE (Pmode)))),
6fd1c67b 8355 gen_rtx_LABEL_REF (Pmode, lab1));
d7f21d63 8356
b93a436e
JL
8357 stack_save = gen_rtx_MEM (sa_mode,
8358 plus_constant (buf_addr,
8359 2 * GET_MODE_SIZE (Pmode)));
8360 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
e9a25f70 8361
6fd1c67b
RH
8362 /* If there is further processing to do, do it. */
8363#ifdef HAVE_builtin_setjmp_setup
8364 if (HAVE_builtin_setjmp_setup)
8365 emit_insn (gen_builtin_setjmp_setup (buf_addr));
b93a436e 8366#endif
d7f21d63 8367
6fd1c67b 8368 /* Set TARGET to zero and branch to the first-time-through label. */
b93a436e 8369 emit_move_insn (target, const0_rtx);
6fd1c67b 8370 emit_jump_insn (gen_jump (first_label));
b93a436e
JL
8371 emit_barrier ();
8372 emit_label (lab1);
d7f21d63 8373
6fd1c67b
RH
8374 /* Tell flow about the strange goings on. */
8375 current_function_has_nonlocal_label = 1;
8376
8377 /* The FP is clobbered when we get here, so we have to make sure it's
8378 marked as used by this function. */
b93a436e 8379 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
e9a25f70 8380
b93a436e
JL
8381 /* Mark the static chain as clobbered here so life information
8382 doesn't get messed up for it. */
8383 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
d7f21d63 8384
b93a436e
JL
8385 /* Now put in the code to restore the frame pointer, and argument
8386 pointer, if needed. The code below is from expand_end_bindings
8387 in stmt.c; see detailed documentation there. */
8388#ifdef HAVE_nonlocal_goto
8389 if (! HAVE_nonlocal_goto)
8390#endif
8391 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
ca695ac9 8392
b93a436e
JL
8393#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8394 if (fixed_regs[ARG_POINTER_REGNUM])
8395 {
8396#ifdef ELIMINABLE_REGS
081f5e7e 8397 int i;
b93a436e 8398 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
ca695ac9 8399
b93a436e
JL
8400 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8401 if (elim_regs[i].from == ARG_POINTER_REGNUM
8402 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8403 break;
ca695ac9 8404
b93a436e
JL
8405 if (i == sizeof elim_regs / sizeof elim_regs [0])
8406#endif
8407 {
8408 /* Now restore our arg pointer from the address at which it
8409 was saved in our stack frame.
8410 If space hasn't been allocated for it yet, make
8411 some now. */
8412 if (arg_pointer_save_area == 0)
8413 arg_pointer_save_area
8414 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8415 emit_move_insn (virtual_incoming_args_rtx,
8416 copy_to_reg (arg_pointer_save_area));
8417 }
8418 }
8419#endif
ca695ac9 8420
6fd1c67b
RH
8421#ifdef HAVE_builtin_setjmp_receiver
8422 if (HAVE_builtin_setjmp_receiver)
8423 emit_insn (gen_builtin_setjmp_receiver (lab1));
8424 else
8425#endif
b93a436e 8426#ifdef HAVE_nonlocal_goto_receiver
6fd1c67b
RH
8427 if (HAVE_nonlocal_goto_receiver)
8428 emit_insn (gen_nonlocal_goto_receiver ());
8429 else
b93a436e 8430#endif
081f5e7e
KG
8431 {
8432 ; /* Nothing */
8433 }
6fd1c67b
RH
8434
8435 /* Set TARGET, and branch to the next-time-through label. */
3e2b9a3d 8436 emit_move_insn (target, const1_rtx);
6fd1c67b
RH
8437 emit_jump_insn (gen_jump (next_label));
8438 emit_barrier ();
ca695ac9 8439
6fd1c67b
RH
8440 return target;
8441}
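/* Buffer layout established above (a reading of the stores, not
   separately documented): in Pmode words starting at BUF_ADDR,
   word 0 holds the frame value (BUILTIN_SETJMP_FRAME_VALUE), word 1
   the address of the receiver label LAB1, and the remaining words
   the stack save area in STACK_SAVEAREA_MODE (SAVE_NONLOCAL).
   expand_builtin_longjmp below reloads exactly these slots.  */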
ca695ac9 8442
6fd1c67b
RH
8443void
8444expand_builtin_longjmp (buf_addr, value)
8445 rtx buf_addr, value;
8446{
8447 rtx fp, lab, stack;
a260abc9 8448 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
ca695ac9 8449
6fd1c67b
RH
8450#ifdef POINTERS_EXTEND_UNSIGNED
8451 buf_addr = convert_memory_address (Pmode, buf_addr);
b93a436e 8452#endif
6fd1c67b
RH
8453 buf_addr = force_reg (Pmode, buf_addr);
8454
3e2b9a3d
JW
8455 /* We used to store value in static_chain_rtx, but that fails if pointers
8456 are smaller than integers. We instead require that the user must pass
8457 a second argument of 1, because that is what builtin_setjmp will
8458 return. This also makes EH slightly more efficient, since we are no
8459 longer copying around a value that we don't care about. */
8460 if (value != const1_rtx)
8461 abort ();
6fd1c67b
RH
8462
8463#ifdef HAVE_builtin_longjmp
8464 if (HAVE_builtin_longjmp)
3e2b9a3d 8465 emit_insn (gen_builtin_longjmp (buf_addr));
6fd1c67b 8466 else
b93a436e 8467#endif
6fd1c67b
RH
8468 {
8469 fp = gen_rtx_MEM (Pmode, buf_addr);
8470 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
8471 GET_MODE_SIZE (Pmode)));
e9a25f70 8472
6fd1c67b
RH
8473 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
8474 2 * GET_MODE_SIZE (Pmode)));
8475
8476 /* Pick up FP, label, and SP from the block and jump. This code is
8477 from expand_goto in stmt.c; see there for detailed comments. */
8478#if HAVE_nonlocal_goto
8479 if (HAVE_nonlocal_goto)
3e2b9a3d
JW
8480 /* We have to pass a value to the nonlocal_goto pattern that will
8481 get copied into the static_chain pointer, but it does not matter
8482 what that value is, because builtin_setjmp does not use it. */
6fd1c67b
RH
8483 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
8484 else
b93a436e 8485#endif
6fd1c67b
RH
8486 {
8487 lab = copy_to_reg (lab);
60bac6ea 8488
6fd1c67b
RH
8489 emit_move_insn (hard_frame_pointer_rtx, fp);
8490 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8491
8492 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8493 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
6fd1c67b
RH
8494 emit_indirect_jump (lab);
8495 }
8496 }
b93a436e 8497}
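#if 0 /* Usage sketch (illustrative; do_work is hypothetical).  The
	 second __builtin_longjmp argument must be the literal 1, as
	 enforced above.  */
static void *ehbuf[5];

static int
guarded_work ()
{
  if (__builtin_setjmp (ehbuf) == 0)
    return do_work ();		/* first pass through: setjmp yields 0 */
  return -1;			/* reached via __builtin_longjmp (ehbuf, 1) */
}
#endif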
60bac6ea 8498
55a6ba9f
JC
8499static rtx
8500get_memory_rtx (exp)
8501 tree exp;
8502{
8503 rtx mem;
8504 int is_aggregate;
8505
8506 mem = gen_rtx_MEM (BLKmode,
8507 memory_address (BLKmode,
8508 expand_expr (exp, NULL_RTX,
8509 ptr_mode, EXPAND_SUM)));
8510
8511 RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);
8512
8513 /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P
8514 if the value is the address of a structure or if the expression is
8515 cast to a pointer to structure type. */
8516 is_aggregate = 0;
8517
8518 while (TREE_CODE (exp) == NOP_EXPR)
8519 {
8520 tree cast_type = TREE_TYPE (exp);
8521 if (TREE_CODE (cast_type) == POINTER_TYPE
8522 && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
8523 {
8524 is_aggregate = 1;
8525 break;
8526 }
8527 exp = TREE_OPERAND (exp, 0);
8528 }
8529
8530 if (is_aggregate == 0)
8531 {
8532 tree type;
8533
8534 if (TREE_CODE (exp) == ADDR_EXPR)
8535 /* If this is the address of an object, check whether the
8536 object is an array. */
8537 type = TREE_TYPE (TREE_OPERAND (exp, 0));
8538 else
8539 type = TREE_TYPE (TREE_TYPE (exp));
8540 is_aggregate = AGGREGATE_TYPE_P (type);
8541 }
8542
8543 MEM_IN_STRUCT_P (mem) = is_aggregate;
8544 return mem;
8545}
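/* Example (illustrative): for a call like memcpy (&s, src, n) where
   s is a struct, get_memory_rtx on the "&s" argument yields a
   BLKmode MEM with MEM_IN_STRUCT_P set; for a plain "char *"
   argument IS_AGGREGATE stays 0 and the MEM is left unmarked.  */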
8546
b93a436e
JL
8547\f
8548/* Expand an expression EXP that calls a built-in function,
8549 with result going to TARGET if that's convenient
8550 (and in mode MODE if that's convenient).
8551 SUBTARGET may be used as the target for computing one of EXP's operands.
8552 IGNORE is nonzero if the value is to be ignored. */
60bac6ea 8553
b93a436e
JL
8554#define CALLED_AS_BUILT_IN(NODE) \
8555 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
60bac6ea 8556
b93a436e
JL
8557static rtx
8558expand_builtin (exp, target, subtarget, mode, ignore)
8559 tree exp;
8560 rtx target;
8561 rtx subtarget;
8562 enum machine_mode mode;
8563 int ignore;
8564{
8565 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8566 tree arglist = TREE_OPERAND (exp, 1);
8567 rtx op0;
8568 rtx lab1, insns;
8569 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8570 optab builtin_optab;
60bac6ea 8571
b93a436e
JL
8572 switch (DECL_FUNCTION_CODE (fndecl))
8573 {
8574 case BUILT_IN_ABS:
8575 case BUILT_IN_LABS:
8576 case BUILT_IN_FABS:
8577 /* build_function_call changes these into ABS_EXPR. */
8578 abort ();
4ed67205 8579
b93a436e
JL
8580 case BUILT_IN_SIN:
8581 case BUILT_IN_COS:
8582 /* Treat these like sqrt, but only if the user asks for them. */
8583 if (! flag_fast_math)
8584 break;
8585 case BUILT_IN_FSQRT:
8586 /* If not optimizing, call the library function. */
8587 if (! optimize)
8588 break;
4ed67205 8589
b93a436e
JL
8590 if (arglist == 0
8591 /* Arg could be wrong type if user redeclared this fcn wrong. */
8592 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
4ed67205
RK
8593 break;
8594
b93a436e
JL
8595 /* Stabilize and compute the argument. */
8596 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8597 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8598 {
8599 exp = copy_node (exp);
8600 arglist = copy_node (arglist);
8601 TREE_OPERAND (exp, 1) = arglist;
8602 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8603 }
8604 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
b089937a 8605
b93a436e
JL
8606 /* Make a suitable register to place result in. */
8607 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7565a035 8608
b93a436e
JL
8609 emit_queue ();
8610 start_sequence ();
7565a035 8611
b93a436e
JL
8612 switch (DECL_FUNCTION_CODE (fndecl))
8613 {
8614 case BUILT_IN_SIN:
8615 builtin_optab = sin_optab; break;
8616 case BUILT_IN_COS:
8617 builtin_optab = cos_optab; break;
8618 case BUILT_IN_FSQRT:
8619 builtin_optab = sqrt_optab; break;
8620 default:
8621 abort ();
8622 }
4ed67205 8623
b93a436e
JL
8624 /* Compute into TARGET.
8625 Set TARGET to wherever the result comes back. */
8626 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8627 builtin_optab, op0, target, 0);
8628
8629 /* If we were unable to expand via the builtin, stop the
8630 sequence (without outputting the insns) and break, causing
38e01259 8631 a call to the library function. */
b93a436e 8632 if (target == 0)
4ed67205 8633 {
b93a436e
JL
8634 end_sequence ();
8635 break;
8636 }
4ed67205 8637
b93a436e
JL
8638 /* Check the results by default. But if flag_fast_math is turned on,
8639 then assume sqrt will always be called with valid arguments. */
4ed67205 8640
b93a436e
JL
8641 if (! flag_fast_math)
8642 {
8643 /* Don't define the builtin FP instructions
8644 if your machine is not IEEE. */
8645 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8646 abort ();
4ed67205 8647
b93a436e 8648 lab1 = gen_label_rtx ();
ca55abae 8649
b93a436e
JL
8650 /* Test the result; if it is NaN, set errno=EDOM because
8651 the argument was not in the domain. */
8652 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8653 emit_jump_insn (gen_beq (lab1));
8654
8655#ifdef TARGET_EDOM
8656 {
8657#ifdef GEN_ERRNO_RTX
8658 rtx errno_rtx = GEN_ERRNO_RTX;
8659#else
8660 rtx errno_rtx
8661 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8662#endif
e87b4f3f 8663
b93a436e
JL
8664 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8665 }
8666#else
8667 /* We can't set errno=EDOM directly; let the library call do it.
8668 Pop the arguments right away in case the call gets deleted. */
8669 NO_DEFER_POP;
8670 expand_call (exp, target, 0);
8671 OK_DEFER_POP;
8672#endif
e7c33f54 8673
b93a436e
JL
8674 emit_label (lab1);
8675 }
0006469d 8676
b93a436e
JL
8677 /* Output the entire sequence. */
8678 insns = get_insns ();
8679 end_sequence ();
8680 emit_insns (insns);
8681
8682 return target;
0006469d 8683
b93a436e
JL
8684 case BUILT_IN_FMOD:
8685 break;
0006469d 8686
b93a436e
JL
8687 /* __builtin_apply_args returns block of memory allocated on
8688 the stack into which is stored the arg pointer, structure
8689 value address, static chain, and all the registers that might
8690 possibly be used in performing a function call. The code is
8691 moved to the start of the function so the incoming values are
8692 saved. */
8693 case BUILT_IN_APPLY_ARGS:
8694 /* Don't do __builtin_apply_args more than once in a function.
8695 Save the result of the first call and reuse it. */
8696 if (apply_args_value != 0)
8697 return apply_args_value;
8698 {
8699 /* When this function is called, it means that registers must be
8700 saved on entry to this function. So we migrate the
8701 call to the first insn of this function. */
8702 rtx temp;
8703 rtx seq;
0006469d 8704
b93a436e
JL
8705 start_sequence ();
8706 temp = expand_builtin_apply_args ();
8707 seq = get_insns ();
8708 end_sequence ();
0006469d 8709
b93a436e 8710 apply_args_value = temp;
0006469d 8711
b93a436e
JL
8712 /* Put the sequence after the NOTE that starts the function.
8713 If this is inside a SEQUENCE, make the outer-level insn
8714 chain current, so the code is placed at the start of the
8715 function. */
8716 push_topmost_sequence ();
8717 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8718 pop_topmost_sequence ();
8719 return temp;
8720 }
0006469d 8721
b93a436e
JL
8722 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8723 FUNCTION with a copy of the parameters described by
8724 ARGUMENTS, and ARGSIZE. It returns a block of memory
8725 allocated on the stack into which is stored all the registers
8726 that might possibly be used for returning the result of a
8727 function. ARGUMENTS is the value returned by
8728 __builtin_apply_args. ARGSIZE is the number of bytes of
8729 arguments that must be copied. ??? How should this value be
8730 computed? We'll also need a safe worst case value for varargs
8731 functions. */
8732 case BUILT_IN_APPLY:
8733 if (arglist == 0
8734 /* Arg could be non-pointer if user redeclared this fcn wrong. */
e5e809f4 8735 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
b93a436e
JL
8736 || TREE_CHAIN (arglist) == 0
8737 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8738 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8739 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8740 return const0_rtx;
8741 else
8742 {
8743 int i;
8744 tree t;
8745 rtx ops[3];
0006469d 8746
b93a436e
JL
8747 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8748 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
bbf6f052 8749
b93a436e
JL
8750 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8751 }
bbf6f052 8752
b93a436e
JL
8753 /* __builtin_return (RESULT) causes the function to return the
8754 value described by RESULT. RESULT is address of the block of
8755 memory returned by __builtin_apply. */
8756 case BUILT_IN_RETURN:
8757 if (arglist
8758 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8759 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8760 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8761 NULL_RTX, VOIDmode, 0));
8762 return const0_rtx;
ca695ac9 8763
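#if 0 /* Usage sketch for the three untyped-call builtins above
	 (illustrative; forward_to is hypothetical, and the ARGSIZE
	 of 64 is a made-up worst case, cf. the ??? comment).  */
static void
forward_all_args ()
{
  void *args = __builtin_apply_args ();
  void *result = __builtin_apply ((void (*) ()) forward_to, args, 64);
  __builtin_return (result);
}
#endif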
b93a436e
JL
8764 case BUILT_IN_SAVEREGS:
8765 /* Don't do __builtin_saveregs more than once in a function.
8766 Save the result of the first call and reuse it. */
8767 if (saveregs_value != 0)
8768 return saveregs_value;
8769 {
8770 /* When this function is called, it means that registers must be
8771 saved on entry to this function. So we migrate the
8772 call to the first insn of this function. */
8773 rtx temp;
8774 rtx seq;
ca695ac9 8775
b93a436e
JL
8776 /* Now really call the function. `expand_call' does not call
8777 expand_builtin, so there is no danger of infinite recursion here. */
8778 start_sequence ();
ca695ac9 8779
b93a436e
JL
8780#ifdef EXPAND_BUILTIN_SAVEREGS
8781 /* Do whatever the machine needs done in this case. */
8782 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8783#else
8784 /* The register where the function returns its value
8785 is likely to have something else in it, such as an argument.
8786 So preserve that register around the call. */
ca695ac9 8787
b93a436e
JL
8788 if (value_mode != VOIDmode)
8789 {
8790 rtx valreg = hard_libcall_value (value_mode);
8791 rtx saved_valreg = gen_reg_rtx (value_mode);
ca695ac9 8792
b93a436e
JL
8793 emit_move_insn (saved_valreg, valreg);
8794 temp = expand_call (exp, target, ignore);
8795 emit_move_insn (valreg, saved_valreg);
ca695ac9
JB
8796 }
8797 else
b93a436e
JL
8798 /* Generate the call, putting the value in a pseudo. */
8799 temp = expand_call (exp, target, ignore);
8800#endif
bbf6f052 8801
b93a436e
JL
8802 seq = get_insns ();
8803 end_sequence ();
bbf6f052 8804
b93a436e 8805 saveregs_value = temp;
bbf6f052 8806
b93a436e
JL
8807 /* Put the sequence after the NOTE that starts the function.
8808 If this is inside a SEQUENCE, make the outer-level insn
8809 chain current, so the code is placed at the start of the
8810 function. */
8811 push_topmost_sequence ();
8812 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8813 pop_topmost_sequence ();
8814 return temp;
8815 }
bbf6f052 8816
b93a436e
JL
8817 /* __builtin_args_info (N) returns word N of the arg space info
8818 for the current function. The number and meanings of words
8819 is controlled by the definition of CUMULATIVE_ARGS. */
8820 case BUILT_IN_ARGS_INFO:
8821 {
8822 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
b93a436e 8823 int *word_ptr = (int *) &current_function_args_info;
381127e8
RL
8824#if 0
8825 /* These are used by the code below that is if 0'ed away */
8826 int i;
b93a436e 8827 tree type, elts, result;
381127e8 8828#endif
bbf6f052 8829
b93a436e
JL
8830 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8831 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8832 __FILE__, __LINE__);
bbf6f052 8833
b93a436e
JL
8834 if (arglist != 0)
8835 {
8836 tree arg = TREE_VALUE (arglist);
8837 if (TREE_CODE (arg) != INTEGER_CST)
8838 error ("argument of `__builtin_args_info' must be constant");
8839 else
8840 {
8841 int wordnum = TREE_INT_CST_LOW (arg);
bbf6f052 8842
b93a436e
JL
8843 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8844 error ("argument of `__builtin_args_info' out of range");
8845 else
8846 return GEN_INT (word_ptr[wordnum]);
8847 }
bbf6f052
RK
8848 }
8849 else
b93a436e 8850 error ("missing argument in `__builtin_args_info'");
bbf6f052 8851
b93a436e 8852 return const0_rtx;
bbf6f052 8853
b93a436e
JL
8854#if 0
8855 for (i = 0; i < nwords; i++)
8856 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
bbf6f052 8857
b93a436e
JL
8858 type = build_array_type (integer_type_node,
8859 build_index_type (build_int_2 (nwords, 0)));
8860 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8861 TREE_CONSTANT (result) = 1;
8862 TREE_STATIC (result) = 1;
8863 result = build (INDIRECT_REF, build_pointer_type (type), result);
8864 TREE_CONSTANT (result) = 1;
8865 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8866#endif
8867 }
8868
8869 /* Return the address of the first anonymous stack arg. */
8870 case BUILT_IN_NEXT_ARG:
ca695ac9 8871 {
b93a436e
JL
8872 tree fntype = TREE_TYPE (current_function_decl);
8873
8874 if ((TYPE_ARG_TYPES (fntype) == 0
8875 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8876 == void_type_node))
8877 && ! current_function_varargs)
8878 {
8879 error ("`va_start' used in function with fixed args");
8880 return const0_rtx;
8881 }
8882
8883 if (arglist)
8884 {
8885 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8886 tree arg = TREE_VALUE (arglist);
8887
8888 /* Strip off all nops for the sake of the comparison. This
8889 is not quite the same as STRIP_NOPS. It does more.
8890 We must also strip off INDIRECT_EXPR for C++ reference
8891 parameters. */
8892 while (TREE_CODE (arg) == NOP_EXPR
8893 || TREE_CODE (arg) == CONVERT_EXPR
8894 || TREE_CODE (arg) == NON_LVALUE_EXPR
8895 || TREE_CODE (arg) == INDIRECT_REF)
8896 arg = TREE_OPERAND (arg, 0);
8897 if (arg != last_parm)
8898 warning ("second parameter of `va_start' not last named argument");
8899 }
8900 else if (! current_function_varargs)
8901 /* Evidently an out of date version of <stdarg.h>; can't validate
8902 va_start's second argument, but can still work as intended. */
8903 warning ("`__builtin_next_arg' called without an argument");
bbf6f052
RK
8904 }
8905
b93a436e
JL
8906 return expand_binop (Pmode, add_optab,
8907 current_function_internal_arg_pointer,
8908 current_function_arg_offset_rtx,
8909 NULL_RTX, 0, OPTAB_LIB_WIDEN);
ca695ac9 8910
b93a436e
JL
8911 case BUILT_IN_CLASSIFY_TYPE:
8912 if (arglist != 0)
8913 {
8914 tree type = TREE_TYPE (TREE_VALUE (arglist));
8915 enum tree_code code = TREE_CODE (type);
8916 if (code == VOID_TYPE)
8917 return GEN_INT (void_type_class);
8918 if (code == INTEGER_TYPE)
8919 return GEN_INT (integer_type_class);
8920 if (code == CHAR_TYPE)
8921 return GEN_INT (char_type_class);
8922 if (code == ENUMERAL_TYPE)
8923 return GEN_INT (enumeral_type_class);
8924 if (code == BOOLEAN_TYPE)
8925 return GEN_INT (boolean_type_class);
8926 if (code == POINTER_TYPE)
8927 return GEN_INT (pointer_type_class);
8928 if (code == REFERENCE_TYPE)
8929 return GEN_INT (reference_type_class);
8930 if (code == OFFSET_TYPE)
8931 return GEN_INT (offset_type_class);
8932 if (code == REAL_TYPE)
8933 return GEN_INT (real_type_class);
8934 if (code == COMPLEX_TYPE)
8935 return GEN_INT (complex_type_class);
8936 if (code == FUNCTION_TYPE)
8937 return GEN_INT (function_type_class);
8938 if (code == METHOD_TYPE)
8939 return GEN_INT (method_type_class);
8940 if (code == RECORD_TYPE)
8941 return GEN_INT (record_type_class);
8942 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8943 return GEN_INT (union_type_class);
8944 if (code == ARRAY_TYPE)
8945 {
8946 if (TYPE_STRING_FLAG (type))
8947 return GEN_INT (string_type_class);
8948 else
8949 return GEN_INT (array_type_class);
8950 }
8951 if (code == SET_TYPE)
8952 return GEN_INT (set_type_class);
8953 if (code == FILE_TYPE)
8954 return GEN_INT (file_type_class);
8955 if (code == LANG_TYPE)
8956 return GEN_INT (lang_type_class);
8957 }
8958 return GEN_INT (no_type_class);
ca695ac9 8959
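/* Examples (illustrative): __builtin_classify_type of an int
   argument folds to integer_type_class, of a double to
   real_type_class, of a struct to record_type_class; the values
   come from the type_class enumeration in typeclass.h.  */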
b93a436e
JL
8960 case BUILT_IN_CONSTANT_P:
8961 if (arglist == 0)
8962 return const0_rtx;
8963 else
8964 {
8965 tree arg = TREE_VALUE (arglist);
ca695ac9 8966
b93a436e 8967 STRIP_NOPS (arg);
cff48d8f
RH
8968 if (really_constant_p (arg)
8969 || (TREE_CODE (arg) == ADDR_EXPR
8970 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
8971 return const1_rtx;
8972
8973 /* Only emit CONSTANT_P_RTX if CSE will be run.
8974 Moreover, we don't want to expand trees that have side effects,
8975 as the original __builtin_constant_p did not evaluate its
8976 argument at all, and we would break existing usage by changing
8977 this. This quirk was generally useful, eliminating a bit of hair
8978 in the writing of the macros that use this function. Now the
8979 same thing can be better accomplished in an inline function. */
8980
8981 if (! cse_not_expected && ! TREE_SIDE_EFFECTS (arg))
8982 {
8983 /* Lazy fixup of old code: issue a warning and fail the test. */
8984 if (! can_handle_constant_p)
8985 {
8986 warning ("Delayed evaluation of __builtin_constant_p not supported on this target.");
8987 warning ("Please report this as a bug to egcs-bugs@cygnus.com.");
8988 return const0_rtx;
8989 }
8990 return gen_rtx_CONSTANT_P_RTX (TYPE_MODE (integer_type_node),
8991 expand_expr (arg, NULL_RTX,
8992 VOIDmode, 0));
8993 }
8994
8995 return const0_rtx;
b93a436e 8996 }
ca695ac9 8997
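/* Examples (illustrative): __builtin_constant_p (3 * 7) satisfies
   really_constant_p and folds to 1 here; for an ordinary variable x,
   __builtin_constant_p (x) becomes 0, or a CONSTANT_P_RTX for CSE to
   resolve when optimizing on a target that can handle it.  */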
b93a436e
JL
8998 case BUILT_IN_FRAME_ADDRESS:
8999 /* The argument must be a nonnegative integer constant.
9000 It counts the number of frames to scan up the stack.
9001 The value is the address of that frame. */
9002 case BUILT_IN_RETURN_ADDRESS:
9003 /* The argument must be a nonnegative integer constant.
9004 It counts the number of frames to scan up the stack.
9005 The value is the return address saved in that frame. */
9006 if (arglist == 0)
9007 /* Warning about missing arg was already issued. */
9008 return const0_rtx;
9009 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
9010 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
9011 {
9012 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9013 error ("invalid arg to `__builtin_frame_address'");
9014 else
9015 error ("invalid arg to `__builtin_return_address'");
9016 return const0_rtx;
9017 }
9018 else
9019 {
9020 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
9021 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
9022 hard_frame_pointer_rtx);
ee33823f 9023
b93a436e
JL
9024 /* Some ports cannot access arbitrary stack frames. */
9025 if (tem == NULL)
9026 {
9027 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9028 warning ("unsupported arg to `__builtin_frame_address'");
9029 else
9030 warning ("unsupported arg to `__builtin_return_address'");
9031 return const0_rtx;
9032 }
ee33823f 9033
b93a436e
JL
9034 /* For __builtin_frame_address, return what we've got. */
9035 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9036 return tem;
ee33823f 9037
b93a436e
JL
9038 if (GET_CODE (tem) != REG)
9039 tem = copy_to_reg (tem);
9040 return tem;
9041 }
ee33823f 9042
b93a436e
JL
9043 /* Returns the address of the area where the structure is returned.
9044 0 otherwise. */
9045 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9046 if (arglist != 0
9047 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9048 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
9049 return const0_rtx;
9050 else
9051 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
ee33823f 9052
b93a436e
JL
9053 case BUILT_IN_ALLOCA:
9054 if (arglist == 0
9055 /* Arg could be non-integer if user redeclared this fcn wrong. */
9056 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9057 break;
bbf6f052 9058
b93a436e
JL
9059 /* Compute the argument. */
9060 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
bbf6f052 9061
b93a436e
JL
9062 /* Allocate the desired space. */
9063 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
ca695ac9 9064
b93a436e
JL
9065 case BUILT_IN_FFS:
9066 /* If not optimizing, call the library function. */
9067 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9068 break;
ca695ac9 9069
b93a436e
JL
9070 if (arglist == 0
9071 /* Arg could be non-integer if user redeclared this fcn wrong. */
9072 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9073 break;
ca695ac9 9074
b93a436e
JL
9075 /* Compute the argument. */
9076 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
9077 /* Compute ffs, into TARGET if possible.
9078 Set TARGET to wherever the result comes back. */
9079 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
9080 ffs_optab, op0, target, 1);
9081 if (target == 0)
9082 abort ();
9083 return target;
bbf6f052 9084
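/* Example (illustrative): ffs returns the 1-based index of the
   least significant set bit, so __builtin_ffs (0x18) expands via
   ffs_optab to 4, and __builtin_ffs (0) to 0.  */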
b93a436e
JL
9085 case BUILT_IN_STRLEN:
9086 /* If not optimizing, call the library function. */
9087 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9088 break;
bbf6f052 9089
b93a436e
JL
9090 if (arglist == 0
9091 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9092 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9093 break;
9094 else
9095 {
9096 tree src = TREE_VALUE (arglist);
9097 tree len = c_strlen (src);
bbf6f052 9098
b93a436e
JL
9099 int align
9100 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
46b68a37 9101
b93a436e
JL
9102 rtx result, src_rtx, char_rtx;
9103 enum machine_mode insn_mode = value_mode, char_mode;
9104 enum insn_code icode;
46b68a37 9105
b93a436e
JL
9106 /* If the length is known, just return it. */
9107 if (len != 0)
9108 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
956d6950 9109
b93a436e
JL
9110 /* If SRC is not a pointer type, don't do this operation inline. */
9111 if (align == 0)
9112 break;
bbf6f052 9113
b93a436e 9114 /* Call a function if we can't compute strlen in the right mode. */
bbf6f052 9115
b93a436e
JL
9116 while (insn_mode != VOIDmode)
9117 {
9118 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
9119 if (icode != CODE_FOR_nothing)
9120 break;
ca695ac9 9121
b93a436e
JL
9122 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
9123 }
9124 if (insn_mode == VOIDmode)
9125 break;
ca695ac9 9126
b93a436e
JL
9127 /* Make a place to write the result of the instruction. */
9128 result = target;
9129 if (! (result != 0
9130 && GET_CODE (result) == REG
9131 && GET_MODE (result) == insn_mode
9132 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9133 result = gen_reg_rtx (insn_mode);
ca695ac9 9134
b93a436e 9135 /* Make sure the operands are acceptable to the predicates. */
ca695ac9 9136
b93a436e
JL
9137 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
9138 result = gen_reg_rtx (insn_mode);
9139 src_rtx = memory_address (BLKmode,
9140 expand_expr (src, NULL_RTX, ptr_mode,
9141 EXPAND_NORMAL));
bbf6f052 9142
b93a436e
JL
9143 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
9144 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
bbf6f052 9145
b93a436e
JL
9146 /* Check the string is readable and has an end. */
9147 if (flag_check_memory_usage)
9148 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
9149 src_rtx, ptr_mode,
9150 GEN_INT (MEMORY_USE_RO),
9151 TYPE_MODE (integer_type_node));
bbf6f052 9152
b93a436e
JL
9153 char_rtx = const0_rtx;
9154 char_mode = insn_operand_mode[(int)icode][2];
9155 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
9156 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
bbf6f052 9157
b93a436e
JL
9158 emit_insn (GEN_FCN (icode) (result,
9159 gen_rtx_MEM (BLKmode, src_rtx),
9160 char_rtx, GEN_INT (align)));
bbf6f052 9161
b93a436e
JL
9162 /* Return the value in the proper mode for this function. */
9163 if (GET_MODE (result) == value_mode)
9164 return result;
9165 else if (target != 0)
9166 {
9167 convert_move (target, result, 0);
9168 return target;
9169 }
9170 else
9171 return convert_to_mode (value_mode, result, 0);
9172 }
bbf6f052 9173
b93a436e
JL
9174 case BUILT_IN_STRCPY:
9175 /* If not optimizing, call the library function. */
9176 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9177 break;
bbf6f052 9178
b93a436e
JL
9179 if (arglist == 0
9180 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9181 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9182 || TREE_CHAIN (arglist) == 0
9183 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9184 break;
9185 else
9186 {
9187 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
bbf6f052 9188
b93a436e
JL
9189 if (len == 0)
9190 break;
bbf6f052 9191
b93a436e 9192 len = size_binop (PLUS_EXPR, len, integer_one_node);
6d100794 9193
b93a436e
JL
9194 chainon (arglist, build_tree_list (NULL_TREE, len));
9195 }
6d100794 9196
b93a436e
JL
9197 /* Drops through into the BUILT_IN_MEMCPY case below. */
9198 case BUILT_IN_MEMCPY:
9199 /* If not optimizing, call the library function. */
9200 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9201 break;
e7c33f54 9202
b93a436e
JL
9203 if (arglist == 0
9204 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9205 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9206 || TREE_CHAIN (arglist) == 0
9207 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9208 != POINTER_TYPE)
9209 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9210 || (TREE_CODE (TREE_TYPE (TREE_VALUE
9211 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9212 != INTEGER_TYPE))
9213 break;
9214 else
9215 {
9216 tree dest = TREE_VALUE (arglist);
9217 tree src = TREE_VALUE (TREE_CHAIN (arglist));
9218 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
e7c33f54 9219
b93a436e
JL
9220 int src_align
9221 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9222 int dest_align
9223 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
55a6ba9f 9224 rtx dest_mem, src_mem, dest_addr, len_rtx;
e7c33f54 9225
b93a436e
JL
9226 /* If either SRC or DEST is not a pointer type, don't do
9227 this operation in-line. */
9228 if (src_align == 0 || dest_align == 0)
9229 {
9230 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9231 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9232 break;
9233 }
e7c33f54 9234
55a6ba9f
JC
9235 dest_mem = get_memory_rtx (dest);
9236 src_mem = get_memory_rtx (src);
b93a436e 9237 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
e7c33f54 9238
b93a436e
JL
9239 /* Just copy the access rights of SRC to those of DEST. */
9240 if (flag_check_memory_usage)
9241 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
55a6ba9f
JC
9242 XEXP (dest_mem, 0), ptr_mode,
9243 XEXP (src_mem, 0), ptr_mode,
b93a436e 9244 len_rtx, TYPE_MODE (sizetype));
e7c33f54 9245
b93a436e
JL
9246 /* Copy word part most expediently. */
9247 dest_addr
9248 = emit_block_move (dest_mem, src_mem, len_rtx,
9249 MIN (src_align, dest_align));
e7c33f54 9250
b93a436e 9251 if (dest_addr == 0)
55a6ba9f 9252 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
e7c33f54 9253
b93a436e
JL
9254 return dest_addr;
9255 }
e7c33f54 9256
b93a436e
JL
9257 case BUILT_IN_MEMSET:
9258 /* If not optimizing, call the library function. */
9259 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9260 break;
e7c33f54 9261
b93a436e
JL
9262 if (arglist == 0
9263 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9264 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9265 || TREE_CHAIN (arglist) == 0
9266 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9267 != INTEGER_TYPE)
9268 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9269 || (INTEGER_TYPE
9270 != (TREE_CODE (TREE_TYPE
9271 (TREE_VALUE
9272 (TREE_CHAIN (TREE_CHAIN (arglist))))))))
9273 break;
9274 else
9275 {
9276 tree dest = TREE_VALUE (arglist);
9277 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9278 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
e7c33f54 9279
b93a436e
JL
9280 int dest_align
9281 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
55a6ba9f 9282 rtx dest_mem, dest_addr, len_rtx;
e7c33f54 9283
b93a436e
JL
9284 /* If DEST is not a pointer type, don't do this
9285 operation in-line. */
9286 if (dest_align == 0)
9287 break;
bbf6f052 9288
bf931ec8
JW
9289 /* If the arguments have side-effects, then we can only evaluate
9290 them at most once. The following code evaluates them twice if
9291 they are not constants because we break out to expand_call
9292 in that case. They can't be constants if they have side-effects
9293 so we can check for that first. Alternatively, we could call
9294 save_expr to make multiple evaluation safe. */
9295 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9296 break;
9297
b93a436e
JL
9298 /* If VAL is not 0, don't do this operation in-line. */
9299 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9300 break;
bbf6f052 9301
b93a436e
JL
9302 /* If LEN does not expand to a constant, don't do this
9303 operation in-line. */
9304 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9305 if (GET_CODE (len_rtx) != CONST_INT)
9306 break;
bbf6f052 9307
55a6ba9f 9308 dest_mem = get_memory_rtx (dest);
b93a436e
JL
9309
9310 /* Just check that DEST is writable, and mark it as readable. */
9311 if (flag_check_memory_usage)
9312 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
55a6ba9f 9313 XEXP (dest_mem, 0), ptr_mode,
b93a436e
JL
9314 len_rtx, TYPE_MODE (sizetype),
9315 GEN_INT (MEMORY_USE_WO),
9316 TYPE_MODE (integer_type_node));
bbf6f052 9317
bbf6f052 9318
b93a436e 9319 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
bbf6f052 9320
b93a436e 9321 if (dest_addr == 0)
55a6ba9f 9322 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
bbf6f052 9323
b93a436e
JL
9324 return dest_addr;
9325 }
bbf6f052 9326
b93a436e
JL
9327/* These comparison functions need an instruction that returns an actual
9328 index. An ordinary compare that just sets the condition codes
9329 is not enough. */
9330#ifdef HAVE_cmpstrsi
9331 case BUILT_IN_STRCMP:
9332 /* If not optimizing, call the library function. */
9333 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9334 break;
bbf6f052 9335
b93a436e
JL
9336 /* If we need to check memory accesses, call the library function. */
9337 if (flag_check_memory_usage)
9338 break;
bbf6f052 9339
b93a436e
JL
9340 if (arglist == 0
9341 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9342 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9343 || TREE_CHAIN (arglist) == 0
9344 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9345 break;
9346 else if (!HAVE_cmpstrsi)
9347 break;
9348 {
9349 tree arg1 = TREE_VALUE (arglist);
9350 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
b93a436e 9351 tree len, len2;
a97f5a86 9352
b93a436e
JL
9353 len = c_strlen (arg1);
9354 if (len)
9355 len = size_binop (PLUS_EXPR, integer_one_node, len);
9356 len2 = c_strlen (arg2);
9357 if (len2)
9358 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
e9cdf6e4 9359
b93a436e
JL
9360 /* If we don't have a constant length for the first, use the length
9361 of the second, if we know it. We don't require a constant for
9362 this case; some cost analysis could be done if both are available
9363 but neither is constant. For now, assume they're equally cheap.
e9cdf6e4 9364
b93a436e
JL
9365 If both strings have constant lengths, use the smaller. This
9366 could arise if optimization results in strcpy being called with
9367 two fixed strings, or if the code was machine-generated. We should
9368 add some code to the `memcmp' handler below to deal with such
9369 situations, someday. */
9370 if (!len || TREE_CODE (len) != INTEGER_CST)
9371 {
9372 if (len2)
9373 len = len2;
9374 else if (len == 0)
9375 break;
9376 }
9377 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9378 {
9379 if (tree_int_cst_lt (len2, len))
9380 len = len2;
9381 }
bbf6f052 9382
b93a436e
JL
9383 chainon (arglist, build_tree_list (NULL_TREE, len));
9384 }
bbf6f052 9385
b93a436e
JL
9386 /* Drops through into the BUILT_IN_MEMCMP case below. */
9387 case BUILT_IN_MEMCMP:
9388 /* If not optimizing, call the library function. */
9389 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9390 break;
bbf6f052 9391
b93a436e
JL
9392 /* If we need to check memory accesses, call the library function. */
9393 if (flag_check_memory_usage)
9394 break;
bbf6f052 9395
b93a436e
JL
9396 if (arglist == 0
9397 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9398 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9399 || TREE_CHAIN (arglist) == 0
9400 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9401 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9402 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9403 break;
9404 else if (!HAVE_cmpstrsi)
9405 break;
9406 {
9407 tree arg1 = TREE_VALUE (arglist);
9408 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9409 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9410 rtx result;
0842a179 9411
b93a436e
JL
9412 int arg1_align
9413 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9414 int arg2_align
9415 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9416 enum machine_mode insn_mode
9417 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
0842a179 9418
b93a436e
JL
9419 /* If we don't have POINTER_TYPE, call the function. */
9420 if (arg1_align == 0 || arg2_align == 0)
9421 {
9422 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9423 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9424 break;
9425 }
bbf6f052 9426
b93a436e
JL
9427 /* Make a place to write the result of the instruction. */
9428 result = target;
9429 if (! (result != 0
9430 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9431 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9432 result = gen_reg_rtx (insn_mode);
bbf6f052 9433
55a6ba9f
JC
9434 emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9435 get_memory_rtx (arg2),
b93a436e
JL
9436 expand_expr (len, NULL_RTX, VOIDmode, 0),
9437 GEN_INT (MIN (arg1_align, arg2_align))));
bbf6f052 9438
b93a436e
JL
9439 /* Return the value in the proper mode for this function. */
9440 mode = TYPE_MODE (TREE_TYPE (exp));
9441 if (GET_MODE (result) == mode)
9442 return result;
9443 else if (target != 0)
9444 {
9445 convert_move (target, result, 0);
9446 return target;
9447 }
9448 else
9449 return convert_to_mode (mode, result, 0);
9450 }
9451#else
9452 case BUILT_IN_STRCMP:
9453 case BUILT_IN_MEMCMP:
9454 break;
9455#endif
bbf6f052 9456
b93a436e
JL
9457 case BUILT_IN_SETJMP:
9458 if (arglist == 0
9459 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9460 break;
6fd1c67b
RH
9461 else
9462 {
9463 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9464 VOIDmode, 0);
9465 rtx lab = gen_label_rtx ();
9466 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
9467 emit_label (lab);
9468 return ret;
9469 }
bbf6f052 9470
6fd1c67b
RH
9471 /* __builtin_longjmp is passed a pointer to an array of five words.
9472 It's similar to the C library longjmp function but works with
9473 __builtin_setjmp above. */
b93a436e
JL
9474 case BUILT_IN_LONGJMP:
9475 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9476 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9477 break;
b93a436e 9478 else
b93a436e 9479 {
6fd1c67b
RH
9480 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9481 VOIDmode, 0);
9482 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
3e2b9a3d 9483 NULL_RTX, VOIDmode, 0);
e0cd0770
JC
9484
9485 if (value != const1_rtx)
9486 {
9487 error ("__builtin_longjmp second argument must be 1");
9488 return const0_rtx;
9489 }
9490
6fd1c67b
RH
9491 expand_builtin_longjmp (buf_addr, value);
9492 return const0_rtx;
b93a436e 9493 }
bbf6f052 9494
e0cd0770
JC
9495 case BUILT_IN_TRAP:
9496#ifdef HAVE_trap
9497 if (HAVE_trap)
9498 emit_insn (gen_trap ());
9499 else
9500#endif
9501 error ("__builtin_trap not supported by this target");
9502 emit_barrier ();
9503 return const0_rtx;
9504
b93a436e
JL
9505 /* Various hooks for the DWARF 2 __throw routine. */
9506 case BUILT_IN_UNWIND_INIT:
9507 expand_builtin_unwind_init ();
9508 return const0_rtx;
71038426
RH
9509 case BUILT_IN_DWARF_CFA:
9510 return virtual_cfa_rtx;
b93a436e
JL
9511#ifdef DWARF2_UNWIND_INFO
9512 case BUILT_IN_DWARF_FP_REGNUM:
9513 return expand_builtin_dwarf_fp_regnum ();
9514 case BUILT_IN_DWARF_REG_SIZE:
9515 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
fb2ca25a 9516#endif
b93a436e
JL
9517 case BUILT_IN_FROB_RETURN_ADDR:
9518 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9519 case BUILT_IN_EXTRACT_RETURN_ADDR:
9520 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
71038426
RH
9521 case BUILT_IN_EH_RETURN:
9522 expand_builtin_eh_return (TREE_VALUE (arglist),
9523 TREE_VALUE (TREE_CHAIN (arglist)),
9524 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
b93a436e 9525 return const0_rtx;
ca695ac9 9526
b93a436e
JL
9527 default: /* just do library call, if unknown builtin */
9528 error ("built-in function `%s' not currently supported",
9529 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
ca695ac9 9530 }
0006469d 9531
b93a436e
JL
9532 /* The switch statement above can drop through to cause the function
9533 to be called normally. */
0006469d 9534
b93a436e 9535 return expand_call (exp, target, ignore);
ca695ac9 9536}
b93a436e
JL
9537\f
9538/* Built-in functions to perform an untyped call and return. */
0006469d 9539
b93a436e
JL
9540/* For each register that may be used for calling a function, this
9541 gives a mode used to copy the register's value. VOIDmode indicates
9542 the register is not used for calling a function. If the machine
9543 has register windows, this gives only the outbound registers.
9544 INCOMING_REGNO gives the corresponding inbound register. */
9545static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
0006469d 9546
b93a436e
JL
9547/* For each register that may be used for returning values, this gives
9548 a mode used to copy the register's value. VOIDmode indicates the
9549 register is not used for returning values. If the machine has
9550 register windows, this gives only the outbound registers.
9551 INCOMING_REGNO gives the corresponding inbound register. */
9552static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
0006469d 9553
b93a436e
JL
9554/* For each register that may be used for calling a function, this
9555 gives the offset of that register into the block returned by
9556 __builtin_apply_args. 0 indicates that the register is not
9557 used for calling a function. */
9558static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9559
9560/* Return the offset of register REGNO into the block returned by
9561 __builtin_apply_args. This is not declared static, since it is
9562 needed in objc-act.c. */
0006469d 9563
b93a436e
JL
9564int
9565apply_args_register_offset (regno)
9566 int regno;
9567{
9568 apply_args_size ();
0006469d 9569
b93a436e
JL
9570 /* Arguments are always put in outgoing registers (in the argument
9571 block) if such make sense. */
9572#ifdef OUTGOING_REGNO
9573 regno = OUTGOING_REGNO(regno);
9574#endif
9575 return apply_args_reg_offset[regno];
9576}
904762c8 9577
b93a436e
JL
9578/* Return the size required for the block returned by __builtin_apply_args,
9579 and initialize apply_args_mode. */
9580
9581static int
9582apply_args_size ()
0006469d 9583{
b93a436e
JL
9584 static int size = -1;
9585 int align, regno;
2f6e6d22 9586 enum machine_mode mode;
0006469d 9587
b93a436e
JL
9588 /* The values computed by this function never change. */
9589 if (size < 0)
ca695ac9 9590 {
b93a436e
JL
9591 /* The first value is the incoming arg-pointer. */
9592 size = GET_MODE_SIZE (Pmode);
0006469d 9593
b93a436e
JL
9594 /* The second value is the structure value address unless this is
9595 passed as an "invisible" first argument. */
9596 if (struct_value_rtx)
9597 size += GET_MODE_SIZE (Pmode);
0006469d 9598
b93a436e
JL
9599 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9600 if (FUNCTION_ARG_REGNO_P (regno))
9601 {
9602 /* Search for the proper mode for copying this register's
9603 value. I'm not sure this is right, but it works so far. */
9604 enum machine_mode best_mode = VOIDmode;
0006469d 9605
b93a436e
JL
9606 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9607 mode != VOIDmode;
9608 mode = GET_MODE_WIDER_MODE (mode))
9609 if (HARD_REGNO_MODE_OK (regno, mode)
9610 && HARD_REGNO_NREGS (regno, mode) == 1)
9611 best_mode = mode;
0006469d 9612
b93a436e
JL
9613 if (best_mode == VOIDmode)
9614 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9615 mode != VOIDmode;
9616 mode = GET_MODE_WIDER_MODE (mode))
9617 if (HARD_REGNO_MODE_OK (regno, mode)
9618 && (mov_optab->handlers[(int) mode].insn_code
9619 != CODE_FOR_nothing))
9620 best_mode = mode;
0006469d 9621
b93a436e
JL
9622 mode = best_mode;
9623 if (mode == VOIDmode)
9624 abort ();
904762c8 9625
b93a436e
JL
9626 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9627 if (size % align != 0)
9628 size = CEIL (size, align) * align;
9629 apply_args_reg_offset[regno] = size;
9630 size += GET_MODE_SIZE (mode);
9631 apply_args_mode[regno] = mode;
9632 }
9633 else
9634 {
9635 apply_args_mode[regno] = VOIDmode;
9636 apply_args_reg_offset[regno] = 0;
9637 }
9638 }
9639 return size;
9640}
0006469d 9641
b93a436e
JL
9642/* Return the size required for the block returned by __builtin_apply,
9643 and initialize apply_result_mode. */
904762c8 9644
b93a436e
JL
9645static int
9646apply_result_size ()
9647{
9648 static int size = -1;
9649 int align, regno;
9650 enum machine_mode mode;
0006469d 9651
b93a436e
JL
9652 /* The values computed by this function never change. */
9653 if (size < 0)
9654 {
9655 size = 0;
0006469d 9656
b93a436e
JL
9657 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9658 if (FUNCTION_VALUE_REGNO_P (regno))
9659 {
9660 /* Search for the proper mode for copying this register's
9661 value. I'm not sure this is right, but it works so far. */
9662 enum machine_mode best_mode = VOIDmode;
0006469d 9663
b93a436e
JL
9664 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9665 mode != TImode;
9666 mode = GET_MODE_WIDER_MODE (mode))
9667 if (HARD_REGNO_MODE_OK (regno, mode))
9668 best_mode = mode;
0006469d 9669
b93a436e
JL
9670 if (best_mode == VOIDmode)
9671 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9672 mode != VOIDmode;
9673 mode = GET_MODE_WIDER_MODE (mode))
9674 if (HARD_REGNO_MODE_OK (regno, mode)
9675 && (mov_optab->handlers[(int) mode].insn_code
9676 != CODE_FOR_nothing))
9677 best_mode = mode;
0006469d 9678
b93a436e
JL
9679 mode = best_mode;
9680 if (mode == VOIDmode)
9681 abort ();
9682
9683 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9684 if (size % align != 0)
9685 size = CEIL (size, align) * align;
9686 size += GET_MODE_SIZE (mode);
9687 apply_result_mode[regno] = mode;
9688 }
9689 else
9690 apply_result_mode[regno] = VOIDmode;
9691
9692 /* Allow targets that use untyped_call and untyped_return to override
9693 the size so that machine-specific information can be stored here. */
9694#ifdef APPLY_RESULT_SIZE
9695 size = APPLY_RESULT_SIZE;
9696#endif
9697 }
9698 return size;
9699}
0006469d 9700
b93a436e
JL
9701#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9702/* Create a vector describing the result block RESULT. If SAVEP is true,
9703 the result block is used to save the values; otherwise it is used to
9704 restore the values. */
9705
9706static rtx
9707result_vector (savep, result)
9708 int savep;
9709 rtx result;
9710{
9711 int regno, size, align, nelts;
9712 enum machine_mode mode;
9713 rtx reg, mem;
9714 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9715
9716 size = nelts = 0;
9717 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9718 if ((mode = apply_result_mode[regno]) != VOIDmode)
9719 {
9720 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9721 if (size % align != 0)
9722 size = CEIL (size, align) * align;
9723 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9724 mem = change_address (result, mode,
9725 plus_constant (XEXP (result, 0), size));
9726 savevec[nelts++] = (savep
9727 ? gen_rtx_SET (VOIDmode, mem, reg)
9728 : gen_rtx_SET (VOIDmode, reg, mem));
9729 size += GET_MODE_SIZE (mode);
ca695ac9 9730 }
b93a436e
JL
9731 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9732}
9733#endif /* HAVE_untyped_call or HAVE_untyped_return */
0006469d 9734
b93a436e
JL
9735/* Save the state required to perform an untyped call with the same
9736 arguments as were passed to the current function. */
904762c8 9737
b93a436e
JL
9738static rtx
9739expand_builtin_apply_args ()
9740{
9741 rtx registers;
9742 int size, align, regno;
9743 enum machine_mode mode;
0006469d 9744
b93a436e
JL
9745 /* Create a block where the arg-pointer, structure value address,
9746 and argument registers can be saved. */
9747 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
0cb1d109 9748
b93a436e
JL
9749 /* Walk past the arg-pointer and structure value address. */
9750 size = GET_MODE_SIZE (Pmode);
9751 if (struct_value_rtx)
9752 size += GET_MODE_SIZE (Pmode);
0cb1d109 9753
b93a436e
JL
9754 /* Save each register used in calling a function to the block. */
9755 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9756 if ((mode = apply_args_mode[regno]) != VOIDmode)
9757 {
9758 rtx tem;
0cb1d109 9759
b93a436e
JL
9760 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9761 if (size % align != 0)
9762 size = CEIL (size, align) * align;
0006469d 9763
b93a436e 9764 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
0e8c9172 9765
b93a436e
JL
9766#ifdef STACK_REGS
9767 /* For reg-stack.c's stack register household.
9768 Compare with a similar piece of code in function.c. */
0006469d 9769
b93a436e
JL
9770 emit_insn (gen_rtx_USE (mode, tem));
9771#endif
0e8c9172 9772
b93a436e
JL
9773 emit_move_insn (change_address (registers, mode,
9774 plus_constant (XEXP (registers, 0),
9775 size)),
9776 tem);
9777 size += GET_MODE_SIZE (mode);
0e8c9172 9778 }
0006469d 9779
b93a436e
JL
9780 /* Save the arg pointer to the block. */
9781 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9782 copy_to_reg (virtual_incoming_args_rtx));
9783 size = GET_MODE_SIZE (Pmode);
0006469d 9784
b93a436e
JL
9785 /* Save the structure value address unless this is passed as an
9786 "invisible" first argument. */
9787 if (struct_value_incoming_rtx)
9788 {
9789 emit_move_insn (change_address (registers, Pmode,
9790 plus_constant (XEXP (registers, 0),
9791 size)),
9792 copy_to_reg (struct_value_incoming_rtx));
9793 size += GET_MODE_SIZE (Pmode);
9794 }
0006469d 9795
b93a436e
JL
9796 /* Return the address of the block. */
9797 return copy_addr_to_reg (XEXP (registers, 0));
9798}
0006469d 9799
b93a436e
JL
9800/* Perform an untyped call and save the state required to perform an
9801 untyped return of whatever value was returned by the given function. */
0006469d 9802
b93a436e
JL
9803static rtx
9804expand_builtin_apply (function, arguments, argsize)
9805 rtx function, arguments, argsize;
9806{
9807 int size, align, regno;
9808 enum machine_mode mode;
9809 rtx incoming_args, result, reg, dest, call_insn;
9810 rtx old_stack_level = 0;
9811 rtx call_fusage = 0;
0006469d 9812
b93a436e
JL
9813 /* Create a block where the return registers can be saved. */
9814 result = assign_stack_local (BLKmode, apply_result_size (), -1);
9815
9816 /* ??? The argsize value should be adjusted here. */
9817
9818 /* Fetch the arg pointer from the ARGUMENTS block. */
9819 incoming_args = gen_reg_rtx (Pmode);
9820 emit_move_insn (incoming_args,
9821 gen_rtx_MEM (Pmode, arguments));
9822#ifndef STACK_GROWS_DOWNWARD
9823 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
9824 incoming_args, 0, OPTAB_LIB_WIDEN);
9825#endif
9826
9827 /* Perform postincrements before actually calling the function. */
ca695ac9 9828 emit_queue ();
0006469d 9829
b93a436e
JL
9830 /* Push a new argument block and copy the arguments. */
9831 do_pending_stack_adjust ();
0006469d 9832
b93a436e
JL
9833 /* Save the stack with nonlocal if available */
9834#ifdef HAVE_save_stack_nonlocal
9835 if (HAVE_save_stack_nonlocal)
9836 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
9837 else
9838#endif
9839 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
0006469d 9840
b93a436e
JL
9841 /* Push a block of memory onto the stack to store the memory arguments.
9842 Save the address in a register, and copy the memory arguments. ??? I
9843 haven't figured out how the calling convention macros effect this,
9844 but it's likely that the source and/or destination addresses in
9845 the block copy will need updating in machine specific ways. */
9846 dest = allocate_dynamic_stack_space (argsize, 0, 0);
9847 emit_block_move (gen_rtx_MEM (BLKmode, dest),
9848 gen_rtx_MEM (BLKmode, incoming_args),
9849 argsize,
9850 PARM_BOUNDARY / BITS_PER_UNIT);
9851
9852 /* Refer to the argument block. */
9853 apply_args_size ();
9854 arguments = gen_rtx_MEM (BLKmode, arguments);
9855
9856 /* Walk past the arg-pointer and structure value address. */
9857 size = GET_MODE_SIZE (Pmode);
9858 if (struct_value_rtx)
9859 size += GET_MODE_SIZE (Pmode);
9860
9861 /* Restore each of the registers previously saved. Make USE insns
9862 for each of these registers for use in making the call. */
9863 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9864 if ((mode = apply_args_mode[regno]) != VOIDmode)
9865 {
9866 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9867 if (size % align != 0)
9868 size = CEIL (size, align) * align;
9869 reg = gen_rtx_REG (mode, regno);
9870 emit_move_insn (reg,
9871 change_address (arguments, mode,
9872 plus_constant (XEXP (arguments, 0),
9873 size)));
9874
9875 use_reg (&call_fusage, reg);
9876 size += GET_MODE_SIZE (mode);
9877 }
9878
9879 /* Restore the structure value address unless this is passed as an
9880 "invisible" first argument. */
9881 size = GET_MODE_SIZE (Pmode);
9882 if (struct_value_rtx)
0006469d 9883 {
b93a436e
JL
9884 rtx value = gen_reg_rtx (Pmode);
9885 emit_move_insn (value,
9886 change_address (arguments, Pmode,
9887 plus_constant (XEXP (arguments, 0),
9888 size)));
9889 emit_move_insn (struct_value_rtx, value);
9890 if (GET_CODE (struct_value_rtx) == REG)
9891 use_reg (&call_fusage, struct_value_rtx);
9892 size += GET_MODE_SIZE (Pmode);
ca695ac9 9893 }
0006469d 9894
b93a436e
JL
9895 /* All arguments and registers used for the call are set up by now! */
9896 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
0006469d 9897
b93a436e
JL
9898 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
9899 and we don't want to load it into a register as an optimization,
9900 because prepare_call_address already did it if it should be done. */
9901 if (GET_CODE (function) != SYMBOL_REF)
9902 function = memory_address (FUNCTION_MODE, function);
0006469d 9903
b93a436e
JL
9904 /* Generate the actual call instruction and save the return value. */
9905#ifdef HAVE_untyped_call
9906 if (HAVE_untyped_call)
9907 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
9908 result, result_vector (1, result)));
9909 else
9910#endif
9911#ifdef HAVE_call_value
9912 if (HAVE_call_value)
ca695ac9 9913 {
b93a436e 9914 rtx valreg = 0;
0006469d 9915
b93a436e
JL
9916 /* Locate the unique return register. It is not possible to
9917 express a call that sets more than one return register using
9918 call_value; use untyped_call for that. In fact, untyped_call
9919 only needs to save the return registers in the given block. */
9920 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9921 if ((mode = apply_result_mode[regno]) != VOIDmode)
9922 {
9923 if (valreg)
9924 abort (); /* HAVE_untyped_call required. */
9925 valreg = gen_rtx_REG (mode, regno);
9926 }
0006469d 9927
b93a436e
JL
9928 emit_call_insn (gen_call_value (valreg,
9929 gen_rtx_MEM (FUNCTION_MODE, function),
9930 const0_rtx, NULL_RTX, const0_rtx));
0006469d 9931
b93a436e
JL
9932 emit_move_insn (change_address (result, GET_MODE (valreg),
9933 XEXP (result, 0)),
9934 valreg);
ca695ac9 9935 }
b93a436e
JL
9936 else
9937#endif
9938 abort ();
0006469d 9939
b93a436e
JL
9940 /* Find the CALL insn we just emitted. */
9941 for (call_insn = get_last_insn ();
9942 call_insn && GET_CODE (call_insn) != CALL_INSN;
9943 call_insn = PREV_INSN (call_insn))
9944 ;
0006469d 9945
b93a436e
JL
9946 if (! call_insn)
9947 abort ();
0006469d 9948
b93a436e
JL
9949 /* Put the register usage information on the CALL. If there is already
9950 some usage information, put ours at the end. */
9951 if (CALL_INSN_FUNCTION_USAGE (call_insn))
0006469d 9952 {
b93a436e 9953 rtx link;
0006469d 9954
b93a436e
JL
9955 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
9956 link = XEXP (link, 1))
9957 ;
9958
9959 XEXP (link, 1) = call_fusage;
ca695ac9 9960 }
b93a436e
JL
9961 else
9962 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
0006469d 9963
b93a436e
JL
9964 /* Restore the stack. */
9965#ifdef HAVE_save_stack_nonlocal
9966 if (HAVE_save_stack_nonlocal)
9967 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
9968 else
9969#endif
9970 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
9971
9972 /* Return the address of the result block. */
9973 return copy_addr_to_reg (XEXP (result, 0));
0006469d 9974}
bbf6f052 9975
b93a436e 9976/* Perform an untyped return. */
ca695ac9
JB
9977
9978static void
b93a436e
JL
9979expand_builtin_return (result)
9980 rtx result;
bbf6f052 9981{
b93a436e
JL
9982 int size, align, regno;
9983 enum machine_mode mode;
9984 rtx reg;
9985 rtx call_fusage = 0;
bbf6f052 9986
b93a436e
JL
9987 apply_result_size ();
9988 result = gen_rtx_MEM (BLKmode, result);
bbf6f052 9989
b93a436e
JL
9990#ifdef HAVE_untyped_return
9991 if (HAVE_untyped_return)
ca695ac9 9992 {
b93a436e
JL
9993 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
9994 emit_barrier ();
9995 return;
ca695ac9 9996 }
b93a436e 9997#endif
1499e0a8 9998
b93a436e
JL
9999 /* Restore the return value and note that each value is used. */
10000 size = 0;
10001 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10002 if ((mode = apply_result_mode[regno]) != VOIDmode)
10003 {
10004 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10005 if (size % align != 0)
10006 size = CEIL (size, align) * align;
10007 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10008 emit_move_insn (reg,
10009 change_address (result, mode,
10010 plus_constant (XEXP (result, 0),
10011 size)));
10012
10013 push_to_sequence (call_fusage);
10014 emit_insn (gen_rtx_USE (VOIDmode, reg));
10015 call_fusage = get_insns ();
10016 end_sequence ();
10017 size += GET_MODE_SIZE (mode);
10018 }
10019
10020 /* Put the USE insns before the return. */
10021 emit_insns (call_fusage);
10022
10023 /* Return whatever values was restored by jumping directly to the end
10024 of the function. */
10025 expand_null_return ();
ca695ac9
JB
10026}
10027\f
b93a436e
JL
10028/* Expand code for a post- or pre- increment or decrement
10029 and return the RTX for the result.
10030 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
1499e0a8 10031
b93a436e
JL
10032static rtx
10033expand_increment (exp, post, ignore)
10034 register tree exp;
10035 int post, ignore;
ca695ac9 10036{
b93a436e
JL
10037 register rtx op0, op1;
10038 register rtx temp, value;
10039 register tree incremented = TREE_OPERAND (exp, 0);
10040 optab this_optab = add_optab;
10041 int icode;
10042 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
10043 int op0_is_copy = 0;
10044 int single_insn = 0;
10045 /* 1 means we can't store into OP0 directly,
10046 because it is a subreg narrower than a word,
10047 and we don't dare clobber the rest of the word. */
10048 int bad_subreg = 0;
1499e0a8 10049
b93a436e
JL
10050 /* Stabilize any component ref that might need to be
10051 evaluated more than once below. */
10052 if (!post
10053 || TREE_CODE (incremented) == BIT_FIELD_REF
10054 || (TREE_CODE (incremented) == COMPONENT_REF
10055 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
10056 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
10057 incremented = stabilize_reference (incremented);
10058 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
10059 ones into save exprs so that they don't accidentally get evaluated
10060 more than once by the code below. */
10061 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
10062 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
10063 incremented = save_expr (incremented);
e9a25f70 10064
b93a436e
JL
10065 /* Compute the operands as RTX.
10066 Note whether OP0 is the actual lvalue or a copy of it:
10067 I believe it is a copy iff it is a register or subreg
10068 and insns were generated in computing it. */
e9a25f70 10069
b93a436e
JL
10070 temp = get_last_insn ();
10071 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
e9a25f70 10072
b93a436e
JL
10073 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
10074 in place but instead must do sign- or zero-extension during assignment,
10075 so we copy it into a new register and let the code below use it as
10076 a copy.
e9a25f70 10077
b93a436e
JL
10078 Note that we can safely modify this SUBREG since it is know not to be
10079 shared (it was made by the expand_expr call above). */
10080
10081 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
10082 {
10083 if (post)
10084 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
10085 else
10086 bad_subreg = 1;
10087 }
10088 else if (GET_CODE (op0) == SUBREG
10089 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
10090 {
10091 /* We cannot increment this SUBREG in place. If we are
10092 post-incrementing, get a copy of the old value. Otherwise,
10093 just mark that we cannot increment in place. */
10094 if (post)
10095 op0 = copy_to_reg (op0);
10096 else
10097 bad_subreg = 1;
e9a25f70
JL
10098 }
10099
b93a436e
JL
10100 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
10101 && temp != get_last_insn ());
10102 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
10103 EXPAND_MEMORY_USE_BAD);
1499e0a8 10104
b93a436e
JL
10105 /* Decide whether incrementing or decrementing. */
10106 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
10107 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10108 this_optab = sub_optab;
10109
10110 /* Convert decrement by a constant into a negative increment. */
10111 if (this_optab == sub_optab
10112 && GET_CODE (op1) == CONST_INT)
ca695ac9 10113 {
b93a436e
JL
10114 op1 = GEN_INT (- INTVAL (op1));
10115 this_optab = add_optab;
ca695ac9 10116 }
1499e0a8 10117
b93a436e
JL
10118 /* For a preincrement, see if we can do this with a single instruction. */
10119 if (!post)
10120 {
10121 icode = (int) this_optab->handlers[(int) mode].insn_code;
10122 if (icode != (int) CODE_FOR_nothing
10123 /* Make sure that OP0 is valid for operands 0 and 1
10124 of the insn we want to queue. */
10125 && (*insn_operand_predicate[icode][0]) (op0, mode)
10126 && (*insn_operand_predicate[icode][1]) (op0, mode)
10127 && (*insn_operand_predicate[icode][2]) (op1, mode))
10128 single_insn = 1;
10129 }
bbf6f052 10130
b93a436e
JL
10131 /* If OP0 is not the actual lvalue, but rather a copy in a register,
10132 then we cannot just increment OP0. We must therefore contrive to
10133 increment the original value. Then, for postincrement, we can return
10134 OP0 since it is a copy of the old value. For preincrement, expand here
10135 unless we can do it with a single insn.
bbf6f052 10136
b93a436e
JL
10137 Likewise if storing directly into OP0 would clobber high bits
10138 we need to preserve (bad_subreg). */
10139 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 10140 {
b93a436e
JL
10141 /* This is the easiest way to increment the value wherever it is.
10142 Problems with multiple evaluation of INCREMENTED are prevented
10143 because either (1) it is a component_ref or preincrement,
10144 in which case it was stabilized above, or (2) it is an array_ref
10145 with constant index in an array in a register, which is
10146 safe to reevaluate. */
10147 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
10148 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10149 ? MINUS_EXPR : PLUS_EXPR),
10150 TREE_TYPE (exp),
10151 incremented,
10152 TREE_OPERAND (exp, 1));
a358cee0 10153
b93a436e
JL
10154 while (TREE_CODE (incremented) == NOP_EXPR
10155 || TREE_CODE (incremented) == CONVERT_EXPR)
10156 {
10157 newexp = convert (TREE_TYPE (incremented), newexp);
10158 incremented = TREE_OPERAND (incremented, 0);
10159 }
bbf6f052 10160
b93a436e
JL
10161 temp = expand_assignment (incremented, newexp, ! post && ! ignore , 0);
10162 return post ? op0 : temp;
10163 }
bbf6f052 10164
b93a436e
JL
10165 if (post)
10166 {
10167 /* We have a true reference to the value in OP0.
10168 If there is an insn to add or subtract in this mode, queue it.
10169 Queueing the increment insn avoids the register shuffling
10170 that often results if we must increment now and first save
10171 the old value for subsequent use. */
bbf6f052 10172
b93a436e
JL
10173#if 0 /* Turned off to avoid making extra insn for indexed memref. */
10174 op0 = stabilize (op0);
10175#endif
41dfd40c 10176
b93a436e
JL
10177 icode = (int) this_optab->handlers[(int) mode].insn_code;
10178 if (icode != (int) CODE_FOR_nothing
10179 /* Make sure that OP0 is valid for operands 0 and 1
10180 of the insn we want to queue. */
10181 && (*insn_operand_predicate[icode][0]) (op0, mode)
10182 && (*insn_operand_predicate[icode][1]) (op0, mode))
10183 {
10184 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10185 op1 = force_reg (mode, op1);
bbf6f052 10186
b93a436e
JL
10187 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10188 }
10189 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10190 {
10191 rtx addr = (general_operand (XEXP (op0, 0), mode)
10192 ? force_reg (Pmode, XEXP (op0, 0))
10193 : copy_to_reg (XEXP (op0, 0)));
10194 rtx temp, result;
ca695ac9 10195
b93a436e
JL
10196 op0 = change_address (op0, VOIDmode, addr);
10197 temp = force_reg (GET_MODE (op0), op0);
10198 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10199 op1 = force_reg (mode, op1);
ca695ac9 10200
b93a436e
JL
10201 /* The increment queue is LIFO, thus we have to `queue'
10202 the instructions in reverse order. */
10203 enqueue_insn (op0, gen_move_insn (op0, temp));
10204 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10205 return result;
bbf6f052
RK
10206 }
10207 }
ca695ac9 10208
b93a436e
JL
10209 /* Preincrement, or we can't increment with one simple insn. */
10210 if (post)
10211 /* Save a copy of the value before inc or dec, to return it later. */
10212 temp = value = copy_to_reg (op0);
10213 else
10214 /* Arrange to return the incremented value. */
10215 /* Copy the rtx because expand_binop will protect from the queue,
10216 and the results of that would be invalid for us to return
10217 if our caller does emit_queue before using our result. */
10218 temp = copy_rtx (value = op0);
bbf6f052 10219
b93a436e
JL
10220 /* Increment however we can. */
10221 op1 = expand_binop (mode, this_optab, value, op1,
10222 flag_check_memory_usage ? NULL_RTX : op0,
10223 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10224 /* Make sure the value is stored into OP0. */
10225 if (op1 != op0)
10226 emit_move_insn (op0, op1);
5718612f 10227
b93a436e
JL
10228 return temp;
10229}
10230\f
10231/* Expand all function calls contained within EXP, innermost ones first.
10232 But don't look within expressions that have sequence points.
10233 For each CALL_EXPR, record the rtx for its value
10234 in the CALL_EXPR_RTL field. */
5718612f 10235
b93a436e
JL
10236static void
10237preexpand_calls (exp)
10238 tree exp;
10239{
10240 register int nops, i;
10241 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5718612f 10242
b93a436e
JL
10243 if (! do_preexpand_calls)
10244 return;
5718612f 10245
b93a436e 10246 /* Only expressions and references can contain calls. */
bbf6f052 10247
b93a436e
JL
10248 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10249 return;
bbf6f052 10250
b93a436e
JL
10251 switch (TREE_CODE (exp))
10252 {
10253 case CALL_EXPR:
10254 /* Do nothing if already expanded. */
10255 if (CALL_EXPR_RTL (exp) != 0
10256 /* Do nothing if the call returns a variable-sized object. */
10257 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
10258 /* Do nothing to built-in functions. */
10259 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10260 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10261 == FUNCTION_DECL)
10262 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10263 return;
bbf6f052 10264
b93a436e
JL
10265 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10266 return;
bbf6f052 10267
b93a436e
JL
10268 case COMPOUND_EXPR:
10269 case COND_EXPR:
10270 case TRUTH_ANDIF_EXPR:
10271 case TRUTH_ORIF_EXPR:
10272 /* If we find one of these, then we can be sure
10273 the adjust will be done for it (since it makes jumps).
10274 Do it now, so that if this is inside an argument
10275 of a function, we don't get the stack adjustment
10276 after some other args have already been pushed. */
10277 do_pending_stack_adjust ();
10278 return;
bbf6f052 10279
b93a436e
JL
10280 case BLOCK:
10281 case RTL_EXPR:
10282 case WITH_CLEANUP_EXPR:
10283 case CLEANUP_POINT_EXPR:
10284 case TRY_CATCH_EXPR:
10285 return;
bbf6f052 10286
b93a436e
JL
10287 case SAVE_EXPR:
10288 if (SAVE_EXPR_RTL (exp) != 0)
10289 return;
10290
10291 default:
10292 break;
ca695ac9 10293 }
bbf6f052 10294
b93a436e
JL
10295 nops = tree_code_length[(int) TREE_CODE (exp)];
10296 for (i = 0; i < nops; i++)
10297 if (TREE_OPERAND (exp, i) != 0)
10298 {
10299 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10300 if (type == 'e' || type == '<' || type == '1' || type == '2'
10301 || type == 'r')
10302 preexpand_calls (TREE_OPERAND (exp, i));
10303 }
10304}
10305\f
10306/* At the start of a function, record that we have no previously-pushed
10307 arguments waiting to be popped. */
bbf6f052 10308
b93a436e
JL
10309void
10310init_pending_stack_adjust ()
10311{
10312 pending_stack_adjust = 0;
10313}
bbf6f052 10314
b93a436e 10315/* When exiting from function, if safe, clear out any pending stack adjust
060fbabf
JL
10316 so the adjustment won't get done.
10317
10318 Note, if the current function calls alloca, then it must have a
10319 frame pointer regardless of the value of flag_omit_frame_pointer. */
bbf6f052 10320
b93a436e
JL
10321void
10322clear_pending_stack_adjust ()
10323{
10324#ifdef EXIT_IGNORE_STACK
10325 if (optimize > 0
060fbabf
JL
10326 && (! flag_omit_frame_pointer || current_function_calls_alloca)
10327 && EXIT_IGNORE_STACK
b93a436e
JL
10328 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10329 && ! flag_inline_functions)
10330 pending_stack_adjust = 0;
10331#endif
10332}
bbf6f052 10333
b93a436e
JL
10334/* Pop any previously-pushed arguments that have not been popped yet. */
10335
10336void
10337do_pending_stack_adjust ()
10338{
10339 if (inhibit_defer_pop == 0)
ca695ac9 10340 {
b93a436e
JL
10341 if (pending_stack_adjust != 0)
10342 adjust_stack (GEN_INT (pending_stack_adjust));
10343 pending_stack_adjust = 0;
bbf6f052 10344 }
bbf6f052
RK
10345}
10346\f
b93a436e 10347/* Expand conditional expressions. */
bbf6f052 10348
b93a436e
JL
10349/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10350 LABEL is an rtx of code CODE_LABEL, in this function and all the
10351 functions here. */
bbf6f052 10352
b93a436e
JL
10353void
10354jumpifnot (exp, label)
ca695ac9 10355 tree exp;
b93a436e 10356 rtx label;
bbf6f052 10357{
b93a436e
JL
10358 do_jump (exp, label, NULL_RTX);
10359}
bbf6f052 10360
b93a436e 10361/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
ca695ac9 10362
b93a436e
JL
10363void
10364jumpif (exp, label)
10365 tree exp;
10366 rtx label;
10367{
10368 do_jump (exp, NULL_RTX, label);
10369}
ca695ac9 10370
b93a436e
JL
10371/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10372 the result is zero, or IF_TRUE_LABEL if the result is one.
10373 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10374 meaning fall through in that case.
ca695ac9 10375
b93a436e
JL
10376 do_jump always does any pending stack adjust except when it does not
10377 actually perform a jump. An example where there is no jump
10378 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
ca695ac9 10379
b93a436e
JL
10380 This function is responsible for optimizing cases such as
10381 &&, || and comparison operators in EXP. */
5718612f 10382
b93a436e
JL
10383void
10384do_jump (exp, if_false_label, if_true_label)
10385 tree exp;
10386 rtx if_false_label, if_true_label;
10387{
10388 register enum tree_code code = TREE_CODE (exp);
10389 /* Some cases need to create a label to jump to
10390 in order to properly fall through.
10391 These cases set DROP_THROUGH_LABEL nonzero. */
10392 rtx drop_through_label = 0;
10393 rtx temp;
10394 rtx comparison = 0;
10395 int i;
10396 tree type;
10397 enum machine_mode mode;
ca695ac9 10398
dbecbbe4
JL
10399#ifdef MAX_INTEGER_COMPUTATION_MODE
10400 check_max_integer_computation_mode (exp);
10401#endif
10402
b93a436e 10403 emit_queue ();
ca695ac9 10404
b93a436e 10405 switch (code)
ca695ac9 10406 {
b93a436e 10407 case ERROR_MARK:
ca695ac9 10408 break;
bbf6f052 10409
b93a436e
JL
10410 case INTEGER_CST:
10411 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10412 if (temp)
10413 emit_jump (temp);
10414 break;
bbf6f052 10415
b93a436e
JL
10416#if 0
10417 /* This is not true with #pragma weak */
10418 case ADDR_EXPR:
10419 /* The address of something can never be zero. */
10420 if (if_true_label)
10421 emit_jump (if_true_label);
10422 break;
10423#endif
bbf6f052 10424
b93a436e
JL
10425 case NOP_EXPR:
10426 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10427 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10428 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10429 goto normal;
10430 case CONVERT_EXPR:
10431 /* If we are narrowing the operand, we have to do the compare in the
10432 narrower mode. */
10433 if ((TYPE_PRECISION (TREE_TYPE (exp))
10434 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10435 goto normal;
10436 case NON_LVALUE_EXPR:
10437 case REFERENCE_EXPR:
10438 case ABS_EXPR:
10439 case NEGATE_EXPR:
10440 case LROTATE_EXPR:
10441 case RROTATE_EXPR:
10442 /* These cannot change zero->non-zero or vice versa. */
10443 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10444 break;
bbf6f052 10445
b93a436e
JL
10446#if 0
10447 /* This is never less insns than evaluating the PLUS_EXPR followed by
10448 a test and can be longer if the test is eliminated. */
10449 case PLUS_EXPR:
10450 /* Reduce to minus. */
10451 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10452 TREE_OPERAND (exp, 0),
10453 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10454 TREE_OPERAND (exp, 1))));
10455 /* Process as MINUS. */
ca695ac9 10456#endif
bbf6f052 10457
b93a436e
JL
10458 case MINUS_EXPR:
10459 /* Non-zero iff operands of minus differ. */
10460 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10461 TREE_OPERAND (exp, 0),
10462 TREE_OPERAND (exp, 1)),
10463 NE, NE);
10464 break;
bbf6f052 10465
b93a436e
JL
10466 case BIT_AND_EXPR:
10467 /* If we are AND'ing with a small constant, do this comparison in the
10468 smallest type that fits. If the machine doesn't have comparisons
10469 that small, it will be converted back to the wider comparison.
10470 This helps if we are testing the sign bit of a narrower object.
10471 combine can't do this for us because it can't know whether a
10472 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
bbf6f052 10473
b93a436e
JL
10474 if (! SLOW_BYTE_ACCESS
10475 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10476 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10477 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10478 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10479 && (type = type_for_mode (mode, 1)) != 0
10480 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10481 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10482 != CODE_FOR_nothing))
10483 {
10484 do_jump (convert (type, exp), if_false_label, if_true_label);
10485 break;
10486 }
10487 goto normal;
bbf6f052 10488
b93a436e
JL
10489 case TRUTH_NOT_EXPR:
10490 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10491 break;
bbf6f052 10492
b93a436e
JL
10493 case TRUTH_ANDIF_EXPR:
10494 if (if_false_label == 0)
10495 if_false_label = drop_through_label = gen_label_rtx ();
10496 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10497 start_cleanup_deferral ();
10498 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10499 end_cleanup_deferral ();
10500 break;
bbf6f052 10501
b93a436e
JL
10502 case TRUTH_ORIF_EXPR:
10503 if (if_true_label == 0)
10504 if_true_label = drop_through_label = gen_label_rtx ();
10505 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10506 start_cleanup_deferral ();
10507 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10508 end_cleanup_deferral ();
10509 break;
bbf6f052 10510
b93a436e
JL
10511 case COMPOUND_EXPR:
10512 push_temp_slots ();
10513 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10514 preserve_temp_slots (NULL_RTX);
10515 free_temp_slots ();
10516 pop_temp_slots ();
10517 emit_queue ();
10518 do_pending_stack_adjust ();
10519 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10520 break;
bbf6f052 10521
b93a436e
JL
10522 case COMPONENT_REF:
10523 case BIT_FIELD_REF:
10524 case ARRAY_REF:
10525 {
10526 int bitsize, bitpos, unsignedp;
10527 enum machine_mode mode;
10528 tree type;
10529 tree offset;
10530 int volatilep = 0;
10531 int alignment;
bbf6f052 10532
b93a436e
JL
10533 /* Get description of this reference. We don't actually care
10534 about the underlying object here. */
10535 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10536 &mode, &unsignedp, &volatilep,
10537 &alignment);
bbf6f052 10538
b93a436e
JL
10539 type = type_for_size (bitsize, unsignedp);
10540 if (! SLOW_BYTE_ACCESS
10541 && type != 0 && bitsize >= 0
10542 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10543 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10544 != CODE_FOR_nothing))
10545 {
10546 do_jump (convert (type, exp), if_false_label, if_true_label);
10547 break;
10548 }
10549 goto normal;
10550 }
bbf6f052 10551
b93a436e
JL
10552 case COND_EXPR:
10553 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10554 if (integer_onep (TREE_OPERAND (exp, 1))
10555 && integer_zerop (TREE_OPERAND (exp, 2)))
10556 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
bbf6f052 10557
b93a436e
JL
10558 else if (integer_zerop (TREE_OPERAND (exp, 1))
10559 && integer_onep (TREE_OPERAND (exp, 2)))
10560 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
bbf6f052 10561
b93a436e
JL
10562 else
10563 {
10564 register rtx label1 = gen_label_rtx ();
10565 drop_through_label = gen_label_rtx ();
bbf6f052 10566
b93a436e 10567 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
bbf6f052 10568
b93a436e
JL
10569 start_cleanup_deferral ();
10570 /* Now the THEN-expression. */
10571 do_jump (TREE_OPERAND (exp, 1),
10572 if_false_label ? if_false_label : drop_through_label,
10573 if_true_label ? if_true_label : drop_through_label);
10574 /* In case the do_jump just above never jumps. */
10575 do_pending_stack_adjust ();
10576 emit_label (label1);
bbf6f052 10577
b93a436e
JL
10578 /* Now the ELSE-expression. */
10579 do_jump (TREE_OPERAND (exp, 2),
10580 if_false_label ? if_false_label : drop_through_label,
10581 if_true_label ? if_true_label : drop_through_label);
10582 end_cleanup_deferral ();
10583 }
10584 break;
bbf6f052 10585
b93a436e
JL
10586 case EQ_EXPR:
10587 {
10588 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 10589
9ec36da5
JL
10590 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10591 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
10592 {
10593 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10594 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10595 do_jump
10596 (fold
10597 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10598 fold (build (EQ_EXPR, TREE_TYPE (exp),
10599 fold (build1 (REALPART_EXPR,
10600 TREE_TYPE (inner_type),
10601 exp0)),
10602 fold (build1 (REALPART_EXPR,
10603 TREE_TYPE (inner_type),
10604 exp1)))),
10605 fold (build (EQ_EXPR, TREE_TYPE (exp),
10606 fold (build1 (IMAGPART_EXPR,
10607 TREE_TYPE (inner_type),
10608 exp0)),
10609 fold (build1 (IMAGPART_EXPR,
10610 TREE_TYPE (inner_type),
10611 exp1)))))),
10612 if_false_label, if_true_label);
10613 }
9ec36da5
JL
10614
10615 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10616 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10617
b93a436e
JL
10618 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10619 && !can_compare_p (TYPE_MODE (inner_type)))
10620 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10621 else
10622 comparison = compare (exp, EQ, EQ);
10623 break;
10624 }
bbf6f052 10625
b93a436e
JL
10626 case NE_EXPR:
10627 {
10628 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 10629
9ec36da5
JL
10630 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10631 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
8d62b411
AS
10632 {
10633 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10634 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10635 do_jump
10636 (fold
10637 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10638 fold (build (NE_EXPR, TREE_TYPE (exp),
10639 fold (build1 (REALPART_EXPR,
10640 TREE_TYPE (inner_type),
10641 exp0)),
10642 fold (build1 (REALPART_EXPR,
10643 TREE_TYPE (inner_type),
10644 exp1)))),
10645 fold (build (NE_EXPR, TREE_TYPE (exp),
10646 fold (build1 (IMAGPART_EXPR,
10647 TREE_TYPE (inner_type),
10648 exp0)),
10649 fold (build1 (IMAGPART_EXPR,
10650 TREE_TYPE (inner_type),
10651 exp1)))))),
10652 if_false_label, if_true_label);
10653 }
9ec36da5
JL
10654
10655 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10656 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10657
b93a436e
JL
10658 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10659 && !can_compare_p (TYPE_MODE (inner_type)))
10660 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10661 else
10662 comparison = compare (exp, NE, NE);
10663 break;
10664 }
bbf6f052 10665
b93a436e
JL
10666 case LT_EXPR:
10667 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10668 == MODE_INT)
10669 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10670 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10671 else
10672 comparison = compare (exp, LT, LTU);
10673 break;
bbf6f052 10674
b93a436e
JL
10675 case LE_EXPR:
10676 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10677 == MODE_INT)
10678 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10679 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10680 else
10681 comparison = compare (exp, LE, LEU);
10682 break;
bbf6f052 10683
b93a436e
JL
10684 case GT_EXPR:
10685 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10686 == MODE_INT)
10687 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10688 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10689 else
10690 comparison = compare (exp, GT, GTU);
10691 break;
bbf6f052 10692
b93a436e
JL
10693 case GE_EXPR:
10694 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10695 == MODE_INT)
10696 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10697 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10698 else
10699 comparison = compare (exp, GE, GEU);
10700 break;
bbf6f052 10701
b93a436e
JL
10702 default:
10703 normal:
10704 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10705#if 0
10706 /* This is not needed any more and causes poor code since it causes
10707 comparisons and tests from non-SI objects to have different code
10708 sequences. */
10709 /* Copy to register to avoid generating bad insns by cse
10710 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10711 if (!cse_not_expected && GET_CODE (temp) == MEM)
10712 temp = copy_to_reg (temp);
ca695ac9 10713#endif
b93a436e
JL
10714 do_pending_stack_adjust ();
10715 if (GET_CODE (temp) == CONST_INT)
10716 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10717 else if (GET_CODE (temp) == LABEL_REF)
10718 comparison = const_true_rtx;
10719 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10720 && !can_compare_p (GET_MODE (temp)))
10721 /* Note swapping the labels gives us not-equal. */
10722 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10723 else if (GET_MODE (temp) != VOIDmode)
10724 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10725 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10726 GET_MODE (temp), NULL_RTX, 0);
10727 else
10728 abort ();
10729 }
bbf6f052 10730
b93a436e
JL
10731 /* Do any postincrements in the expression that was tested. */
10732 emit_queue ();
bbf6f052 10733
b93a436e
JL
10734 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10735 straight into a conditional jump instruction as the jump condition.
10736 Otherwise, all the work has been done already. */
bbf6f052 10737
b93a436e
JL
10738 if (comparison == const_true_rtx)
10739 {
10740 if (if_true_label)
10741 emit_jump (if_true_label);
10742 }
10743 else if (comparison == const0_rtx)
10744 {
10745 if (if_false_label)
10746 emit_jump (if_false_label);
10747 }
10748 else if (comparison)
10749 do_jump_for_compare (comparison, if_false_label, if_true_label);
bbf6f052 10750
b93a436e
JL
10751 if (drop_through_label)
10752 {
10753 /* If do_jump produces code that might be jumped around,
10754 do any stack adjusts from that code, before the place
10755 where control merges in. */
10756 do_pending_stack_adjust ();
10757 emit_label (drop_through_label);
10758 }
bbf6f052 10759}
b93a436e
JL
10760\f
10761/* Given a comparison expression EXP for values too wide to be compared
10762 with one insn, test the comparison and jump to the appropriate label.
10763 The code of EXP is ignored; we always test GT if SWAP is 0,
10764 and LT if SWAP is 1. */
bbf6f052 10765
b93a436e
JL
10766static void
10767do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10768 tree exp;
10769 int swap;
10770 rtx if_false_label, if_true_label;
10771{
10772 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10773 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10774 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10775 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10776 rtx drop_through_label = 0;
10777 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10778 int i;
bbf6f052 10779
b93a436e
JL
10780 if (! if_true_label || ! if_false_label)
10781 drop_through_label = gen_label_rtx ();
10782 if (! if_true_label)
10783 if_true_label = drop_through_label;
10784 if (! if_false_label)
10785 if_false_label = drop_through_label;
bbf6f052 10786
b93a436e
JL
10787 /* Compare a word at a time, high order first. */
10788 for (i = 0; i < nwords; i++)
f81497d9 10789 {
b93a436e
JL
10790 rtx comp;
10791 rtx op0_word, op1_word;
10792
10793 if (WORDS_BIG_ENDIAN)
10794 {
10795 op0_word = operand_subword_force (op0, i, mode);
10796 op1_word = operand_subword_force (op1, i, mode);
10797 }
f81497d9 10798 else
b93a436e
JL
10799 {
10800 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10801 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10802 }
10803
10804 /* All but high-order word must be compared as unsigned. */
10805 comp = compare_from_rtx (op0_word, op1_word,
10806 (unsignedp || i > 0) ? GTU : GT,
10807 unsignedp, word_mode, NULL_RTX, 0);
10808 if (comp == const_true_rtx)
10809 emit_jump (if_true_label);
10810 else if (comp != const0_rtx)
10811 do_jump_for_compare (comp, NULL_RTX, if_true_label);
10812
10813 /* Consider lower words only if these are equal. */
10814 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10815 NULL_RTX, 0);
10816 if (comp == const_true_rtx)
10817 emit_jump (if_false_label);
10818 else if (comp != const0_rtx)
10819 do_jump_for_compare (comp, NULL_RTX, if_false_label);
f81497d9 10820 }
ca695ac9 10821
b93a436e
JL
10822 if (if_false_label)
10823 emit_jump (if_false_label);
10824 if (drop_through_label)
10825 emit_label (drop_through_label);
f81497d9
RS
10826}
10827
b93a436e
JL
10828/* Compare OP0 with OP1, word at a time, in mode MODE.
10829 UNSIGNEDP says to do unsigned comparison.
10830 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
f81497d9 10831
b93a436e
JL
10832void
10833do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
10834 enum machine_mode mode;
10835 int unsignedp;
10836 rtx op0, op1;
10837 rtx if_false_label, if_true_label;
f81497d9 10838{
b93a436e
JL
10839 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10840 rtx drop_through_label = 0;
10841 int i;
f81497d9 10842
b93a436e
JL
10843 if (! if_true_label || ! if_false_label)
10844 drop_through_label = gen_label_rtx ();
10845 if (! if_true_label)
10846 if_true_label = drop_through_label;
10847 if (! if_false_label)
10848 if_false_label = drop_through_label;
f81497d9 10849
b93a436e
JL
10850 /* Compare a word at a time, high order first. */
10851 for (i = 0; i < nwords; i++)
10852 {
10853 rtx comp;
10854 rtx op0_word, op1_word;
bbf6f052 10855
b93a436e
JL
10856 if (WORDS_BIG_ENDIAN)
10857 {
10858 op0_word = operand_subword_force (op0, i, mode);
10859 op1_word = operand_subword_force (op1, i, mode);
10860 }
10861 else
10862 {
10863 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
10864 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
10865 }
bbf6f052 10866
b93a436e
JL
10867 /* All but high-order word must be compared as unsigned. */
10868 comp = compare_from_rtx (op0_word, op1_word,
10869 (unsignedp || i > 0) ? GTU : GT,
10870 unsignedp, word_mode, NULL_RTX, 0);
10871 if (comp == const_true_rtx)
10872 emit_jump (if_true_label);
10873 else if (comp != const0_rtx)
10874 do_jump_for_compare (comp, NULL_RTX, if_true_label);
bbf6f052 10875
b93a436e
JL
10876 /* Consider lower words only if these are equal. */
10877 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
10878 NULL_RTX, 0);
10879 if (comp == const_true_rtx)
10880 emit_jump (if_false_label);
10881 else if (comp != const0_rtx)
10882 do_jump_for_compare (comp, NULL_RTX, if_false_label);
10883 }
bbf6f052 10884
b93a436e
JL
10885 if (if_false_label)
10886 emit_jump (if_false_label);
10887 if (drop_through_label)
10888 emit_label (drop_through_label);
bbf6f052
RK
10889}
10890
b93a436e
JL
10891/* Given an EQ_EXPR expression EXP for values too wide to be compared
10892 with one insn, test the comparison and jump to the appropriate label. */
bbf6f052 10893
b93a436e
JL
10894static void
10895do_jump_by_parts_equality (exp, if_false_label, if_true_label)
10896 tree exp;
10897 rtx if_false_label, if_true_label;
bbf6f052 10898{
b93a436e
JL
10899 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
10900 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
10901 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10902 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10903 int i;
10904 rtx drop_through_label = 0;
bbf6f052 10905
b93a436e
JL
10906 if (! if_false_label)
10907 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 10908
b93a436e
JL
10909 for (i = 0; i < nwords; i++)
10910 {
10911 rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
10912 operand_subword_force (op1, i, mode),
10913 EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
10914 word_mode, NULL_RTX, 0);
10915 if (comp == const_true_rtx)
10916 emit_jump (if_false_label);
10917 else if (comp != const0_rtx)
10918 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10919 }
bbf6f052 10920
b93a436e
JL
10921 if (if_true_label)
10922 emit_jump (if_true_label);
10923 if (drop_through_label)
10924 emit_label (drop_through_label);
bbf6f052 10925}
b93a436e
JL
10926\f
10927/* Jump according to whether OP0 is 0.
10928 We assume that OP0 has an integer mode that is too wide
10929 for the available compare insns. */
bbf6f052 10930
f5963e61 10931void
b93a436e
JL
10932do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
10933 rtx op0;
10934 rtx if_false_label, if_true_label;
ca695ac9 10935{
b93a436e
JL
10936 int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
10937 rtx part;
10938 int i;
10939 rtx drop_through_label = 0;
bbf6f052 10940
b93a436e
JL
10941 /* The fastest way of doing this comparison on almost any machine is to
10942 "or" all the words and compare the result. If all have to be loaded
10943 from memory and this is a very wide item, it's possible this may
10944 be slower, but that's highly unlikely. */
bbf6f052 10945
b93a436e
JL
10946 part = gen_reg_rtx (word_mode);
10947 emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
10948 for (i = 1; i < nwords && part != 0; i++)
10949 part = expand_binop (word_mode, ior_optab, part,
10950 operand_subword_force (op0, i, GET_MODE (op0)),
10951 part, 1, OPTAB_WIDEN);
bbf6f052 10952
b93a436e
JL
10953 if (part != 0)
10954 {
10955 rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
10956 NULL_RTX, 0);
0f41302f 10957
b93a436e
JL
10958 if (comp == const_true_rtx)
10959 emit_jump (if_false_label);
10960 else if (comp == const0_rtx)
10961 emit_jump (if_true_label);
10962 else
10963 do_jump_for_compare (comp, if_false_label, if_true_label);
bbf6f052 10964
b93a436e
JL
10965 return;
10966 }
bbf6f052 10967
b93a436e
JL
10968 /* If we couldn't do the "or" simply, do this with a series of compares. */
10969 if (! if_false_label)
10970 drop_through_label = if_false_label = gen_label_rtx ();
bbf6f052 10971
b93a436e
JL
10972 for (i = 0; i < nwords; i++)
10973 {
10974 rtx comp = compare_from_rtx (operand_subword_force (op0, i,
10975 GET_MODE (op0)),
10976 const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
10977 if (comp == const_true_rtx)
10978 emit_jump (if_false_label);
10979 else if (comp != const0_rtx)
10980 do_jump_for_compare (comp, if_false_label, NULL_RTX);
10981 }
bbf6f052 10982
b93a436e
JL
10983 if (if_true_label)
10984 emit_jump (if_true_label);
0f41302f 10985
b93a436e
JL
10986 if (drop_through_label)
10987 emit_label (drop_through_label);
bbf6f052 10988}
bbf6f052 10989
b93a436e
JL
10990/* Given a comparison expression in rtl form, output conditional branches to
10991 IF_TRUE_LABEL, IF_FALSE_LABEL, or both. */
bbf6f052 10992
b93a436e
JL
10993static void
10994do_jump_for_compare (comparison, if_false_label, if_true_label)
10995 rtx comparison, if_false_label, if_true_label;
bbf6f052 10996{
b93a436e
JL
10997 if (if_true_label)
10998 {
10999 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
11000 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
11001 else
11002 abort ();
ca695ac9 11003
b93a436e
JL
11004 if (if_false_label)
11005 emit_jump (if_false_label);
11006 }
11007 else if (if_false_label)
11008 {
11009 rtx insn;
11010 rtx prev = get_last_insn ();
11011 rtx branch = 0;
0f41302f 11012
b93a436e
JL
11013 /* Output the branch with the opposite condition. Then try to invert
11014 what is generated. If more than one insn is a branch, or if the
11015 branch is not the last insn written, abort. If we can't invert
11016 the branch, emit make a true label, redirect this jump to that,
11017 emit a jump to the false label and define the true label. */
bbf6f052 11018
b93a436e
JL
11019 if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
11020 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])(if_false_label));
11021 else
11022 abort ();
bbf6f052 11023
b93a436e
JL
11024 /* Here we get the first insn that was just emitted. It used to be the
11025 case that, on some machines, emitting the branch would discard
11026 the previous compare insn and emit a replacement. This isn't
11027 done anymore, but abort if we see that PREV is deleted. */
bbf6f052 11028
b93a436e
JL
11029 if (prev == 0)
11030 insn = get_insns ();
11031 else if (INSN_DELETED_P (prev))
11032 abort ();
11033 else
11034 insn = NEXT_INSN (prev);
bbf6f052 11035
b93a436e
JL
11036 for (; insn; insn = NEXT_INSN (insn))
11037 if (GET_CODE (insn) == JUMP_INSN)
11038 {
11039 if (branch)
11040 abort ();
11041 branch = insn;
11042 }
a7c5971a 11043
b93a436e
JL
11044 if (branch != get_last_insn ())
11045 abort ();
bbf6f052 11046
b93a436e
JL
11047 JUMP_LABEL (branch) = if_false_label;
11048 if (! invert_jump (branch, if_false_label))
11049 {
11050 if_true_label = gen_label_rtx ();
11051 redirect_jump (branch, if_true_label);
11052 emit_jump (if_false_label);
11053 emit_label (if_true_label);
11054 }
11055 }
11056}
11057\f
11058/* Generate code for a comparison expression EXP
11059 (including code to compute the values to be compared)
11060 and set (CC0) according to the result.
11061 SIGNED_CODE should be the rtx operation for this comparison for
11062 signed data; UNSIGNED_CODE, likewise for use if data is unsigned.
bbf6f052 11063
b93a436e
JL
11064 We force a stack adjustment unless there are currently
11065 things pushed on the stack that aren't yet used. */
ca695ac9 11066
b93a436e
JL
11067static rtx
11068compare (exp, signed_code, unsigned_code)
11069 register tree exp;
11070 enum rtx_code signed_code, unsigned_code;
11071{
11072 register rtx op0
11073 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
11074 register rtx op1
11075 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
11076 register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
11077 register enum machine_mode mode = TYPE_MODE (type);
11078 int unsignedp = TREE_UNSIGNED (type);
11079 enum rtx_code code = unsignedp ? unsigned_code : signed_code;
ca695ac9 11080
b93a436e
JL
11081#ifdef HAVE_canonicalize_funcptr_for_compare
11082 /* If function pointers need to be "canonicalized" before they can
11083 be reliably compared, then canonicalize them. */
11084 if (HAVE_canonicalize_funcptr_for_compare
11085 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
11086 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
11087 == FUNCTION_TYPE))
bbf6f052 11088 {
b93a436e 11089 rtx new_op0 = gen_reg_rtx (mode);
bbf6f052 11090
b93a436e
JL
11091 emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
11092 op0 = new_op0;
ca695ac9 11093 }
bbf6f052 11094
b93a436e
JL
11095 if (HAVE_canonicalize_funcptr_for_compare
11096 && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
11097 && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
11098 == FUNCTION_TYPE))
11099 {
11100 rtx new_op1 = gen_reg_rtx (mode);
bbf6f052 11101
b93a436e
JL
11102 emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
11103 op1 = new_op1;
11104 }
11105#endif
0f41302f 11106
b93a436e
JL
11107 return compare_from_rtx (op0, op1, code, unsignedp, mode,
11108 ((mode == BLKmode)
11109 ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
11110 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
ca695ac9 11111}
bbf6f052 11112
b93a436e
JL
11113/* Like compare but expects the values to compare as two rtx's.
11114 The decision as to signed or unsigned comparison must be made by the caller.
bbf6f052 11115
b93a436e
JL
11116 If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
11117 compared.
bbf6f052 11118
b93a436e
JL
11119 If ALIGN is non-zero, it is the alignment of this type; if zero, the
11120 size of MODE should be used. */
ca695ac9 11121
b93a436e
JL
11122rtx
11123compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
11124 register rtx op0, op1;
11125 enum rtx_code code;
11126 int unsignedp;
11127 enum machine_mode mode;
11128 rtx size;
11129 int align;
bbf6f052 11130{
b93a436e 11131 rtx tem;
bbf6f052 11132
b93a436e
JL
11133 /* If one operand is constant, make it the second one. Only do this
11134 if the other operand is not constant as well. */
e7c33f54 11135
b93a436e
JL
11136 if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
11137 || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
ca695ac9 11138 {
b93a436e
JL
11139 tem = op0;
11140 op0 = op1;
11141 op1 = tem;
11142 code = swap_condition (code);
11143 }
bbf6f052 11144
b93a436e
JL
11145 if (flag_force_mem)
11146 {
11147 op0 = force_not_mem (op0);
11148 op1 = force_not_mem (op1);
11149 }
bbf6f052 11150
b93a436e 11151 do_pending_stack_adjust ();
ca695ac9 11152
  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
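
/* The comparison rtx returned above has the form (CODE (cc0) (const_int 0)).
   A caller such as do_store_flag below first checks whether the result
   folded to a CONST_INT; otherwise the returned rtx describes the test for
   a subsequent conditional jump.  */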
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }
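  /* Thus a signed test such as `x < 1' is rewritten above as `x <= 0',
     and `x <= -1' as `x < 0', so the later tests see a comparison
     against zero whenever one is possible.  */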

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */

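  /* For example, `(x & 4) != 0' becomes `(x >> 2) & 1', and `(x & 4) == 0'
     becomes `((x >> 2) & 1) ^ 1'; when the bit tested is the most
     significant bit of the mode, the trailing AND can be omitted.  */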
  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }
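      /* For example, testing bit 1 of `x >> 3' is equivalent to testing
         bit 4 of `x', provided bit 4 lies within the precision of the
         type.  */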

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is
     safe because, if emit_store_flag does anything, it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

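  /* The fallback sequence emitted below is, schematically,

         target = 1;                 (0 if INVERT)
         if (op0 CODE op1) goto label;
         target = 0;                 (1 if INVERT)
       label:

     i.e. TARGET is preset to the value for `condition true' and
     overwritten only when the branch falls through.  */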
  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
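  /* For example, with a case range of [5, 12] the caller has already
     subtracted 5 from INDEX, so the single unsigned test `INDEX >u 7'
     rejects both original values below 5 (which wrap around to large
     unsigned numbers) and values above 12.  */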

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
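
  /* The address built above is
     TABLE_LABEL + INDEX * GET_MODE_SIZE (CASE_VECTOR_MODE),
     i.e. the address of the INDEXth entry of the dispatch table.  */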
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */