/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-98, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move. */
#include "expr.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
#include "toplev.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))

/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Under some ABIs, it is the caller's responsibility to pop arguments
   pushed for function calls.  A naive implementation would simply pop
   the arguments immediately after each call.  However, if several
   function calls are made in a row, it is typically cheaper to pop
   all the arguments after all of the calls are complete since a
   single pop instruction can be used.  Therefore, GCC attempts to
   defer popping the arguments until absolutely necessary.  (For
   example, at the end of a conditional, the arguments must be popped,
   since code outside the conditional won't know whether or not the
   arguments need to be popped.)

   When INHIBIT_DEFER_POP is non-zero, however, the compiler does not
   attempt to defer pops.  Instead, the stack is popped immediately
   after each call.  Rather than setting this variable directly, use
   NO_DEFER_POP and OK_DEFER_POP.  */
int inhibit_defer_pop;

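/* Illustrative sketch, not part of the original file: code that must see
   an exact stack pointer brackets itself with the NO_DEFER_POP and
   OK_DEFER_POP macros from expr.h (assumed here to increment and
   decrement inhibit_defer_pop) rather than touching the variable
   directly.  */
#if 0
static void
example_no_defer_pop ()
{
  NO_DEFER_POP;			/* pops are now emitted immediately */
  /* ... expand RTL that relies on a settled stack pointer ... */
  OK_DEFER_POP;			/* deferred popping is allowed again */
}
#endif
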
/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Don't check memory usage, since code is being emitted to check memory
   usage.  Used when current_function_check_memory_usage is true, to avoid
   infinite recursion.  */
static int in_check_memory_usage;

/* Postincrements that still need to be expanded.  */
static rtx pending_chain;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  rtx from;
  rtx from_addr;
  int autinc_from;
  int explicit_inc_from;
  int from_struct;
  int len;
  int offset;
  int reverse;
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;
  rtx to_addr;
  int autinc_to;
  int explicit_inc_to;
  int to_struct;
  int len;
  int offset;
  int reverse;
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx get_push_address PROTO ((int));

static rtx enqueue_insn PROTO((rtx, rtx));
static void init_queue PROTO((void));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
				     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
					    tree, tree, int));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
			      enum machine_mode, int, int,
			      int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree, int));
static int fixed_type_p PROTO((tree));
static rtx var_rtx PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx get_memory_rtx PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
				 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* If a memory-to-memory move would take MOVE_RATIO or more simple
   move-instruction sequences, we will do a movstr or libcall instead.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif

/* This macro is used to determine whether move_by_pieces should be called
   to perform a structure copy.  */
#ifndef MOVE_BY_PIECES_P
#define MOVE_BY_PIECES_P(SIZE, ALIGN) (move_by_pieces_ninsns \
                                       (SIZE, ALIGN) < MOVE_RATIO)
#endif

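/* Worked example (illustrative, assuming a 32-bit target with QI/HI/SI
   move patterns and no movstr patterns): copying a 16-byte,
   4-byte-aligned structure needs move_by_pieces_ninsns (16, 4) == 4
   SImode moves.  With the default MOVE_RATIO of 15 (3 under -Os),
   4 < 15, so MOVE_BY_PIECES_P (16, 4) is true and the copy is done
   inline rather than through a block-move insn or libcall.  */
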
/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
	 directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
	for (regno = 0; regno < FIRST_PSEUDO_REGISTER
	     && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
	     regno++)
	  {
	    if (! HARD_REGNO_MODE_OK (regno, mode))
	      continue;

	    reg = gen_rtx_REG (mode, regno);

	    SET_SRC (pat) = mem;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = mem1;
	    SET_DEST (pat) = reg;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_load[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;

	    SET_SRC (pat) = reg;
	    SET_DEST (pat) = mem1;
	    if (recog (pat, insn, &num_clobbers) >= 0)
	      direct_store[(int) mode] = 1;
	  }
    }

  end_sequence ();
  obfree (free_point);
}

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  p->pending_chain = pending_chain;
  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_chain = NULL_RTX;
  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_chain = p->pending_chain;
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
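
/* Sketch of the intended pairing (illustrative; in practice these are
   invoked via the function-context push/pop machinery): the outer
   function's expression state is saved before a nested function is
   compiled and restored afterwards.  */
#if 0
static void
example_nested_compile (struct function *p)
{
  save_expr_status (p);		/* stash outer state, reset the globals */
  /* ... compile the nested function ... */
  restore_expr_status (p);	/* put the outer state back */
}
#endif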
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
				  var, NULL_RTX, NULL_RTX, body,
				  pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out. */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
	 use of autoincrement.  Make a copy of the contents of the memory
	 location rather than a copy of the address, but not if the value is
	 of mode BLKmode.  Don't modify X in place since it might be
	 shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
	  && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
	{
	  register rtx y = XEXP (x, 0);
	  register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

	  RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
	  MEM_COPY_ATTRIBUTES (new, x);
	  MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

	  if (QUEUED_INSN (y))
	    {
	      register rtx temp = gen_reg_rtx (GET_MODE (new));
	      emit_insn_before (gen_move_insn (temp, new),
				QUEUED_INSN (y));
	      return temp;
	    }
	  return new;
	}
      /* Otherwise, recursively protect the subexpressions of all
	 the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
	{
	  rtx tem = protect_from_queue (XEXP (x, 0), 0);
	  if (tem != XEXP (x, 0))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = tem;
	    }
	}
      else if (code == PLUS || code == MULT)
	{
	  rtx new0 = protect_from_queue (XEXP (x, 0), 0);
	  rtx new1 = protect_from_queue (XEXP (x, 1), 0);
	  if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
	    {
	      x = copy_rtx (x);
	      XEXP (x, 0) = new0;
	      XEXP (x, 1) = new1;
	    }
	}
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
		    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}

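/* Usage sketch (illustrative, not original source), following the contract
   documented above: any operand that might be a QUEUED is filtered
   through protect_from_queue immediately before it is placed in an insn,
   and emit_queue later flushes the pending increments.  */
#if 0
static void
example_queued_operands (rtx target, rtx op)
{
  op = protect_from_queue (op, 0);		/* 0: read access */
  target = protect_from_queue (target, 1);	/* 1: will be modified */
  emit_move_insn (target, op);
  emit_queue ();	/* now perform the queued postincrements */
}
#endif
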
/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
	      || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
	{
	  QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
	  emit_insn (QUEUED_BODY (p));
	}
      else
	QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
	  >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
	{
	  /* Try converting directly if the insn is supported.  */
	  if ((code = can_extend_p (to_mode, from_mode, 0))
	      != CODE_FOR_nothing)
	    {
	      emit_unop_insn (code, to, from, UNKNOWN);
	      return;
	    }
	}

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
	  return;
	}
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
	  return;
	}
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
	{
	  emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
	  return;
	}
#endif

      libcall = (rtx) 0;
      switch (from_mode)
	{
	case SFmode:
	  switch (to_mode)
	    {
	    case DFmode:
	      libcall = extendsfdf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extendsfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extendsftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case DFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncdfsf2_libfunc;
	      break;

	    case XFmode:
	      libcall = extenddfxf2_libfunc;
	      break;

	    case TFmode:
	      libcall = extenddftf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case XFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = truncxfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = truncxfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	case TFmode:
	  switch (to_mode)
	    {
	    case SFmode:
	      libcall = trunctfsf2_libfunc;
	      break;

	    case DFmode:
	      libcall = trunctfdf2_libfunc;
	      break;

	    default:
	      break;
	    }
	  break;

	default:
	  break;
	}

      if (libcall == (rtx) 0)
	/* This conversion is not implemented yet.  */
	abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
				       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  /* If FROM is a SUBREG, put it into a register.  Do this
	     so that we always generate the same set of insns for
	     better cse'ing; if an intermediate assignment occurred,
	     we won't be doing the operation directly on the SUBREG.  */
	  if (optimize > 0 && GET_CODE (from) == SUBREG)
	    from = force_reg (from_mode, from);
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
	       && ((code = can_extend_p (to_mode, word_mode, unsignedp))
		   != CODE_FOR_nothing))
	{
	  if (GET_CODE (to) == REG)
	    emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
	  convert_move (gen_lowpart (word_mode, to), from, unsignedp);
	  emit_unop_insn (code, to,
			  gen_lowpart (word_mode, to), equiv_code);
	  return;
	}

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
	 that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
	from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
	lowpart_mode = word_mode;
      else
	lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
	fill_value = const0_rtx;
      else
	{
#ifdef HAVE_slt
	  if (HAVE_slt
	      && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
	      && STORE_FLAG_VALUE == -1)
	    {
	      emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
			     lowpart_mode, 0, 0);
	      fill_value = gen_reg_rtx (word_mode);
	      emit_insn (gen_slt (fill_value));
	    }
	  else
#endif
	    {
	      fill_value
		= expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
				size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
				NULL_RTX, 0);
	      fill_value = convert_to_mode (word_mode, fill_value, 1);
	    }
	}

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
	{
	  int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
	  rtx subword = operand_subword (to, index, 1, to_mode);

	  if (subword == 0)
	    abort ();

	  if (fill_value != subword)
	    emit_move_insn (subword, fill_value);
	}

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
			      gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion */		/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
	from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
	{
	  emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
	{
	  from = convert_to_mode (QImode, from, unsignedp);
	  from_mode = QImode;
	}
      else
	{
#ifdef HAVE_extendpqiqi2
	  if (HAVE_extendpqiqi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpqiqi2 */
	  abort ();
	}
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
	from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
	{
	  emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
	{
	  from = convert_to_mode (SImode, from, unsignedp);
	  from_mode = SImode;
	}
      else
	{
#ifdef HAVE_extendpsisi2
	  if (HAVE_extendpsisi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpsisi2 */
	  abort ();
	}
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
	from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
	{
	  emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
	  return;
	}
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
	{
	  from = convert_to_mode (DImode, from, unsignedp);
	  from_mode = DImode;
	}
      else
	{
#ifdef HAVE_extendpdidi2
	  if (HAVE_extendpdidi2)
	    {
	      emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
	      return;
	    }
#endif /* HAVE_extendpdidi2 */
	  abort ();
	}
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
				GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
	     && ! MEM_VOLATILE_P (from)
	     && direct_load[(int) to_mode]
	     && ! mode_dependent_address_p (XEXP (from, 0)))
	    || GET_CODE (from) == REG
	    || GET_CODE (from) == SUBREG))
	from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
	  && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
	from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
	  != CODE_FOR_nothing)
	{
	  emit_unop_insn (code, to, from, equiv_code);
	  return;
	}
      else
	{
	  enum machine_mode intermediate;
	  rtx tmp;
	  tree shift_amount;

	  /* Search for a mode to convert via.  */
	  for (intermediate = from_mode; intermediate != VOIDmode;
	       intermediate = GET_MODE_WIDER_MODE (intermediate))
	    if (((can_extend_p (to_mode, intermediate, unsignedp)
		  != CODE_FOR_nothing)
		 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
		     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
		&& (can_extend_p (intermediate, from_mode, unsignedp)
		    != CODE_FOR_nothing))
	      {
		convert_move (to, convert_to_mode (intermediate, from,
						   unsignedp), unsignedp);
		return;
	      }

	  /* No suitable intermediate mode.
	     Generate what we need with shifts.  */
	  shift_amount = build_int_2 (GET_MODE_BITSIZE (to_mode)
				      - GET_MODE_BITSIZE (from_mode), 0);
	  from = gen_lowpart (to_mode, force_reg (from_mode, from));
	  tmp = expand_shift (LSHIFT_EXPR, to_mode, from, shift_amount,
			      to, unsignedp);
	  tmp = expand_shift (RSHIFT_EXPR, to_mode, tmp, shift_amount,
			      to, unsignedp);
	  if (tmp != to)
	    emit_move_insn (to, tmp);
	  return;
	}
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
	{
	  emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
	{
	  emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
	{
	  emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
	{
	  emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
	{
	  emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
	{
	  emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
	{
	  emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
	{
	  emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
	  return;
	}
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}

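/* Usage sketch (illustrative, not original source): widening a QImode
   value into a fresh SImode register.  UNSIGNEDP selects zero- versus
   sign-extension.  */
#if 0
static rtx
example_widen_qi_to_si (rtx qi_val)
{
  rtx si_reg = gen_reg_rtx (SImode);
  convert_move (si_reg, qi_val, 1);	/* 1: unsigned, so zero-extend */
  return si_reg;
}
#endif
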
/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;
  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do
     is make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
	  && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
	{
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We need to zero extend VAL.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	}

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
	  && GET_MODE_CLASS (oldmode) == MODE_INT
	  && (GET_CODE (x) == CONST_DOUBLE
	      || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
		  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
		       && direct_load[(int) mode])
		      || (GET_CODE (x) == REG
			  && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
						    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
	 X does not need sign- or zero-extension.  This may not be
	 the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
	  && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
	{
	  HOST_WIDE_INT val = INTVAL (x);
	  int width = GET_MODE_BITSIZE (oldmode);

	  /* We must sign or zero-extend in this case.  Start by
	     zero-extending, then sign extend if we need to.  */
	  val &= ((HOST_WIDE_INT) 1 << width) - 1;
	  if (! unsignedp
	      && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
	    val |= (HOST_WIDE_INT) (-1) << width;

	  return GEN_INT (val);
	}

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
\f
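/* Worked example (illustrative): convert_modes (SImode, QImode,
   GEN_INT (-1), 1) takes the CONST_INT path above: the value is first
   zero-extended from the 8-bit OLDMODE, so the result is
   (const_int 255) rather than (const_int -1).  */
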

/* This macro determines the largest unit size that move_by_pieces
   can use.  */

/* MOVE_MAX_PIECES is the number of bytes at a time which we can
   move efficiently, as opposed to MOVE_MAX which is the maximum
   number of bytes we can move with a single instruction.  */

#ifndef MOVE_MAX_PIECES
#define MOVE_MAX_PIECES   MOVE_MAX
#endif

/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Find the mode of the largest move...  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (USE_LOAD_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
	  data.autinc_from = 1;
	  data.explicit_inc_from = -1;
	}
      if (USE_LOAD_POST_INCREMENT (mode) && ! data.autinc_from)
	{
	  data.from_addr = copy_addr_to_reg (from_addr);
	  data.autinc_from = 1;
	  data.explicit_inc_from = 1;
	}
      if (!data.autinc_from && CONSTANT_P (from_addr))
	data.from_addr = copy_addr_to_reg (from_addr);
      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
	  data.autinc_to = 1;
	  data.explicit_inc_to = -1;
	}
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
	{
	  data.to_addr = copy_addr_to_reg (to_addr);
	  data.autinc_to = 1;
	  data.explicit_inc_to = 1;
	}
      if (!data.autinc_to && CONSTANT_P (to_addr))
	data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
	if (GET_MODE_SIZE (tmode) < max_size)
	  mode = tmode;

      if (mode == VOIDmode)
	break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
	  && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
			   GET_MODE_SIZE (mode)))
	n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

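/* Worked example (illustrative, assuming MOVE_MAX == 4 and QI/HI/SI move
   patterns): move_by_pieces_ninsns (11, 4) counts two SImode moves
   (8 bytes), one HImode move (2 bytes) and one QImode move (1 byte),
   so it returns 4.  */
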
1569/* Subroutine of move_by_pieces. Move as many bytes as appropriate
1570 with move instructions for mode MODE. GENFUN is the gen_... function
1571 to make a move insn for that mode. DATA has all the other info. */
1572
1573static void
1574move_by_pieces_1 (genfun, mode, data)
eae4b970 1575 rtx (*genfun) PROTO ((rtx, ...));
bbf6f052
RK
1576 enum machine_mode mode;
1577 struct move_by_pieces *data;
1578{
1579 register int size = GET_MODE_SIZE (mode);
1580 register rtx to1, from1;
1581
1582 while (data->len >= size)
1583 {
1584 if (data->reverse) data->offset -= size;
1585
1586 to1 = (data->autinc_to
38a448ca 1587 ? gen_rtx_MEM (mode, data->to_addr)
effbcc6a
RK
1588 : copy_rtx (change_address (data->to, mode,
1589 plus_constant (data->to_addr,
1590 data->offset))));
e9cf6a97 1591 MEM_IN_STRUCT_P (to1) = data->to_struct;
effbcc6a 1592
db3cf6fb
MS
1593 from1
1594 = (data->autinc_from
38a448ca 1595 ? gen_rtx_MEM (mode, data->from_addr)
db3cf6fb
MS
1596 : copy_rtx (change_address (data->from, mode,
1597 plus_constant (data->from_addr,
1598 data->offset))));
e9cf6a97 1599 MEM_IN_STRUCT_P (from1) = data->from_struct;
bbf6f052 1600
940da324 1601 if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
906c4e36 1602 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
940da324 1603 if (HAVE_PRE_DECREMENT && data->explicit_inc_from < 0)
906c4e36 1604 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
bbf6f052
RK
1605
1606 emit_insn ((*genfun) (to1, from1));
940da324 1607 if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
906c4e36 1608 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
940da324 1609 if (HAVE_POST_INCREMENT && data->explicit_inc_from > 0)
906c4e36 1610 emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
bbf6f052
RK
1611
1612 if (! data->reverse) data->offset += size;
1613
1614 data->len -= size;
1615 }
1616}
1617\f
1618/* Emit code to move a block Y to a block X.
1619 This may be done with string-move instructions,
1620 with multiple scalar move instructions, or with a library call.
1621
1622 Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
1623 with mode BLKmode.
1624 SIZE is an rtx that says how long they are.
1625 ALIGN is the maximum alignment we can assume they have,
e9a25f70 1626 measured in bytes.
bbf6f052 1627
e9a25f70
JL
1628 Return the address of the new block, if memcpy is called and returns it,
1629 0 otherwise. */
1630
1631rtx
bbf6f052
RK
1632emit_block_move (x, y, size, align)
1633 rtx x, y;
1634 rtx size;
1635 int align;
1636{
e9a25f70 1637 rtx retval = 0;
52cf7115
JL
1638#ifdef TARGET_MEM_FUNCTIONS
1639 static tree fn;
1640 tree call_expr, arg_list;
1641#endif
e9a25f70 1642
bbf6f052
RK
1643 if (GET_MODE (x) != BLKmode)
1644 abort ();
1645
1646 if (GET_MODE (y) != BLKmode)
1647 abort ();
1648
1649 x = protect_from_queue (x, 1);
1650 y = protect_from_queue (y, 0);
5d901c31 1651 size = protect_from_queue (size, 0);
bbf6f052
RK
1652
1653 if (GET_CODE (x) != MEM)
1654 abort ();
1655 if (GET_CODE (y) != MEM)
1656 abort ();
1657 if (size == 0)
1658 abort ();
1659
fbe1758d 1660 if (GET_CODE (size) == CONST_INT && MOVE_BY_PIECES_P (INTVAL (size), align))
bbf6f052
RK
1661 move_by_pieces (x, y, INTVAL (size), align);
1662 else
1663 {
1664 /* Try the most limited insn first, because there's no point
1665 including more than one in the machine description unless
1666 the more limited one has some advantage. */
266007a7 1667
0bba3f6f 1668 rtx opalign = GEN_INT (align);
266007a7
RK
1669 enum machine_mode mode;
1670
1671 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
1672 mode = GET_MODE_WIDER_MODE (mode))
bbf6f052 1673 {
266007a7 1674 enum insn_code code = movstr_optab[(int) mode];
266007a7
RK
1675
1676 if (code != CODE_FOR_nothing
803090c4
RK
1677 /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
1678 here because if SIZE is less than the mode mask, as it is
8008b228 1679 returned by the macro, it will definitely be less than the
803090c4 1680 actual mode mask. */
8ca00751
RK
1681 && ((GET_CODE (size) == CONST_INT
1682 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 1683 <= (GET_MODE_MASK (mode) >> 1)))
8ca00751 1684 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
0bba3f6f
RK
1685 && (insn_operand_predicate[(int) code][0] == 0
1686 || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
1687 && (insn_operand_predicate[(int) code][1] == 0
1688 || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
1689 && (insn_operand_predicate[(int) code][3] == 0
1690 || (*insn_operand_predicate[(int) code][3]) (opalign,
1691 VOIDmode)))
bbf6f052 1692 {
1ba1e2a8 1693 rtx op2;
266007a7
RK
1694 rtx last = get_last_insn ();
1695 rtx pat;
1696
1ba1e2a8 1697 op2 = convert_to_mode (mode, size, 1);
0bba3f6f
RK
1698 if (insn_operand_predicate[(int) code][2] != 0
1699 && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
266007a7
RK
1700 op2 = copy_to_mode_reg (mode, op2);
1701
1702 pat = GEN_FCN ((int) code) (x, y, op2, opalign);
1703 if (pat)
1704 {
1705 emit_insn (pat);
e9a25f70 1706 return 0;
266007a7
RK
1707 }
1708 else
1709 delete_insns_since (last);
bbf6f052
RK
1710 }
1711 }

      /* X, Y, or SIZE may have been passed through protect_from_queue.

         It is unsafe to save the value generated by protect_from_queue
         and reuse it later.  Consider what happens if emit_queue is
         called before the return value from protect_from_queue is used.

         Expansion of the CALL_EXPR below will call emit_queue before
         we are finished emitting RTL for argument setup.  So if we are
         not careful we could get the wrong value for an argument.

         To avoid this problem we go ahead and emit code to copy X, Y &
         SIZE into new pseudos.  We can then place those new pseudos
         into an RTL_EXPR and use them later, even after a call to
         emit_queue.

         Note this is not strictly needed for library calls since they
         do not call emit_queue before loading their arguments.  However,
         we may need to have library calls call emit_queue in the future
         since failing to do so could cause problems for targets which
         define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
      x = copy_to_mode_reg (Pmode, XEXP (x, 0));
      y = copy_to_mode_reg (Pmode, XEXP (y, 0));

#ifdef TARGET_MEM_FUNCTIONS
      size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
      size = convert_to_mode (TYPE_MODE (integer_type_node), size,
                              TREE_UNSIGNED (integer_type_node));
      size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
         memcpy in this context.

         This could be a user call to memcpy and the user may wish to
         examine the return value from memcpy.

         For targets where libcalls and normal calls have different
         conventions for returning pointers, we could end up generating
         incorrect code.

         So instead of using a libcall sequence we build up a suitable
         CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
        {
          tree fntype;

          /* This was copied from except.c, I don't know if all this is
             necessary in this context or not.  */
          fn = get_identifier ("memcpy");
          push_obstacks_nochange ();
          end_temporary_allocation ();
          fntype = build_pointer_type (void_type_node);
          fntype = build_function_type (fntype, NULL_TREE);
          fn = build_decl (FUNCTION_DECL, fn, fntype);
          DECL_EXTERNAL (fn) = 1;
          TREE_PUBLIC (fn) = 1;
          DECL_ARTIFICIAL (fn) = 1;
          make_decl_rtl (fn, NULL_PTR, 1);
          assemble_external (fn);
          pop_obstacks ();
        }

      /* We need to make an argument list for the function call.

         memcpy has three arguments, the first two are void * addresses and
         the last is a size_t byte count for the copy.  */
      arg_list
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), x));
      TREE_CHAIN (arg_list)
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node), y));
      TREE_CHAIN (TREE_CHAIN (arg_list))
        = build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                         call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, y, Pmode, x, Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node), size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
   The number of registers to be filled is NREGS.  SIZE indicates the number
   of bytes in the object X.  */

void
move_block_from_reg (regno, x, nregs, size)
     int regno;
     rtx x;
     int nregs;
     int size;
{
  int i;
#ifdef HAVE_store_multiple
  rtx pat;
  rtx last;
#endif
  enum machine_mode mode;

  /* If SIZE is that of a mode no bigger than a word, just use that
     mode's store operation.  */
  if (size <= UNITS_PER_WORD
      && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
    {
      emit_move_insn (change_address (x, mode, NULL),
                      gen_rtx_REG (mode, regno));
      return;
    }

  /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
     to the left before storing to memory.  Note that the previous test
     doesn't handle all cases (e.g. SIZE == 3).  */
  if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
    {
      rtx tem = operand_subword (x, 0, 1, BLKmode);
      rtx shift;

      if (tem == 0)
        abort ();

      shift = expand_shift (LSHIFT_EXPR, word_mode,
                            gen_rtx_REG (word_mode, regno),
                            build_int_2 ((UNITS_PER_WORD - size)
                                         * BITS_PER_UNIT, 0), NULL_RTX, 0);
      emit_move_insn (tem, shift);
      return;
    }
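
  /* Added example (not in the original source): with UNITS_PER_WORD == 4
     and SIZE == 3, the shift above is (4 - 3) * 8 == 8 bits, which
     left-justifies the three significant bytes within the word before
     the store.  */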

  /* See if the machine can do this with a store multiple insn.  */
#ifdef HAVE_store_multiple
  if (HAVE_store_multiple)
    {
      last = get_last_insn ();
      pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
                                GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    {
      rtx tem = operand_subword (x, i, 1, BLKmode);

      if (tem == 0)
        abort ();

      emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
    }
}

/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block SRC in bytes, or -1 if not known.  ALIGN is the known alignment of
   SRC in bits.  */
/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
   the balance will be in what would be the low-order memory addresses, i.e.
   left justified for big endian, right justified for little endian.  This
   happens to be true for the targets currently using this support.  If this
   ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
   would be needed.  */
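/* Illustrative sketch (added; the register numbers are hypothetical): a
   PARALLEL DST for a 16-byte block returned in two 64-bit registers
   could look like

       (parallel [(expr_list (reg:DI 4) (const_int 0))
                  (expr_list (reg:DI 5) (const_int 8))])

   where each element pairs a register with the byte offset within SRC
   that it is loaded from; the loop below consumes one such pair per
   iteration.  */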

void
emit_group_load (dst, orig_src, ssize, align)
     rtx dst, orig_src;
     int align, ssize;
{
  rtx *tmps, src;
  int start, i;

  if (GET_CODE (dst) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (dst, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (dst, 0));

  /* If we won't be loading directly from memory, protect the real source
     from strange tricks we might play.  */
  src = orig_src;
  if (GET_CODE (src) != MEM)
    {
      src = gen_reg_rtx (GET_MODE (orig_src));
      emit_move_insn (src, orig_src);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    {
      enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
      int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
      int bytelen = GET_MODE_SIZE (mode);
      int shift = 0;

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + bytelen > ssize)
        {
          shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
          bytelen = ssize - bytepos;
          if (bytelen <= 0)
            abort ();
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (src) == MEM
          && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          tmps[i] = gen_reg_rtx (mode);
          emit_move_insn (tmps[i],
                          change_address (src, mode,
                                          plus_constant (XEXP (src, 0),
                                                         bytepos)));
        }
      else
        {
          tmps[i] = extract_bit_field (src, bytelen * BITS_PER_UNIT,
                                       bytepos * BITS_PER_UNIT, 1, NULL_RTX,
                                       mode, mode, align, ssize);
        }

      if (BYTES_BIG_ENDIAN && shift)
        {
          expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
                        tmps[i], 0, OPTAB_WIDEN);
        }
    }
  emit_queue ();

  /* Copy the extracted pieces into the proper (probable) hard regs.  */
  for (i = start; i < XVECLEN (dst, 0); i++)
    emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
}
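
/* Usage note (added for exposition): emit_group_load and emit_group_store
   below are converses; a value arriving in a PARALLEL like the one
   sketched above is brought into pseudos with emit_group_load and spilled
   back to a MEM block with emit_group_store, using the same byte
   offsets.  */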

/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
   registers represented by a PARALLEL.  SSIZE represents the total size of
   block DST, or -1 if not known.  ALIGN is the known alignment of DST.  */

void
emit_group_store (orig_dst, src, ssize, align)
     rtx orig_dst, src;
     int ssize, align;
{
  rtx *tmps, dst;
  int start, i;

  if (GET_CODE (src) != PARALLEL)
    abort ();

  /* Check for a NULL entry, used to indicate that the parameter goes
     both on the stack and in registers.  */
  if (XEXP (XVECEXP (src, 0, 0), 0))
    start = 0;
  else
    start = 1;

  tmps = (rtx *) alloca (sizeof (rtx) * XVECLEN (src, 0));

  /* Copy the (probable) hard regs into pseudos.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (src, 0, i), 0);
      tmps[i] = gen_reg_rtx (GET_MODE (reg));
      emit_move_insn (tmps[i], reg);
    }
  emit_queue ();

  /* If we won't be storing directly into memory, protect the real destination
     from strange tricks we might play.  */
  dst = orig_dst;
  if (GET_CODE (dst) == PARALLEL)
    {
      rtx temp;

      /* We can get a PARALLEL dst if there is a conditional expression in
         a return statement.  In that case, the dst and src are the same,
         so no action is necessary.  */
      if (rtx_equal_p (dst, src))
        return;

      /* It is unclear if we can ever reach here, but we may as well handle
         it.  Allocate a temporary, and split this into a store/load to/from
         the temporary.  */

      temp = assign_stack_temp (GET_MODE (dst), ssize, 0);
      emit_group_store (temp, src, ssize, align);
      emit_group_load (dst, temp, ssize, align);
      return;
    }
  else if (GET_CODE (dst) != MEM)
    {
      dst = gen_reg_rtx (GET_MODE (orig_dst));
      /* Make life a bit easier for combine.  */
      emit_move_insn (dst, const0_rtx);
    }
  else if (! MEM_IN_STRUCT_P (dst))
    {
      /* store_bit_field requires that memory operations have
         mem_in_struct_p set; we might not.  */

      dst = copy_rtx (orig_dst);
      MEM_SET_IN_STRUCT_P (dst, 1);
    }

  /* Process the pieces.  */
  for (i = start; i < XVECLEN (src, 0); i++)
    {
      int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
      enum machine_mode mode = GET_MODE (tmps[i]);
      int bytelen = GET_MODE_SIZE (mode);

      /* Handle trailing fragments that run over the size of the struct.  */
      if (ssize >= 0 && bytepos + bytelen > ssize)
        {
          if (BYTES_BIG_ENDIAN)
            {
              int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
              expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
                            tmps[i], 0, OPTAB_WIDEN);
            }
          bytelen = ssize - bytepos;
        }

      /* Optimize the access just a bit.  */
      if (GET_CODE (dst) == MEM
          && align * BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
          && bytepos * BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
          && bytelen == GET_MODE_SIZE (mode))
        {
          emit_move_insn (change_address (dst, mode,
                                          plus_constant (XEXP (dst, 0),
                                                         bytepos)),
                          tmps[i]);
        }
      else
        {
          store_bit_field (dst, bytelen * BITS_PER_UNIT,
                           bytepos * BITS_PER_UNIT,
                           mode, tmps[i], align, ssize);
        }
    }
  emit_queue ();

  /* Copy from the pseudo into the (probable) hard reg.  */
  if (GET_CODE (dst) == REG)
    emit_move_insn (orig_dst, dst);
}

/* Generate code to copy a BLKmode object of TYPE out of a
   set of registers starting with SRCREG into TGTBLK.  If TGTBLK
   is null, a stack temporary is created.  TGTBLK is returned.

   The primary purpose of this routine is to handle functions
   that return BLKmode structures in registers.  Some machines
   (the PA for example) want to return all small structures
   in registers regardless of the structure's alignment.  */

rtx
copy_blkmode_from_reg (tgtblk, srcreg, type)
     rtx tgtblk;
     rtx srcreg;
     tree type;
{
  int bytes = int_size_in_bytes (type);
  rtx src = NULL, dst = NULL;
  int bitsize = MIN (TYPE_ALIGN (type), (unsigned int) BITS_PER_WORD);
  int bitpos, xbitpos, big_endian_correction = 0;

  if (tgtblk == 0)
    {
      tgtblk = assign_stack_temp (BLKmode, bytes, 0);
      MEM_SET_IN_STRUCT_P (tgtblk, AGGREGATE_TYPE_P (type));
      preserve_temp_slots (tgtblk);
    }

  /* This code assumes srcreg is at least a full word.  If it isn't,
     copy it into a new pseudo which is a full word.  */
  if (GET_MODE (srcreg) != BLKmode
      && GET_MODE_SIZE (GET_MODE (srcreg)) < UNITS_PER_WORD)
    srcreg = convert_to_mode (word_mode, srcreg,
                              TREE_UNSIGNED (type));

  /* Structures whose size is not a multiple of a word are aligned
     to the least significant byte (to the right).  On a BYTES_BIG_ENDIAN
     machine, this means we must skip the empty high order bytes when
     calculating the bit offset.  */
  if (BYTES_BIG_ENDIAN && bytes % UNITS_PER_WORD)
    big_endian_correction = (BITS_PER_WORD - ((bytes % UNITS_PER_WORD)
                                              * BITS_PER_UNIT));
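
  /* Added example: with BITS_PER_WORD == 32 and a 3-byte structure,
     bytes % UNITS_PER_WORD == 3, so big_endian_correction is
     32 - 3 * 8 == 8; the extraction below then skips the unused
     high-order byte of the first source word.  */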

  /* Copy the structure BITSIZE bits at a time.

     We could probably emit more efficient code for machines
     which do not use strict alignment, but it doesn't seem
     worth the effort at the current time.  */
  for (bitpos = 0, xbitpos = big_endian_correction;
       bitpos < bytes * BITS_PER_UNIT;
       bitpos += bitsize, xbitpos += bitsize)
    {
      /* We need a new source operand each time xbitpos is on a
         word boundary and when xbitpos == big_endian_correction
         (the first time through).  */
      if (xbitpos % BITS_PER_WORD == 0
          || xbitpos == big_endian_correction)
        src = operand_subword_force (srcreg,
                                     xbitpos / BITS_PER_WORD,
                                     BLKmode);

      /* We need a new destination operand each time bitpos is on
         a word boundary.  */
      if (bitpos % BITS_PER_WORD == 0)
        dst = operand_subword (tgtblk, bitpos / BITS_PER_WORD, 1, BLKmode);

      /* Use xbitpos for the source extraction (right justified) and
         bitpos for the destination store (left justified).  */
      store_bit_field (dst, bitsize, bitpos % BITS_PER_WORD, word_mode,
                       extract_bit_field (src, bitsize,
                                          xbitpos % BITS_PER_WORD, 1,
                                          NULL_RTX, word_mode,
                                          word_mode,
                                          bitsize / BITS_PER_UNIT,
                                          BITS_PER_WORD),
                       bitsize / BITS_PER_UNIT, BITS_PER_WORD);
    }
  return tgtblk;
}

/* Add a USE expression for REG to the (possibly empty) list pointed
   to by CALL_FUSAGE.  REG must denote a hard register.  */

void
use_reg (call_fusage, reg)
     rtx *call_fusage, reg;
{
  if (GET_CODE (reg) != REG
      || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
    abort ();

  *call_fusage
    = gen_rtx_EXPR_LIST (VOIDmode,
                         gen_rtx_USE (VOIDmode, reg), *call_fusage);
}

/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
   starting at REGNO.  All of these registers must be hard registers.  */

void
use_regs (call_fusage, regno, nregs)
     rtx *call_fusage;
     int regno;
     int nregs;
{
  int i;

  if (regno + nregs > FIRST_PSEUDO_REGISTER)
    abort ();

  for (i = 0; i < nregs; i++)
    use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
}
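
/* Example (added for exposition; the register numbers are hypothetical):
   a call passing an argument in hard regs 4 and 5 could record that with
   use_regs (&call_fusage, 4, 2), which links (use (reg 4)) and
   (use (reg 5)) onto CALL_FUSAGE.  */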

/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
   PARALLEL REGS.  This is for calls that pass values in multiple
   non-contiguous locations.  The Irix 6 ABI has examples of this.  */

void
use_group_regs (call_fusage, regs)
     rtx *call_fusage;
     rtx regs;
{
  int i;

  for (i = 0; i < XVECLEN (regs, 0); i++)
    {
      rtx reg = XEXP (XVECEXP (regs, 0, i), 0);

      /* A NULL entry means the parameter goes both on the stack and in
         registers.  This can also be a MEM for targets that pass values
         partially on the stack and partially in registers.  */
      if (reg != 0 && GET_CODE (reg) == REG)
        use_reg (call_fusage, reg);
    }
}
\f
/* Generate several move instructions to clear LEN bytes of block TO.
   (A MEM rtx with BLKmode).  The caller must pass TO through
   protect_from_queue before calling.  ALIGN (in bytes) is maximum alignment
   we can assume.  */

static void
clear_by_pieces (to, len, align)
     rtx to;
     int len, align;
{
  struct clear_by_pieces data;
  rtx to_addr = XEXP (to, 0);
  int max_size = MOVE_MAX_PIECES + 1;
  enum machine_mode mode = VOIDmode, tmode;
  enum insn_code icode;

  data.offset = 0;
  data.to_addr = to_addr;
  data.to = to;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);

  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse)
    data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!data.autinc_to
      && move_by_pieces_ninsns (len, align) > 2)
    {
      /* Determine the main mode we'll be using.  */
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (USE_STORE_PRE_DECREMENT (mode) && data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
      if (USE_STORE_POST_INCREMENT (mode) && ! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        clear_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len != 0)
    abort ();
}
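
/* Worked example (added; assumes a typical 32-bit target with
   MOVE_MAX_PIECES >= 4): clearing LEN == 7 with ALIGN >= 4 runs the loop
   above three times, emitting one SImode, one HImode, and one QImode
   zero store (4 + 2 + 1 bytes).  */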

/* Subroutine of clear_by_pieces.  Clear as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
clear_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct clear_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1;

  while (data->len >= size)
    {
      if (data->reverse)
        data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx_MEM (mode, data->to_addr)
             : copy_rtx (change_address (data->to, mode,
                                         plus_constant (data->to_addr,
                                                        data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      if (HAVE_PRE_DECREMENT && data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));

      emit_insn ((*genfun) (to1, const0_rtx));
      if (HAVE_POST_INCREMENT && data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));

      if (! data->reverse)
        data->offset += size;

      data->len -= size;
    }
}
\f
/* Write zeros through the storage of OBJECT.
   If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
   the maximum alignment we can assume, measured in bytes.

   If we call a function that returns the length of the block, return it.  */

rtx
clear_storage (object, size, align)
     rtx object;
     rtx size;
     int align;
{
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif
  rtx retval = 0;

  if (GET_MODE (object) == BLKmode)
    {
      object = protect_from_queue (object, 1);
      size = protect_from_queue (size, 0);

      if (GET_CODE (size) == CONST_INT
          && MOVE_BY_PIECES_P (INTVAL (size), align))
        clear_by_pieces (object, INTVAL (size), align);

      else
        {
          /* Try the most limited insn first, because there's no point
             including more than one in the machine description unless
             the more limited one has some advantage.  */

          rtx opalign = GEN_INT (align);
          enum machine_mode mode;

          for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
               mode = GET_MODE_WIDER_MODE (mode))
            {
              enum insn_code code = clrstr_optab[(int) mode];

              if (code != CODE_FOR_nothing
                  /* We don't need MODE to be narrower than
                     BITS_PER_HOST_WIDE_INT here because if SIZE is less than
                     the mode mask, as it is returned by the macro, it will
                     definitely be less than the actual mode mask.  */
                  && ((GET_CODE (size) == CONST_INT
                       && ((unsigned HOST_WIDE_INT) INTVAL (size)
                           <= (GET_MODE_MASK (mode) >> 1)))
                      || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
                  && (insn_operand_predicate[(int) code][0] == 0
                      || (*insn_operand_predicate[(int) code][0]) (object,
                                                                   BLKmode))
                  && (insn_operand_predicate[(int) code][2] == 0
                      || (*insn_operand_predicate[(int) code][2]) (opalign,
                                                                   VOIDmode)))
                {
                  rtx op1;
                  rtx last = get_last_insn ();
                  rtx pat;

                  op1 = convert_to_mode (mode, size, 1);
                  if (insn_operand_predicate[(int) code][1] != 0
                      && ! (*insn_operand_predicate[(int) code][1]) (op1,
                                                                     mode))
                    op1 = copy_to_mode_reg (mode, op1);

                  pat = GEN_FCN ((int) code) (object, op1, opalign);
                  if (pat)
                    {
                      emit_insn (pat);
                      return 0;
                    }
                  else
                    delete_insns_since (last);
                }
            }

          /* OBJECT or SIZE may have been passed through protect_from_queue.

             It is unsafe to save the value generated by protect_from_queue
             and reuse it later.  Consider what happens if emit_queue is
             called before the return value from protect_from_queue is used.

             Expansion of the CALL_EXPR below will call emit_queue before
             we are finished emitting RTL for argument setup.  So if we are
             not careful we could get the wrong value for an argument.

             To avoid this problem we go ahead and emit code to copy OBJECT
             and SIZE into new pseudos.  We can then place those new pseudos
             into an RTL_EXPR and use them later, even after a call to
             emit_queue.

             Note this is not strictly needed for library calls since they
             do not call emit_queue before loading their arguments.  However,
             we may need to have library calls call emit_queue in the future
             since failing to do so could cause problems for targets which
             define SMALL_REGISTER_CLASSES and pass arguments in registers.  */
          object = copy_to_mode_reg (Pmode, XEXP (object, 0));

#ifdef TARGET_MEM_FUNCTIONS
          size = copy_to_mode_reg (TYPE_MODE (sizetype), size);
#else
          size = convert_to_mode (TYPE_MODE (integer_type_node), size,
                                  TREE_UNSIGNED (integer_type_node));
          size = copy_to_mode_reg (TYPE_MODE (integer_type_node), size);
#endif

#ifdef TARGET_MEM_FUNCTIONS
          /* It is incorrect to use the libcall calling conventions to call
             memset in this context.

             This could be a user call to memset and the user may wish to
             examine the return value from memset.

             For targets where libcalls and normal calls have different
             conventions for returning pointers, we could end up generating
             incorrect code.

             So instead of using a libcall sequence we build up a suitable
             CALL_EXPR and expand the call in the normal fashion.  */
          if (fn == NULL_TREE)
            {
              tree fntype;

              /* This was copied from except.c, I don't know if all this is
                 necessary in this context or not.  */
              fn = get_identifier ("memset");
              push_obstacks_nochange ();
              end_temporary_allocation ();
              fntype = build_pointer_type (void_type_node);
              fntype = build_function_type (fntype, NULL_TREE);
              fn = build_decl (FUNCTION_DECL, fn, fntype);
              DECL_EXTERNAL (fn) = 1;
              TREE_PUBLIC (fn) = 1;
              DECL_ARTIFICIAL (fn) = 1;
              make_decl_rtl (fn, NULL_PTR, 1);
              assemble_external (fn);
              pop_obstacks ();
            }

          /* We need to make an argument list for the function call.

             memset has three arguments: the first is a void * address, the
             second an integer with the initialization value, and the last a
             size_t byte count.  */
          arg_list
            = build_tree_list (NULL_TREE,
                               make_tree (build_pointer_type (void_type_node),
                                          object));
          TREE_CHAIN (arg_list)
            = build_tree_list (NULL_TREE,
                               make_tree (integer_type_node, const0_rtx));
          TREE_CHAIN (TREE_CHAIN (arg_list))
            = build_tree_list (NULL_TREE, make_tree (sizetype, size));
          TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

          /* Now we have to build up the CALL_EXPR itself.  */
          call_expr = build1 (ADDR_EXPR,
                              build_pointer_type (TREE_TYPE (fn)), fn);
          call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                             call_expr, arg_list, NULL_TREE);
          TREE_SIDE_EFFECTS (call_expr) = 1;

          retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
          emit_library_call (bzero_libfunc, 0,
                             VOIDmode, 2, object, Pmode, size,
                             TYPE_MODE (integer_type_node));
#endif
        }
    }
  else
    emit_move_insn (object, CONST0_RTX (GET_MODE (object)));

  return retval;
}
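
/* Usage sketch (added): a caller zeroing a 32-byte, word-aligned BLKmode
   MEM would write

       clear_storage (object, GEN_INT (32), 4);

   and normally gets clear_by_pieces or a clrstr pattern rather than the
   memset/bzero fallback.  */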

/* Generate code to copy Y into X.
   Both Y and X must have the same mode, except that
   Y can be a constant with VOIDmode.
   This mode cannot be BLKmode; use emit_block_move for that.

   Return the last instruction emitted.  */

rtx
emit_move_insn (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);

  if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
    abort ();

  /* Never force constant_p_rtx to memory.  */
  if (GET_CODE (y) == CONSTANT_P_RTX)
    ;
  else if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
    y = force_const_mem (mode, y);

  /* If X or Y are memory references, verify that their addresses are valid
     for the machine.  */
  if (GET_CODE (x) == MEM
      && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
           && ! push_operand (x, GET_MODE (x)))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
    x = change_address (x, VOIDmode, XEXP (x, 0));

  if (GET_CODE (y) == MEM
      && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
          || (flag_force_addr
              && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
    y = change_address (y, VOIDmode, XEXP (y, 0));

  if (mode == BLKmode)
    abort ();

  return emit_move_insn_1 (x, y);
}

/* Low level part of emit_move_insn.
   Called just like emit_move_insn, but assumes X and Y
   are basically valid.  */

rtx
emit_move_insn_1 (x, y)
     rtx x, y;
{
  enum machine_mode mode = GET_MODE (x);
  enum machine_mode submode;
  enum mode_class class = GET_MODE_CLASS (mode);
  int i;

  if (mode >= MAX_MACHINE_MODE)
    abort ();

  if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
    return
      emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));

  /* Expand complex moves by moving real part and imag part, if possible.  */
  else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
           && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
                                                    * BITS_PER_UNIT),
                                                   (class == MODE_COMPLEX_INT
                                                    ? MODE_INT : MODE_FLOAT),
                                                   0))
           && (mov_optab->handlers[(int) submode].insn_code
               != CODE_FOR_nothing))
    {
      /* Don't split destination if it is a stack push.  */
      int stack = push_operand (x, GET_MODE (x));

      /* If this is a stack, push the highpart first, so it
         will be in the argument order.

         In that case, change_address is used only to convert
         the mode, not to change the address.  */
      if (stack)
        {
          /* Note that the real part always precedes the imag part in memory
             regardless of machine's endianness.  */
#ifdef STACK_GROWS_DOWNWARD
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, (XEXP (x, 0))),
                      gen_imagpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, (XEXP (x, 0))),
                      gen_realpart (submode, y)));
#else
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, (XEXP (x, 0))),
                      gen_realpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_rtx_MEM (submode, (XEXP (x, 0))),
                      gen_imagpart (submode, y)));
#endif
        }
      else
        {
          /* Show the output dies here.  This is necessary for pseudos;
             hard regs shouldn't appear here except as return values.
             We never want to emit such a clobber after reload.  */
          if (x != y
              && ! (reload_in_progress || reload_completed))
            {
              emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
            }

          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_realpart (submode, x), gen_realpart (submode, y)));
          emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
                     (gen_imagpart (submode, x), gen_imagpart (submode, y)));
        }

      return get_last_insn ();
    }

  /* This will handle any multi-word mode that lacks a move_insn pattern.
     However, you will get better code if you define such patterns,
     even if they must turn into multiple assembler instructions.  */
  else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
    {
      rtx last_insn = 0;

#ifdef PUSH_ROUNDING

      /* If X is a push on the stack, do the push now and replace
         X with a reference to the stack pointer.  */
      if (push_operand (x, GET_MODE (x)))
        {
          anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
          x = change_address (x, VOIDmode, stack_pointer_rtx);
        }
#endif

      /* Show the output dies here.  This is necessary for pseudos;
         hard regs shouldn't appear here except as return values.
         We never want to emit such a clobber after reload.  */
      if (x != y
          && ! (reload_in_progress || reload_completed))
        {
          emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
        }

      for (i = 0;
           i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
           i++)
        {
          rtx xpart = operand_subword (x, i, 1, mode);
          rtx ypart = operand_subword (y, i, 1, mode);

          /* If we can't get a part of Y, put Y into memory if it is a
             constant.  Otherwise, force it into a register.  If we still
             can't get a part of Y, abort.  */
          if (ypart == 0 && CONSTANT_P (y))
            {
              y = force_const_mem (mode, y);
              ypart = operand_subword (y, i, 1, mode);
            }
          else if (ypart == 0)
            ypart = operand_subword_force (y, i, mode);

          if (xpart == 0 || ypart == 0)
            abort ();

          last_insn = emit_move_insn (xpart, ypart);
        }

      return last_insn;
    }
  else
    abort ();
}
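
/* Illustrative note (added): on a 32-bit target lacking a DImode move
   pattern, the multi-word branch above splits the copy into two SImode
   word moves obtained with operand_subword, preceded by a CLOBBER of the
   destination so flow analysis sees the whole value die there.  */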
\f
/* Pushing data onto the stack.  */

/* Push a block of length SIZE (perhaps variable)
   and return an rtx to address the beginning of the block.
   Note that it is not possible for the value returned to be a QUEUED.
   The value may be virtual_outgoing_args_rtx.

   EXTRA is the number of bytes of padding to push in addition to SIZE.
   BELOW nonzero means this padding comes at low addresses;
   otherwise, the padding comes at high addresses.  */

rtx
push_block (size, extra, below)
     rtx size;
     int extra, below;
{
  register rtx temp;

  size = convert_modes (Pmode, ptr_mode, size, 1);
  if (CONSTANT_P (size))
    anti_adjust_stack (plus_constant (size, extra));
  else if (GET_CODE (size) == REG && extra == 0)
    anti_adjust_stack (size);
  else
    {
      rtx temp = copy_to_mode_reg (Pmode, size);
      if (extra != 0)
        temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
                             temp, 0, OPTAB_LIB_WIDEN);
      anti_adjust_stack (temp);
    }

#if defined (STACK_GROWS_DOWNWARD) \
    || (defined (ARGS_GROW_DOWNWARD) \
        && !defined (ACCUMULATE_OUTGOING_ARGS))

  /* Return the lowest stack address when STACK or ARGS grow downward and
     we are not accumulating outgoing arguments (the c4x port uses such
     conventions).  */
  temp = virtual_outgoing_args_rtx;
  if (extra != 0 && below)
    temp = plus_constant (temp, extra);
#else
  if (GET_CODE (size) == CONST_INT)
    temp = plus_constant (virtual_outgoing_args_rtx,
                          - INTVAL (size) - (below ? 0 : extra));
  else if (extra != 0 && !below)
    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                         negate_rtx (Pmode, plus_constant (size, extra)));
  else
    temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
                         negate_rtx (Pmode, size));
#endif

  return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
}

rtx
gen_push_operand ()
{
  return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
}

/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
   block of SIZE bytes.  */

static rtx
get_push_address (size)
     int size;
{
  register rtx temp;

  if (STACK_PUSH_CODE == POST_DEC)
    temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else if (STACK_PUSH_CODE == POST_INC)
    temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
  else
    temp = stack_pointer_rtx;

  return copy_to_reg (temp);
}
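
/* Added note: with STACK_PUSH_CODE == POST_DEC the stack pointer is
   decremented after each store, so a block just pushed begins at
   stack_pointer + SIZE; with the pre-modify push codes the stack pointer
   already addresses the data, hence the plain stack_pointer_rtx case.  */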

/* Generate code to push X onto the stack, assuming it has mode MODE and
   type TYPE.
   MODE is redundant except when X is a CONST_INT (since they don't
   carry mode info).
   SIZE is an rtx for the size of data to be copied (in bytes),
   needed only if X is BLKmode.

   ALIGN (in bytes) is maximum alignment we can assume.

   If PARTIAL and REG are both nonzero, then copy that many of the first
   words of X into registers starting with REG, and push the rest of X.
   The amount of space pushed is decreased by PARTIAL words,
   rounded *down* to a multiple of PARM_BOUNDARY.
   REG must be a hard register in this case.
   If REG is zero but PARTIAL is not, take all other actions for an
   argument partially in registers, but do not actually load any
   registers.

   EXTRA is the amount in bytes of extra space to leave next to this arg.
   This is ignored if an argument block has already been allocated.

   On a machine that lacks real push insns, ARGS_ADDR is the address of
   the bottom of the argument block for this call.  We use indexing off there
   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
   argument block has not been preallocated.

   ARGS_SO_FAR is the size of args previously pushed for this call.

   REG_PARM_STACK_SPACE is nonzero if functions require stack space
   for arguments passed in registers.  If nonzero, it will be the number
   of bytes required.  */

void
emit_push_insn (x, mode, type, size, align, partial, reg, extra,
                args_addr, args_so_far, reg_parm_stack_space)
     register rtx x;
     enum machine_mode mode;
     tree type;
     rtx size;
     int align;
     int partial;
     rtx reg;
     int extra;
     rtx args_addr;
     rtx args_so_far;
     int reg_parm_stack_space;
{
  rtx xinner;
  enum direction stack_direction
#ifdef STACK_GROWS_DOWNWARD
    = downward;
#else
    = upward;
#endif

  /* Decide where to pad the argument: `downward' for below,
     `upward' for above, or `none' for don't pad it.
     Default is below for small data on big-endian machines; else above.  */
  enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);

  /* Invert direction if stack is post-update.  */
  if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
    if (where_pad != none)
      where_pad = (where_pad == downward ? upward : downward);

  xinner = x = protect_from_queue (x, 0);

  if (mode == BLKmode)
    {
      /* Copy a block into the stack, entirely or partially.  */

      register rtx temp;
      int used = partial * UNITS_PER_WORD;
      int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
      int skip;

      if (size == 0)
        abort ();

      used -= offset;

      /* USED is now the # of bytes we need not copy to the stack
         because registers will take care of them.  */

      if (partial != 0)
        xinner = change_address (xinner, BLKmode,
                                 plus_constant (XEXP (xinner, 0), used));

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : used;

#ifdef PUSH_ROUNDING
      /* Do it with several push insns if that doesn't take lots of insns
         and if there is no difficulty with push insns that skip bytes
         on the stack for alignment purposes.  */
      if (args_addr == 0
          && GET_CODE (size) == CONST_INT
          && skip == 0
          && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size) - used, align))
          /* Here we avoid the case of a structure whose weak alignment
             forces many pushes of a small amount of data,
             and such small pushes do rounding that causes trouble.  */
          && ((! SLOW_UNALIGNED_ACCESS)
              || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
              || PUSH_ROUNDING (align) == align)
          && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
        {
          /* Push padding now if padding above and stack grows down,
             or if padding below and stack grows up.
             But if space already allocated, this has already been done.  */
          if (extra && args_addr == 0
              && where_pad != none && where_pad != stack_direction)
            anti_adjust_stack (GEN_INT (extra));

          move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
                          INTVAL (size) - used, align);

          if (current_function_check_memory_usage && ! in_check_memory_usage)
            {
              rtx temp;

              in_check_memory_usage = 1;
              temp = get_push_address (INTVAL (size) - used);
              if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
                emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
                                   temp, Pmode,
                                   XEXP (xinner, 0), Pmode,
                                   GEN_INT (INTVAL (size) - used),
                                   TYPE_MODE (sizetype));
              else
                emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                                   temp, Pmode,
                                   GEN_INT (INTVAL (size) - used),
                                   TYPE_MODE (sizetype),
                                   GEN_INT (MEMORY_USE_RW),
                                   TYPE_MODE (integer_type_node));
              in_check_memory_usage = 0;
            }
        }
      else
#endif /* PUSH_ROUNDING */
        {
          /* Otherwise make space on the stack and copy the data
             to the address of that space.  */

          /* Deduct words put into registers from the size we must copy.  */
          if (partial != 0)
            {
              if (GET_CODE (size) == CONST_INT)
                size = GEN_INT (INTVAL (size) - used);
              else
                size = expand_binop (GET_MODE (size), sub_optab, size,
                                     GEN_INT (used), NULL_RTX, 0,
                                     OPTAB_LIB_WIDEN);
            }

          /* Get the address of the stack space.
             In this case, we do not deal with EXTRA separately.
             A single stack adjust will do.  */
          if (! args_addr)
            {
              temp = push_block (size, extra, where_pad == downward);
              extra = 0;
            }
          else if (GET_CODE (args_so_far) == CONST_INT)
            temp = memory_address (BLKmode,
                                   plus_constant (args_addr,
                                                  skip + INTVAL (args_so_far)));
          else
            temp = memory_address (BLKmode,
                                   plus_constant (gen_rtx_PLUS (Pmode,
                                                                args_addr,
                                                                args_so_far),
                                                  skip));
          if (current_function_check_memory_usage && ! in_check_memory_usage)
            {
              rtx target;

              in_check_memory_usage = 1;
              target = copy_to_reg (temp);
              if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
                emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
                                   target, Pmode,
                                   XEXP (xinner, 0), Pmode,
                                   size, TYPE_MODE (sizetype));
              else
                emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                                   target, Pmode,
                                   size, TYPE_MODE (sizetype),
                                   GEN_INT (MEMORY_USE_RW),
                                   TYPE_MODE (integer_type_node));
              in_check_memory_usage = 0;
            }

          /* TEMP is the address of the block.  Copy the data there.  */
          if (GET_CODE (size) == CONST_INT
              && (MOVE_BY_PIECES_P ((unsigned) INTVAL (size), align)))
            {
              move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
                              INTVAL (size), align);
              goto ret;
            }
          else
            {
              rtx opalign = GEN_INT (align);
              enum machine_mode mode;
              rtx target = gen_rtx_MEM (BLKmode, temp);

              for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
                   mode != VOIDmode;
                   mode = GET_MODE_WIDER_MODE (mode))
                {
                  enum insn_code code = movstr_optab[(int) mode];

                  if (code != CODE_FOR_nothing
                      && ((GET_CODE (size) == CONST_INT
                           && ((unsigned HOST_WIDE_INT) INTVAL (size)
                               <= (GET_MODE_MASK (mode) >> 1)))
                          || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
                      && (insn_operand_predicate[(int) code][0] == 0
                          || ((*insn_operand_predicate[(int) code][0])
                              (target, BLKmode)))
                      && (insn_operand_predicate[(int) code][1] == 0
                          || ((*insn_operand_predicate[(int) code][1])
                              (xinner, BLKmode)))
                      && (insn_operand_predicate[(int) code][3] == 0
                          || ((*insn_operand_predicate[(int) code][3])
                              (opalign, VOIDmode))))
                    {
                      rtx op2 = convert_to_mode (mode, size, 1);
                      rtx last = get_last_insn ();
                      rtx pat;

                      if (insn_operand_predicate[(int) code][2] != 0
                          && ! ((*insn_operand_predicate[(int) code][2])
                                (op2, mode)))
                        op2 = copy_to_mode_reg (mode, op2);

                      pat = GEN_FCN ((int) code) (target, xinner,
                                                  op2, opalign);
                      if (pat)
                        {
                          emit_insn (pat);
                          goto ret;
                        }
                      else
                        delete_insns_since (last);
                    }
                }
            }

#ifndef ACCUMULATE_OUTGOING_ARGS
          /* If the source is referenced relative to the stack pointer,
             copy it to another register to stabilize it.  We do not need
             to do this if we know that we won't be changing sp.  */

          if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
              || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
            temp = copy_to_reg (temp);
#endif

          /* Make inhibit_defer_pop nonzero around the library call
             to force it to pop the bcopy-arguments right away.  */
          NO_DEFER_POP;
#ifdef TARGET_MEM_FUNCTIONS
          emit_library_call (memcpy_libfunc, 0,
                             VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
                             convert_to_mode (TYPE_MODE (sizetype),
                                              size, TREE_UNSIGNED (sizetype)),
                             TYPE_MODE (sizetype));
#else
          emit_library_call (bcopy_libfunc, 0,
                             VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
                             convert_to_mode (TYPE_MODE (integer_type_node),
                                              size,
                                              TREE_UNSIGNED (integer_type_node)),
                             TYPE_MODE (integer_type_node));
#endif
          OK_DEFER_POP;
        }
    }
  else if (partial > 0)
    {
      /* Scalar partly in registers.  */

      int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
      int i;
      int not_stack;
      /* # words of start of argument
         that we must make space for but need not store.  */
      int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
      int args_offset = INTVAL (args_so_far);
      int skip;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

      /* If we make space by pushing it, we might as well push
         the real data.  Otherwise, we can leave OFFSET nonzero
         and leave the space uninitialized.  */
      if (args_addr == 0)
        offset = 0;

      /* Now NOT_STACK gets the number of words that we don't need to
         allocate on the stack.  */
      not_stack = partial - offset;

      /* If the partial register-part of the arg counts in its stack size,
         skip the part of stack space corresponding to the registers.
         Otherwise, start copying to the beginning of the stack space,
         by setting SKIP to 0.  */
      skip = (reg_parm_stack_space == 0) ? 0 : not_stack;

      if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
        x = validize_mem (force_const_mem (mode, x));

      /* If X is a hard register in a non-integer mode, copy it into a pseudo;
         SUBREGs of such registers are not allowed.  */
      if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
           && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
        x = copy_to_reg (x);

      /* Loop over all the words allocated on the stack for this arg.  */
      /* We can do it by words, because any scalar bigger than a word
         has a size a multiple of a word.  */
#ifndef PUSH_ARGS_REVERSED
      for (i = not_stack; i < size; i++)
#else
      for (i = size - 1; i >= not_stack; i--)
#endif
        if (i >= not_stack + offset)
          emit_push_insn (operand_subword_force (x, i, mode),
                          word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
                          0, args_addr,
                          GEN_INT (args_offset + ((i - not_stack + skip)
                                                  * UNITS_PER_WORD)),
                          reg_parm_stack_space);
    }
  else
    {
      rtx addr;
      rtx target = NULL_RTX;

      /* Push padding now if padding above and stack grows down,
         or if padding below and stack grows up.
         But if space already allocated, this has already been done.  */
      if (extra && args_addr == 0
          && where_pad != none && where_pad != stack_direction)
        anti_adjust_stack (GEN_INT (extra));

#ifdef PUSH_ROUNDING
      if (args_addr == 0)
        addr = gen_push_operand ();
      else
#endif
        {
          if (GET_CODE (args_so_far) == CONST_INT)
            addr
              = memory_address (mode,
                                plus_constant (args_addr,
                                               INTVAL (args_so_far)));
          else
            addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
                                                       args_so_far));
          target = addr;
        }

      emit_move_insn (gen_rtx_MEM (mode, addr), x);

      if (current_function_check_memory_usage && ! in_check_memory_usage)
        {
          in_check_memory_usage = 1;
          if (target == 0)
            target = get_push_address (GET_MODE_SIZE (mode));

          if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
            emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
                               target, Pmode,
                               XEXP (x, 0), Pmode,
                               GEN_INT (GET_MODE_SIZE (mode)),
                               TYPE_MODE (sizetype));
          else
            emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
                               target, Pmode,
                               GEN_INT (GET_MODE_SIZE (mode)),
                               TYPE_MODE (sizetype),
                               GEN_INT (MEMORY_USE_RW),
                               TYPE_MODE (integer_type_node));
          in_check_memory_usage = 0;
        }
    }

 ret:
  /* If part should go in registers, copy that part
     into the appropriate registers.  Do this now, at the end,
     since mem-to-mem copies above may do function calls.  */
  if (partial > 0 && reg != 0)
    {
      /* Handle calls that pass values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (reg) == PARALLEL)
        emit_group_load (reg, x, -1, align);  /* ??? size?  */
      else
        move_block_to_reg (REGNO (reg), x, partial, mode);
    }

  if (extra && args_addr == 0 && where_pad == stack_direction)
    anti_adjust_stack (GEN_INT (extra));
}
\f
/* Expand an assignment that stores the value of FROM into TO.
   If WANT_VALUE is nonzero, return an rtx for the value of TO.
   (This may contain a QUEUED rtx;
   if the value is constant, this rtx is a constant.)
   Otherwise, the returned value is NULL_RTX.

   SUGGEST_REG is no longer actually used.
   It used to mean, copy the value through a register
   and return that register, if that is possible.
   We now use WANT_VALUE to decide whether to do this.  */

rtx
expand_assignment (to, from, want_value, suggest_reg)
     tree to, from;
     int want_value;
     int suggest_reg;
{
  register rtx to_rtx = 0;
  rtx result;

  /* Don't crash if the lhs of the assignment was erroneous.  */

  if (TREE_CODE (to) == ERROR_MARK)
    {
      result = expand_expr (from, NULL_RTX, VOIDmode, 0);
      return want_value ? result : NULL_RTX;
    }

  /* Assignment of a structure component needs special treatment
     if the structure component's rtx is not simply a MEM.
     Assignment of an array element at a constant index, and assignment of
     an array element in an unaligned packed structure field, have the same
     problem.  */

  if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
      || TREE_CODE (to) == ARRAY_REF)
    {
      enum machine_mode mode1;
      int bitsize;
      int bitpos;
      tree offset;
      int unsignedp;
      int volatilep = 0;
      tree tem;
      int alignment;

      push_temp_slots ();
      tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
                                 &unsignedp, &volatilep, &alignment);

      /* If we are going to use store_bit_field and extract_bit_field,
         make sure to_rtx will be safe for multiple use.  */

      if (mode1 == VOIDmode && want_value)
        tem = stabilize_reference (tem);

      to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
      if (offset != 0)
        {
          rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

          if (GET_CODE (to_rtx) != MEM)
            abort ();

          if (GET_MODE (offset_rtx) != ptr_mode)
            {
#ifdef POINTERS_EXTEND_UNSIGNED
              offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
              offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
            }

          /* A constant address in TO_RTX can have VOIDmode; we must not
             call force_reg in that case.  Avoid that case.  */
          if (GET_CODE (to_rtx) == MEM
              && GET_MODE (to_rtx) == BLKmode
              && GET_MODE (XEXP (to_rtx, 0)) != VOIDmode
              && bitsize
              && (bitpos % bitsize) == 0
              && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
              && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
            {
              rtx temp = change_address (to_rtx, mode1,
                                         plus_constant (XEXP (to_rtx, 0),
                                                        (bitpos /
                                                         BITS_PER_UNIT)));
              if (GET_CODE (XEXP (temp, 0)) == REG)
                to_rtx = temp;
              else
                to_rtx = change_address (to_rtx, mode1,
                                         force_reg (GET_MODE (XEXP (temp, 0)),
                                                    XEXP (temp, 0)));
              bitpos = 0;
            }

          to_rtx = change_address (to_rtx, VOIDmode,
                                   gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
                                                 force_reg (ptr_mode,
                                                            offset_rtx)));
        }
      if (volatilep)
        {
          if (GET_CODE (to_rtx) == MEM)
            {
              /* When the offset is zero, to_rtx is the address of the
                 structure we are storing into, and hence may be shared.
                 We must make a new MEM before setting the volatile bit.  */
              if (offset == 0)
                to_rtx = copy_rtx (to_rtx);

              MEM_VOLATILE_P (to_rtx) = 1;
            }
#if 0 /* This was turned off because, when a field is volatile
         in an object which is not volatile, the object may be in a register,
         and then we would abort over here.  */
          else
            abort ();
#endif
        }

      if (TREE_CODE (to) == COMPONENT_REF
          && TREE_READONLY (TREE_OPERAND (to, 1)))
        {
          if (offset == 0)
            to_rtx = copy_rtx (to_rtx);

          RTX_UNCHANGING_P (to_rtx) = 1;
        }

      /* Check the access.  */
      if (current_function_check_memory_usage && GET_CODE (to_rtx) == MEM)
        {
          rtx to_addr;
          int size;
          int best_mode_size;
          enum machine_mode best_mode;

          best_mode = get_best_mode (bitsize, bitpos,
                                     TYPE_ALIGN (TREE_TYPE (tem)),
                                     mode1, volatilep);
          if (best_mode == VOIDmode)
            best_mode = QImode;

          best_mode_size = GET_MODE_BITSIZE (best_mode);
          to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
          size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
          size *= GET_MODE_SIZE (best_mode);

          /* Check the access right of the pointer.  */
          if (size)
            emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                               to_addr, Pmode,
                               GEN_INT (size), TYPE_MODE (sizetype),
                               GEN_INT (MEMORY_USE_WO),
                               TYPE_MODE (integer_type_node));
        }

      result = store_field (to_rtx, bitsize, bitpos, mode1, from,
                            (want_value
                             /* Spurious cast makes HPUX compiler happy.  */
                             ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
                             : VOIDmode),
                            unsignedp,
                            /* Required alignment of containing datum.  */
                            alignment,
                            int_size_in_bytes (TREE_TYPE (tem)),
                            get_alias_set (to));
      preserve_temp_slots (result);
      free_temp_slots ();
      pop_temp_slots ();

      /* If the value is meaningful, convert RESULT to the proper mode.
         Otherwise, return nothing.  */
      return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
                                          TYPE_MODE (TREE_TYPE (from)),
                                          result,
                                          TREE_UNSIGNED (TREE_TYPE (to)))
              : NULL_RTX);
    }

  /* If the rhs is a function call and its value is not an aggregate,
     call the function before we start to compute the lhs.
     This is needed for correct code for cases such as
     val = setjmp (buf) on machines where reference to val
     requires loading up part of an address in a separate insn.

     Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
     a promoted variable where the zero- or sign- extension needs to be done.
     Handling this in the normal way is safe because no computation is done
     before the call.  */
  if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
      && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
      && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
    {
      rtx value;

      push_temp_slots ();
      value = expand_expr (from, NULL_RTX, VOIDmode, 0);
      if (to_rtx == 0)
        to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);

      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      if (GET_CODE (to_rtx) == PARALLEL)
        emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3461 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
fffa9c1d 3462 else if (GET_MODE (to_rtx) == BLKmode)
db3ec607 3463 emit_block_move (to_rtx, value, expr_size (from),
ff9b5bd8 3464 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
aaf87c45 3465 else
6419e5b0
DT
3466 {
3467#ifdef POINTERS_EXTEND_UNSIGNED
ab40f612
DT
3468 if (TREE_CODE (TREE_TYPE (to)) == REFERENCE_TYPE
3469 || TREE_CODE (TREE_TYPE (to)) == POINTER_TYPE)
6419e5b0
DT
3470 value = convert_memory_address (GET_MODE (to_rtx), value);
3471#endif
3472 emit_move_insn (to_rtx, value);
3473 }
cd1db108
RS
3474 preserve_temp_slots (to_rtx);
3475 free_temp_slots ();
0088fcb1 3476 pop_temp_slots ();
709f5be1 3477 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3478 }
3479
bbf6f052
RK
3480 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3481 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3482
3483 if (to_rtx == 0)
41472af8
MM
3484 {
3485 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3486 if (GET_CODE (to_rtx) == MEM)
3487 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3488 }
bbf6f052 3489
86d38d25
RS
3490 /* Don't move directly into a return register. */
3491 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3492 {
0088fcb1
RK
3493 rtx temp;
3494
3495 push_temp_slots ();
3496 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
86d38d25
RS
3497 emit_move_insn (to_rtx, temp);
3498 preserve_temp_slots (to_rtx);
3499 free_temp_slots ();
0088fcb1 3500 pop_temp_slots ();
709f5be1 3501 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3502 }
3503
bbf6f052
RK
3504 /* In case we are returning the contents of an object which overlaps
3505 the place the value is being stored, use a safe function when copying
3506 a value through a pointer into a structure value return block. */
3507 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3508 && current_function_returns_struct
3509 && !current_function_returns_pcc_struct)
3510 {
0088fcb1
RK
3511 rtx from_rtx, size;
3512
3513 push_temp_slots ();
33a20d10 3514 size = expr_size (from);
921b3427
RK
3515 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3516 EXPAND_MEMORY_USE_DONT);
3517
3518 /* Copy the rights of the bitmap. */
7d384cc0 3519 if (current_function_check_memory_usage)
921b3427 3520 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
6a9c4aed
MK
3521 XEXP (to_rtx, 0), Pmode,
3522 XEXP (from_rtx, 0), Pmode,
921b3427
RK
3523 convert_to_mode (TYPE_MODE (sizetype),
3524 size, TREE_UNSIGNED (sizetype)),
3525 TYPE_MODE (sizetype));
bbf6f052
RK
3526
3527#ifdef TARGET_MEM_FUNCTIONS
d562e42e 3528 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
3529 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3530 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
3531 convert_to_mode (TYPE_MODE (sizetype),
3532 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3533 TYPE_MODE (sizetype));
bbf6f052 3534#else
d562e42e 3535 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
3536 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3537 XEXP (to_rtx, 0), Pmode,
3b6f75e2
JW
3538 convert_to_mode (TYPE_MODE (integer_type_node),
3539 size, TREE_UNSIGNED (integer_type_node)),
3540 TYPE_MODE (integer_type_node));
bbf6f052
RK
3541#endif
3542
3543 preserve_temp_slots (to_rtx);
3544 free_temp_slots ();
0088fcb1 3545 pop_temp_slots ();
709f5be1 3546 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3547 }
3548
3549 /* Compute FROM and store the value in the rtx we got. */
3550
0088fcb1 3551 push_temp_slots ();
bbf6f052
RK
3552 result = store_expr (from, to_rtx, want_value);
3553 preserve_temp_slots (result);
3554 free_temp_slots ();
0088fcb1 3555 pop_temp_slots ();
709f5be1 3556 return want_value ? result : NULL_RTX;
bbf6f052
RK
3557}
3558
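/* Illustrative sketch (a hypothetical helper, not used by expr.c): the
   byte-span arithmetic performed by the memory-usage check above, which
   uses the CEIL macro on (bitpos % best_mode_size) + bitsize.  For a
   field starting at bit 13 with width 7, accessed in 8-bit units, the
   access touches bits 8..23: CEIL (5 + 7, 8) = 2 units = 2 bytes.  */

static int
checked_access_size_sketch (int bitpos, int bitsize, int unit_bits)
{
  /* Bits between the start of the first access unit and the field.  */
  int lead = bitpos % unit_bits;
  /* Number of access units the field touches, rounded up.  */
  int units = (lead + bitsize + unit_bits - 1) / unit_bits;

  return units * (unit_bits / 8);   /* assumes 8-bit bytes */
}
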
/* Generate code for computing expression EXP,
   and storing the value into TARGET.
   TARGET may contain a QUEUED rtx.

   If WANT_VALUE is nonzero, return a copy of the value
   not in TARGET, so that we can be sure to use the proper
   value in a containing expression even if TARGET has something
   else stored in it.  If possible, we copy the value through a pseudo
   and return that pseudo.  Or, if the value is constant, we try to
   return the constant.  In some cases, we return a pseudo
   copied *from* TARGET.

   If the mode is BLKmode then we may return TARGET itself.
   It turns out that in BLKmode it doesn't cause a problem,
   because C has no operators that could combine two different
   assignments into the same BLKmode object with different values
   with no sequence point.  Will other languages need this to
   be more thorough?

   If WANT_VALUE is 0, we return NULL, to make sure
   to catch quickly any cases where the caller uses the value
   and fails to set WANT_VALUE.  */

rtx
store_expr (exp, target, want_value)
     register tree exp;
     register rtx target;
     int want_value;
{
  register rtx temp;
  int dont_return_target = 0;

  if (TREE_CODE (exp) == COMPOUND_EXPR)
    {
      /* Perform first part of compound expression, then assign from second
         part.  */
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return store_expr (TREE_OPERAND (exp, 1), target, want_value);
    }
  else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
    {
      /* For conditional expression, get safe form of the target.  Then
         test the condition, doing the appropriate assignment on either
         side.  This avoids the creation of unnecessary temporaries.
         For non-BLKmode, it is more efficient not to do this.  */

      rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();

      emit_queue ();
      target = protect_from_queue (target, 1);

      do_pending_stack_adjust ();
      NO_DEFER_POP;
      jumpifnot (TREE_OPERAND (exp, 0), lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 1), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_jump_insn (gen_jump (lab2));
      emit_barrier ();
      emit_label (lab1);
      start_cleanup_deferral ();
      store_expr (TREE_OPERAND (exp, 2), target, 0);
      end_cleanup_deferral ();
      emit_queue ();
      emit_label (lab2);
      OK_DEFER_POP;

      return want_value ? target : NULL_RTX;
    }
  else if (queued_subexp_p (target))
    /* If target contains a postincrement, let's not risk
       using it as the place to generate the rhs.  */
    {
      if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
        {
          /* Expand EXP into a new pseudo.  */
          temp = gen_reg_rtx (GET_MODE (target));
          temp = expand_expr (exp, temp, GET_MODE (target), 0);
        }
      else
        temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);

      /* If target is volatile, ANSI requires accessing the value
         *from* the target, if it is accessed.  So make that happen.
         In no case return the target itself.  */
      if (! MEM_VOLATILE_P (target) && want_value)
        dont_return_target = 1;
    }
  else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
           && GET_MODE (target) != BLKmode)
    /* If target is in memory and caller wants value in a register instead,
       arrange that.  Pass TARGET as target for expand_expr so that,
       if EXP is another assignment, WANT_VALUE will be nonzero for it.
       We know expand_expr will not use the target in that case.
       Don't do this if TARGET is volatile because we are supposed
       to write it and then read it.  */
    {
      temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
                          GET_MODE (target), 0);
      if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
        temp = copy_to_reg (temp);
      dont_return_target = 1;
    }
  else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
    /* If this is a scalar in a register that is stored in a wider mode
       than the declared mode, compute the result into its declared mode
       and then convert to the wider mode.  Our value is the computed
       expression.  */
    {
      /* If we don't want a value, we can do the conversion inside EXP,
         which will often result in some optimizations.  Do the conversion
         in two steps: first change the signedness, if needed, then
         the extend.  But don't do this if the type of EXP is a subtype
         of something else since then the conversion might involve
         more than just converting modes.  */
      if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
          && TREE_TYPE (TREE_TYPE (exp)) == 0)
        {
          if (TREE_UNSIGNED (TREE_TYPE (exp))
              != SUBREG_PROMOTED_UNSIGNED_P (target))
            exp
              = convert
                (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
                                          TREE_TYPE (exp)),
                 exp);

          exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
                                        SUBREG_PROMOTED_UNSIGNED_P (target)),
                         exp);
        }

      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If TEMP is a volatile MEM and we want a result value, make
         the access now so it gets done only once.  Likewise if
         it contains TARGET.  */
      if (GET_CODE (temp) == MEM && want_value
          && (MEM_VOLATILE_P (temp)
              || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
        temp = copy_to_reg (temp);

      /* If TEMP is a VOIDmode constant, use convert_modes to make
         sure that we properly convert it.  */
      if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
        temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                              TYPE_MODE (TREE_TYPE (exp)), temp,
                              SUBREG_PROMOTED_UNSIGNED_P (target));

      convert_move (SUBREG_REG (target), temp,
                    SUBREG_PROMOTED_UNSIGNED_P (target));

      /* If we promoted a constant, change the mode back down to match
         target.  Otherwise, the caller might get confused by a result whose
         mode is larger than expected.  */

      if (want_value && GET_MODE (temp) != GET_MODE (target)
          && GET_MODE (temp) != VOIDmode)
        {
          temp = gen_rtx_SUBREG (GET_MODE (target), temp, 0);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp)
            = SUBREG_PROMOTED_UNSIGNED_P (target);
        }

      return want_value ? temp : NULL_RTX;
    }
  else
    {
      temp = expand_expr (exp, target, GET_MODE (target), 0);
      /* Return TARGET if it's a specified hardware register.
         If TARGET is a volatile mem ref, either return TARGET
         or return a reg copied *from* TARGET; ANSI requires this.

         Otherwise, if TEMP is not TARGET, return TEMP
         if it is constant (for efficiency),
         or if we really want the correct value.  */
      if (!(target && GET_CODE (target) == REG
            && REGNO (target) < FIRST_PSEUDO_REGISTER)
          && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
          && ! rtx_equal_p (temp, target)
          && (CONSTANT_P (temp) || want_value))
        dont_return_target = 1;
    }

  /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
     the same as that of TARGET, adjust the constant.  This is needed, for
     example, in case it is a CONST_DOUBLE and we want only a word-sized
     value.  */
  if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
      && TREE_CODE (exp) != ERROR_MARK
      && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
    temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
                          temp, TREE_UNSIGNED (TREE_TYPE (exp)));

  if (current_function_check_memory_usage
      && GET_CODE (target) == MEM
      && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
    {
      if (GET_CODE (temp) == MEM)
        emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
                           XEXP (target, 0), Pmode,
                           XEXP (temp, 0), Pmode,
                           expr_size (exp), TYPE_MODE (sizetype));
      else
        emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                           XEXP (target, 0), Pmode,
                           expr_size (exp), TYPE_MODE (sizetype),
                           GEN_INT (MEMORY_USE_WO),
                           TYPE_MODE (integer_type_node));
    }

  /* If value was not generated in the target, store it there.
     Convert the value to TARGET's type first if necessary.  */
  /* If TEMP and TARGET compare equal according to rtx_equal_p, but
     one or both of them are volatile memory refs, we have to distinguish
     two cases:
     - expand_expr has used TARGET.  In this case, we must not generate
       another copy.  This can be detected by TARGET being equal according
       to == .
     - expand_expr has not used TARGET - that means that the source just
       happens to have the same RTX form.  Since temp will have been created
       by expand_expr, it will compare unequal according to == .
       We must generate a copy in this case, to reach the correct number
       of volatile memory references.  */

  if ((! rtx_equal_p (temp, target)
       || (temp != target && (side_effects_p (temp)
                              || side_effects_p (target))))
      && TREE_CODE (exp) != ERROR_MARK)
    {
      target = protect_from_queue (target, 1);
      if (GET_MODE (temp) != GET_MODE (target)
          && GET_MODE (temp) != VOIDmode)
        {
          int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
          if (dont_return_target)
            {
              /* In this case, we will return TEMP,
                 so make sure it has the proper mode.
                 But don't forget to store the value into TARGET.  */
              temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
              emit_move_insn (target, temp);
            }
          else
            convert_move (target, temp, unsignedp);
        }

      else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
        {
          /* Handle copying a string constant into an array.
             The string constant may be shorter than the array.
             So copy just the string's actual length, and clear the rest.  */
          rtx size;
          rtx addr;

          /* Get the size of the data type of the string,
             which is actually the size of the target.  */
          size = expr_size (exp);
          if (GET_CODE (size) == CONST_INT
              && INTVAL (size) < TREE_STRING_LENGTH (exp))
            emit_block_move (target, temp, size,
                             TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
          else
            {
              /* Compute the size of the data to copy from the string.  */
              tree copy_size
                = size_binop (MIN_EXPR,
                              make_tree (sizetype, size),
                              convert (sizetype,
                                       build_int_2 (TREE_STRING_LENGTH (exp),
                                                    0)));
              rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
                                               VOIDmode, 0);
              rtx label = 0;

              /* Copy that much.  */
              emit_block_move (target, temp, copy_size_rtx,
                               TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);

              /* Figure out how much is left in TARGET that we have to clear.
                 Do all calculations in ptr_mode.  */

              addr = XEXP (target, 0);
              addr = convert_modes (ptr_mode, Pmode, addr, 1);

              if (GET_CODE (copy_size_rtx) == CONST_INT)
                {
                  addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
                  size = plus_constant (size, - TREE_STRING_LENGTH (exp));
                }
              else
                {
                  addr = force_reg (ptr_mode, addr);
                  addr = expand_binop (ptr_mode, add_optab, addr,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

                  size = expand_binop (ptr_mode, sub_optab, size,
                                       copy_size_rtx, NULL_RTX, 0,
                                       OPTAB_LIB_WIDEN);

                  label = gen_label_rtx ();
                  emit_cmp_and_jump_insns (size, const0_rtx, LT, NULL_RTX,
                                           GET_MODE (size), 0, 0, label);
                }

              if (size != const0_rtx)
                {
                  /* Be sure we can write on ADDR.  */
                  if (current_function_check_memory_usage)
                    emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                                       addr, Pmode,
                                       size, TYPE_MODE (sizetype),
                                       GEN_INT (MEMORY_USE_WO),
                                       TYPE_MODE (integer_type_node));
#ifdef TARGET_MEM_FUNCTIONS
                  emit_library_call (memset_libfunc, 0, VOIDmode, 3,
                                     addr, ptr_mode,
                                     const0_rtx, TYPE_MODE (integer_type_node),
                                     convert_to_mode (TYPE_MODE (sizetype),
                                                      size,
                                                      TREE_UNSIGNED (sizetype)),
                                     TYPE_MODE (sizetype));
#else
                  emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
                                     addr, ptr_mode,
                                     convert_to_mode (TYPE_MODE (integer_type_node),
                                                      size,
                                                      TREE_UNSIGNED (integer_type_node)),
                                     TYPE_MODE (integer_type_node));
#endif
                }

              if (label)
                emit_label (label);
            }
        }
      /* Handle calls that return values in multiple non-contiguous locations.
         The Irix 6 ABI has examples of this.  */
      else if (GET_CODE (target) == PARALLEL)
        emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
                         TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else if (GET_MODE (temp) == BLKmode)
        emit_block_move (target, temp, expr_size (exp),
                         TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
      else
        emit_move_insn (target, temp);
    }

  /* If we don't want a value, return NULL_RTX.  */
  if (! want_value)
    return NULL_RTX;

  /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
     ??? The latter test doesn't seem to make sense.  */
  else if (dont_return_target && GET_CODE (temp) != MEM)
    return temp;

  /* Return TARGET itself if it is a hard register.  */
  else if (want_value && GET_MODE (target) != BLKmode
           && ! (GET_CODE (target) == REG
                 && REGNO (target) < FIRST_PSEUDO_REGISTER))
    return copy_to_reg (target);

  else
    return target;
}
\f
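/* Illustrative sketch (hypothetical, not used by expr.c): the net effect
   of the STRING_CST branch of store_expr above, written as plain C for a
   size known at compile time.  The generated code likewise copies only
   the string's actual length and clears the tail of the array.  */

#include <string.h>

static void
store_string_sketch (char *target, int target_size,
                     const char *str, int str_len)
{
  int copy = str_len < target_size ? str_len : target_size;

  memcpy (target, str, copy);                         /* copy that much */
  if (target_size > copy)
    memset (target + copy, 0, target_size - copy);    /* clear the rest */
}
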
/* Return 1 if EXP just contains zeros.  */

static int
is_zeros_p (exp)
     tree exp;
{
  tree elt;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      return is_zeros_p (TREE_OPERAND (exp, 0));

    case INTEGER_CST:
      return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;

    case COMPLEX_CST:
      return
        is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));

    case REAL_CST:
      return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);

    case CONSTRUCTOR:
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
        if (! is_zeros_p (TREE_VALUE (elt)))
          return 0;

      return 1;

    default:
      return 0;
    }
}

/* Return 1 if EXP contains mostly (at least 3/4) zeros.  */

static int
mostly_zeros_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == CONSTRUCTOR)
    {
      int elts = 0, zeros = 0;
      tree elt = CONSTRUCTOR_ELTS (exp);
      if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
        {
          /* If there are no ranges of true bits, it is all zero.  */
          return elt == NULL_TREE;
        }
      for (; elt; elt = TREE_CHAIN (elt))
        {
          /* We do not handle the case where the index is a RANGE_EXPR,
             so the statistic will be somewhat inaccurate.
             We do make a more accurate count in store_constructor itself,
             and since this function is only used for nested array elements,
             this should be close enough.  */
          if (mostly_zeros_p (TREE_VALUE (elt)))
            zeros++;
          elts++;
        }

      return 4 * zeros >= 3 * elts;
    }

  return is_zeros_p (exp);
}
\f
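/* Illustrative sketch (hypothetical): the density test used by
   mostly_zeros_p above, isolated.  It is written as 4 * zeros >= 3 * elts
   to stay in integer arithmetic; this is equivalent to asking whether at
   least 75% of the elements are (mostly) zero.  For example, 3 zeros out
   of 4 elements passes (12 >= 12), while 2 out of 4 fails (8 < 12).  */

static int
at_least_three_quarters_zero (int zeros, int elts)
{
  return 4 * zeros >= 3 * elts;
}
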
/* Helper function for store_constructor.
   TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
   TYPE is the type of the CONSTRUCTOR, not the element type.
   CLEARED is as for store_constructor.

   This provides a recursive shortcut back to store_constructor when it isn't
   necessary to go through store_field.  This is so that we can pass through
   the cleared field to let store_constructor know that we may not have to
   clear a substructure if the outer structure has already been cleared.  */

static void
store_constructor_field (target, bitsize, bitpos,
                         mode, exp, type, cleared)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp, type;
     int cleared;
{
  if (TREE_CODE (exp) == CONSTRUCTOR
      && bitpos % BITS_PER_UNIT == 0
      /* If we have a non-zero bitpos for a register target, then we just
         let store_field do the bitfield handling.  This is unlikely to
         generate unnecessary clear instructions anyway.  */
      && (bitpos == 0 || GET_CODE (target) == MEM))
    {
      if (bitpos != 0)
        target = change_address (target, VOIDmode,
                                 plus_constant (XEXP (target, 0),
                                                bitpos / BITS_PER_UNIT));
      store_constructor (exp, target, cleared);
    }
  else
    store_field (target, bitsize, bitpos, mode, exp,
                 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
                 int_size_in_bytes (type), 0);
}

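/* Illustrative sketch (hypothetical): the shortcut test used by
   store_constructor_field above.  A nested CONSTRUCTOR is handed straight
   back to store_constructor only when it starts on a byte boundary and
   either begins at bit 0 or lives in memory, in which case the address
   can simply be advanced by bitpos / BITS_PER_UNIT bytes.  */

static int
takes_constructor_shortcut (int is_constructor, int bitpos, int is_mem)
{
  int byte_aligned = bitpos % 8 == 0;   /* assumes BITS_PER_UNIT == 8 */

  return is_constructor && byte_aligned && (bitpos == 0 || is_mem);
}
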
/* Store the value of constructor EXP into the rtx TARGET.
   TARGET is either a REG or a MEM.
   CLEARED is true if TARGET is known to have been zero'd.  */

static void
store_constructor (exp, target, cleared)
     tree exp;
     rtx target;
     int cleared;
{
  tree type = TREE_TYPE (exp);
  rtx exp_size = expr_size (exp);

  /* We know our target cannot conflict, since safe_from_p has been called.  */
#if 0
  /* Don't try copying piece by piece into a hard register
     since that is vulnerable to being clobbered by EXP.
     Instead, construct in a pseudo register and then copy it all.  */
  if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
    {
      rtx temp = gen_reg_rtx (GET_MODE (target));
      store_constructor (exp, temp, 0);
      emit_move_insn (target, temp);
      return;
    }
#endif

  if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      register tree elt;

      /* Inform later passes that the whole union value is dead.  */
      if (TREE_CODE (type) == UNION_TYPE
          || TREE_CODE (type) == QUAL_UNION_TYPE)
        emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* If we are building a static constructor into a register,
         set the initial value as zero so we can fold the value into
         a constant.  But if more than one register is involved,
         this probably loses.  */
      else if (GET_CODE (target) == REG && TREE_STATIC (exp)
               && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
        {
          if (! cleared)
            emit_move_insn (target, CONST0_RTX (GET_MODE (target)));

          cleared = 1;
        }

      /* If the constructor has fewer fields than the structure
         or if we are initializing the structure to mostly zeros,
         clear the whole structure first.  */
      else if ((list_length (CONSTRUCTOR_ELTS (exp))
                != list_length (TYPE_FIELDS (type)))
               || mostly_zeros_p (exp))
        {
          if (! cleared)
            clear_storage (target, expr_size (exp),
                           TYPE_ALIGN (type) / BITS_PER_UNIT);

          cleared = 1;
        }
      else
        /* Inform later passes that the old value is dead.  */
        emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
         the corresponding field of TARGET.  */

      for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
        {
          register tree field = TREE_PURPOSE (elt);
          tree value = TREE_VALUE (elt);
          register enum machine_mode mode;
          int bitsize;
          int bitpos = 0;
          int unsignedp;
          tree pos, constant = 0, offset = 0;
          rtx to_rtx = target;

          /* Just ignore missing fields.
             We cleared the whole structure, above,
             if any fields are missing.  */
          if (field == 0)
            continue;

          if (cleared && is_zeros_p (TREE_VALUE (elt)))
            continue;

          bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
          unsignedp = TREE_UNSIGNED (field);
          mode = DECL_MODE (field);
          if (DECL_BIT_FIELD (field))
            mode = VOIDmode;

          pos = DECL_FIELD_BITPOS (field);
          if (TREE_CODE (pos) == INTEGER_CST)
            constant = pos;
          else if (TREE_CODE (pos) == PLUS_EXPR
                   && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
            constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
          else
            offset = pos;

          if (constant)
            bitpos = TREE_INT_CST_LOW (constant);

          if (offset)
            {
              rtx offset_rtx;

              if (contains_placeholder_p (offset))
                offset = build (WITH_RECORD_EXPR, sizetype,
                                offset, make_tree (TREE_TYPE (exp), target));

              offset = size_binop (FLOOR_DIV_EXPR, offset,
                                   size_int (BITS_PER_UNIT));

              offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
              if (GET_CODE (to_rtx) != MEM)
                abort ();

              if (GET_MODE (offset_rtx) != ptr_mode)
                {
#ifdef POINTERS_EXTEND_UNSIGNED
                  offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
                  offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
                }

              to_rtx
                = change_address (to_rtx, VOIDmode,
                                  gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
                                                force_reg (ptr_mode,
                                                           offset_rtx)));
            }
          if (TREE_READONLY (field))
            {
              if (GET_CODE (to_rtx) == MEM)
                to_rtx = copy_rtx (to_rtx);

              RTX_UNCHANGING_P (to_rtx) = 1;
            }

#ifdef WORD_REGISTER_OPERATIONS
          /* If this initializes a field that is smaller than a word, at the
             start of a word, try to widen it to a full word.
             This special case allows us to output C++ member function
             initializations in a form that the optimizers can understand.  */
          if (constant
              && GET_CODE (target) == REG
              && bitsize < BITS_PER_WORD
              && bitpos % BITS_PER_WORD == 0
              && GET_MODE_CLASS (mode) == MODE_INT
              && TREE_CODE (value) == INTEGER_CST
              && GET_CODE (exp_size) == CONST_INT
              && bitpos + BITS_PER_WORD <= INTVAL (exp_size) * BITS_PER_UNIT)
            {
              tree type = TREE_TYPE (value);
              if (TYPE_PRECISION (type) < BITS_PER_WORD)
                {
                  type = type_for_size (BITS_PER_WORD, TREE_UNSIGNED (type));
                  value = convert (type, value);
                }
              if (BYTES_BIG_ENDIAN)
                value
                  = fold (build (LSHIFT_EXPR, type, value,
                                 build_int_2 (BITS_PER_WORD - bitsize, 0)));
              bitsize = BITS_PER_WORD;
              mode = word_mode;
            }
#endif
          store_constructor_field (to_rtx, bitsize, bitpos,
                                   mode, value, type, cleared);
        }
    }
  else if (TREE_CODE (type) == ARRAY_TYPE)
    {
      register tree elt;
      register int i;
      int need_to_clear;
      tree domain = TYPE_DOMAIN (type);
      HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
      HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
      tree elttype = TREE_TYPE (type);

      /* If the constructor has fewer elements than the array,
         clear the whole array first.  Similarly if this is a
         static constructor of a non-BLKmode object.  */
      if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
        need_to_clear = 1;
      else
        {
          HOST_WIDE_INT count = 0, zero_count = 0;
          need_to_clear = 0;
          /* This loop is a more accurate version of the loop in
             mostly_zeros_p (it handles RANGE_EXPR in an index).
             It is also needed to check for missing elements.  */
          for (elt = CONSTRUCTOR_ELTS (exp);
               elt != NULL_TREE;
               elt = TREE_CHAIN (elt))
            {
              tree index = TREE_PURPOSE (elt);
              HOST_WIDE_INT this_node_count;
              if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
                {
                  tree lo_index = TREE_OPERAND (index, 0);
                  tree hi_index = TREE_OPERAND (index, 1);
                  if (TREE_CODE (lo_index) != INTEGER_CST
                      || TREE_CODE (hi_index) != INTEGER_CST)
                    {
                      need_to_clear = 1;
                      break;
                    }
                  this_node_count = TREE_INT_CST_LOW (hi_index)
                    - TREE_INT_CST_LOW (lo_index) + 1;
                }
              else
                this_node_count = 1;
              count += this_node_count;
              if (mostly_zeros_p (TREE_VALUE (elt)))
                zero_count += this_node_count;
            }
          /* Clear the entire array first if there are any missing elements,
             or if the incidence of zero elements is >= 75%.  */
          if (count < maxelt - minelt + 1
              || 4 * zero_count >= 3 * count)
            need_to_clear = 1;
        }
      if (need_to_clear)
        {
          if (! cleared)
            clear_storage (target, expr_size (exp),
                           TYPE_ALIGN (type) / BITS_PER_UNIT);
          cleared = 1;
        }
      else
        /* Inform later passes that the old value is dead.  */
        emit_insn (gen_rtx_CLOBBER (VOIDmode, target));

      /* Store each element of the constructor into
         the corresponding element of TARGET, determined
         by counting the elements.  */
      for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
           elt;
           elt = TREE_CHAIN (elt), i++)
        {
          register enum machine_mode mode;
          int bitsize;
          int bitpos;
          int unsignedp;
          tree value = TREE_VALUE (elt);
          tree index = TREE_PURPOSE (elt);
          rtx xtarget = target;

          if (cleared && is_zeros_p (value))
            continue;

          mode = TYPE_MODE (elttype);
          bitsize = GET_MODE_BITSIZE (mode);
          unsignedp = TREE_UNSIGNED (elttype);

          if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
            {
              tree lo_index = TREE_OPERAND (index, 0);
              tree hi_index = TREE_OPERAND (index, 1);
              rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
              struct nesting *loop;
              HOST_WIDE_INT lo, hi, count;
              tree position;

              /* If the range is constant and "small", unroll the loop.  */
              if (TREE_CODE (lo_index) == INTEGER_CST
                  && TREE_CODE (hi_index) == INTEGER_CST
                  && (lo = TREE_INT_CST_LOW (lo_index),
                      hi = TREE_INT_CST_LOW (hi_index),
                      count = hi - lo + 1,
                      (GET_CODE (target) != MEM
                       || count <= 2
                       || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
                           && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
                           <= 40 * 8))))
                {
                  lo -= minelt;  hi -= minelt;
                  for (; lo <= hi; lo++)
                    {
                      bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
                      store_constructor_field (target, bitsize, bitpos,
                                               mode, value, type, cleared);
                    }
                }
              else
                {
                  hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
                  loop_top = gen_label_rtx ();
                  loop_end = gen_label_rtx ();

                  unsignedp = TREE_UNSIGNED (domain);

                  index = build_decl (VAR_DECL, NULL_TREE, domain);

                  DECL_RTL (index) = index_r
                    = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
                                                 &unsignedp, 0));

                  if (TREE_CODE (value) == SAVE_EXPR
                      && SAVE_EXPR_RTL (value) == 0)
                    {
                      /* Make sure value gets expanded once before the
                         loop.  */
                      expand_expr (value, const0_rtx, VOIDmode, 0);
                      emit_queue ();
                    }
                  store_expr (lo_index, index_r, 0);
                  loop = expand_start_loop (0);

                  /* Assign value to element index.  */
                  position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
                                         size_int (BITS_PER_UNIT));
                  position = size_binop (MULT_EXPR,
                                         size_binop (MINUS_EXPR, index,
                                                     TYPE_MIN_VALUE (domain)),
                                         position);
                  pos_rtx = expand_expr (position, 0, VOIDmode, 0);
                  addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
                  xtarget = change_address (target, mode, addr);
                  if (TREE_CODE (value) == CONSTRUCTOR)
                    store_constructor (value, xtarget, cleared);
                  else
                    store_expr (value, xtarget, 0);

                  expand_exit_loop_if_false (loop,
                                             build (LT_EXPR, integer_type_node,
                                                    index, hi_index));

                  expand_increment (build (PREINCREMENT_EXPR,
                                           TREE_TYPE (index),
                                           index, integer_one_node), 0, 0);
                  expand_end_loop ();
                  emit_label (loop_end);

                  /* Needed by stupid register allocation, to extend the
                     lifetime of pseudo-regs used by target past the end
                     of the loop.  */
                  emit_insn (gen_rtx_USE (GET_MODE (target), target));
                }
            }
          else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
                   || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
            {
              rtx pos_rtx, addr;
              tree position;

              if (index == 0)
                index = size_int (i);

              if (minelt)
                index = size_binop (MINUS_EXPR, index,
                                    TYPE_MIN_VALUE (domain));
              position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
                                     size_int (BITS_PER_UNIT));
              position = size_binop (MULT_EXPR, index, position);
              pos_rtx = expand_expr (position, 0, VOIDmode, 0);
              addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
              xtarget = change_address (target, mode, addr);
              store_expr (value, xtarget, 0);
            }
          else
            {
              if (index != 0)
                bitpos = ((TREE_INT_CST_LOW (index) - minelt)
                          * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
              else
                bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
              store_constructor_field (target, bitsize, bitpos,
                                       mode, value, type, cleared);
            }
        }
    }
  /* Set constructor assignments.  */
  else if (TREE_CODE (type) == SET_TYPE)
    {
      tree elt = CONSTRUCTOR_ELTS (exp);
      int nbytes = int_size_in_bytes (type), nbits;
      tree domain = TYPE_DOMAIN (type);
      tree domain_min, domain_max, bitlength;

      /* The default implementation strategy is to extract the constant
         parts of the constructor, use that to initialize the target,
         and then "or" in whatever non-constant ranges we need in addition.

         If a large set is all zero or all ones, it is
         probably better to set it using memset (if available) or bzero.
         Also, if a large set has just a single range, it may also be
         better to first clear the set (using bzero/memset), and then
         set the bits we want.  */

      /* Check for all zeros.  */
      if (elt == NULL_TREE)
        {
          if (!cleared)
            clear_storage (target, expr_size (exp),
                           TYPE_ALIGN (type) / BITS_PER_UNIT);
          return;
        }

      domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
      domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
      bitlength = size_binop (PLUS_EXPR,
                              size_binop (MINUS_EXPR, domain_max, domain_min),
                              size_one_node);

      if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
        abort ();
      nbits = TREE_INT_CST_LOW (bitlength);

      /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
         are "complicated" (more than one range), initialize (the
         constant parts) by copying from a constant.  */
      if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
          || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
        {
          int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
          enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
          char *bit_buffer = (char *) alloca (nbits);
          HOST_WIDE_INT word = 0;
          int bit_pos = 0;
          int ibit = 0;
          int offset = 0;  /* In bytes from beginning of set.  */
          elt = get_set_constructor_bits (exp, bit_buffer, nbits);
          for (;;)
            {
              if (bit_buffer[ibit])
                {
                  if (BYTES_BIG_ENDIAN)
                    word |= (1 << (set_word_size - 1 - bit_pos));
                  else
                    word |= 1 << bit_pos;
                }
              bit_pos++;  ibit++;
              if (bit_pos >= set_word_size || ibit == nbits)
                {
                  if (word != 0 || ! cleared)
                    {
                      rtx datum = GEN_INT (word);
                      rtx to_rtx;
                      /* The assumption here is that it is safe to use
                         XEXP if the set is multi-word, but not if
                         it's single-word.  */
                      if (GET_CODE (target) == MEM)
                        {
                          to_rtx = plus_constant (XEXP (target, 0), offset);
                          to_rtx = change_address (target, mode, to_rtx);
                        }
                      else if (offset == 0)
                        to_rtx = target;
                      else
                        abort ();
                      emit_move_insn (to_rtx, datum);
                    }
                  if (ibit == nbits)
                    break;
                  word = 0;
                  bit_pos = 0;
                  offset += set_word_size / BITS_PER_UNIT;
                }
            }
        }
      else if (!cleared)
        {
          /* Don't bother clearing storage if the set is all ones.  */
          if (TREE_CHAIN (elt) != NULL_TREE
              || (TREE_PURPOSE (elt) == NULL_TREE
                  ? nbits != 1
                  : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
                     || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
                     || (TREE_INT_CST_LOW (TREE_VALUE (elt))
                         - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
                         != nbits))))
            clear_storage (target, expr_size (exp),
                           TYPE_ALIGN (type) / BITS_PER_UNIT);
        }

      for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
        {
          /* Start of range of element, or NULL.  */
          tree startbit = TREE_PURPOSE (elt);
          /* End of range of element, or element value.  */
          tree endbit = TREE_VALUE (elt);
#ifdef TARGET_MEM_FUNCTIONS
          HOST_WIDE_INT startb, endb;
#endif
          rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;

          bitlength_rtx = expand_expr (bitlength,
                                       NULL_RTX, MEM, EXPAND_CONST_ADDRESS);

          /* Handle non-range tuple element like [ expr ].  */
          if (startbit == NULL_TREE)
            {
              startbit = save_expr (endbit);
              endbit = startbit;
            }
          startbit = convert (sizetype, startbit);
          endbit = convert (sizetype, endbit);
          if (! integer_zerop (domain_min))
            {
              startbit = size_binop (MINUS_EXPR, startbit, domain_min);
              endbit = size_binop (MINUS_EXPR, endbit, domain_min);
            }
          startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
                                      EXPAND_CONST_ADDRESS);
          endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
                                    EXPAND_CONST_ADDRESS);

          if (REG_P (target))
            {
              targetx = assign_stack_temp (GET_MODE (target),
                                           GET_MODE_SIZE (GET_MODE (target)),
                                           0);
              emit_move_insn (targetx, target);
            }
          else if (GET_CODE (target) == MEM)
            targetx = target;
          else
            abort ();

#ifdef TARGET_MEM_FUNCTIONS
          /* Optimization:  If startbit and endbit are
             constants divisible by BITS_PER_UNIT,
             call memset instead.  */
          if (TREE_CODE (startbit) == INTEGER_CST
              && TREE_CODE (endbit) == INTEGER_CST
              && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
              && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
            {
              emit_library_call (memset_libfunc, 0,
                                 VOIDmode, 3,
                                 plus_constant (XEXP (targetx, 0),
                                                startb / BITS_PER_UNIT),
                                 Pmode,
                                 constm1_rtx, TYPE_MODE (integer_type_node),
                                 GEN_INT ((endb - startb) / BITS_PER_UNIT),
                                 TYPE_MODE (sizetype));
            }
          else
#endif
            {
              emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
                                 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
                                 bitlength_rtx, TYPE_MODE (sizetype),
                                 startbit_rtx, TYPE_MODE (sizetype),
                                 endbit_rtx, TYPE_MODE (sizetype));
            }
          if (REG_P (target))
            emit_move_insn (target, targetx);
        }
    }

  else
    abort ();
}

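/* Illustrative sketch (hypothetical, not used by expr.c): the word-packing
   loop from the SET_TYPE case above, isolated.  Bits from BIT_BUFFER are
   packed WORD_BITS at a time; on a big-endian target, bit 0 of the set
   lands in the most significant position of the word, on little-endian in
   the least significant.  Assumes nbits >= 1 and word_bits <= the width
   of unsigned long; the original emits each finished word with
   emit_move_insn instead of storing it into an array.  */

static void
pack_set_bits_sketch (const char *bit_buffer, int nbits,
                      int word_bits, int big_endian,
                      unsigned long *out_words)
{
  unsigned long word = 0;
  int bit_pos = 0, ibit = 0, nwords = 0;

  for (;;)
    {
      if (bit_buffer[ibit])
        word |= big_endian
                ? 1UL << (word_bits - 1 - bit_pos)
                : 1UL << bit_pos;
      bit_pos++, ibit++;
      if (bit_pos >= word_bits || ibit == nbits)
        {
          out_words[nwords++] = word;   /* emit_move_insn in the original */
          if (ibit == nbits)
            break;
          word = 0;
          bit_pos = 0;
        }
    }
}
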
/* Store the value of EXP (an expression tree)
   into a subfield of TARGET which has mode MODE and occupies
   BITSIZE bits, starting BITPOS bits from the start of TARGET.
   If MODE is VOIDmode, it means that we are storing into a bit-field.

   If VALUE_MODE is VOIDmode, return nothing in particular.
   UNSIGNEDP is not used in this case.

   Otherwise, return an rtx for the value stored.  This rtx
   has mode VALUE_MODE if that is convenient to do.
   In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.

   ALIGN is the alignment that TARGET is known to have, measured in bytes.
   TOTAL_SIZE is the size in bytes of the structure, or -1 if varying.

   ALIAS_SET is the alias set for the destination.  This value will
   (in general) be different from that for TARGET, since TARGET is a
   reference to the containing structure.  */

static rtx
store_field (target, bitsize, bitpos, mode, exp, value_mode,
             unsignedp, align, total_size, alias_set)
     rtx target;
     int bitsize, bitpos;
     enum machine_mode mode;
     tree exp;
     enum machine_mode value_mode;
     int unsignedp;
     int align;
     int total_size;
     int alias_set;
{
  HOST_WIDE_INT width_mask = 0;

  if (TREE_CODE (exp) == ERROR_MARK)
    return const0_rtx;

  if (bitsize < HOST_BITS_PER_WIDE_INT)
    width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;

  /* If we are storing into an unaligned field of an aligned union that is
     in a register, we may have the mode of TARGET being an integer mode but
     MODE == BLKmode.  In that case, get an aligned object whose size and
     alignment are the same as TARGET and store TARGET into it (we can avoid
     the store if the field being stored is the entire width of TARGET).  Then
     call ourselves recursively to store the field into a BLKmode version of
     that object.  Finally, load from the object into TARGET.  This is not
     very efficient in general, but should only be slightly more expensive
     than the otherwise-required unaligned accesses.  Perhaps this can be
     cleaned up later.  */

  if (mode == BLKmode
      && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
    {
      rtx object = assign_stack_temp (GET_MODE (target),
                                      GET_MODE_SIZE (GET_MODE (target)), 0);
      rtx blk_object = copy_rtx (object);

      MEM_SET_IN_STRUCT_P (object, 1);
      MEM_SET_IN_STRUCT_P (blk_object, 1);
      PUT_MODE (blk_object, BLKmode);

      if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
        emit_move_insn (object, target);

      store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
                   align, total_size, alias_set);

      /* Even though we aren't returning target, we need to
         give it the updated value.  */
      emit_move_insn (target, object);

      return blk_object;
    }

  /* If the structure is in a register or if the component
     is a bit field, we cannot use addressing to access it.
     Use bit-field techniques or SUBREG to store in it.  */

  if (mode == VOIDmode
      || (mode != BLKmode && ! direct_store[(int) mode]
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
          && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
      || GET_CODE (target) == REG
      || GET_CODE (target) == SUBREG
      /* If the field isn't aligned enough to store as an ordinary memref,
         store it as a bit field.  */
      || (SLOW_UNALIGNED_ACCESS
          && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
      || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
    {
      rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);

      /* If BITSIZE is narrower than the size of the type of EXP
         we will be narrowing TEMP.  Normally, what's wanted are the
         low-order bits.  However, if EXP's type is a record and this is
         a big-endian machine, we want the upper BITSIZE bits.  */
      if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
          && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
          && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
        temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
                             size_int (GET_MODE_BITSIZE (GET_MODE (temp))
                                       - bitsize),
                             temp, 1);

      /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
         MODE.  */
      if (mode != VOIDmode && mode != BLKmode
          && mode != TYPE_MODE (TREE_TYPE (exp)))
        temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);

      /* If the modes of TARGET and TEMP are both BLKmode, both
         must be in memory and BITPOS must be aligned on a byte
         boundary.  If so, we simply do a block copy.  */
      if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
        {
          if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
              || bitpos % BITS_PER_UNIT != 0)
            abort ();

          target = change_address (target, VOIDmode,
                                   plus_constant (XEXP (target, 0),
                                                  bitpos / BITS_PER_UNIT));

          emit_block_move (target, temp,
                           GEN_INT ((bitsize + BITS_PER_UNIT - 1)
                                    / BITS_PER_UNIT),
                           1);

          return value_mode == VOIDmode ? const0_rtx : target;
        }

      /* Store the value in the bitfield.  */
      store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
      if (value_mode != VOIDmode)
        {
          /* The caller wants an rtx for the value.  */
          /* If possible, avoid refetching from the bitfield itself.  */
          if (width_mask != 0
              && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
            {
              tree count;
              enum machine_mode tmode;

              if (unsignedp)
                return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
              tmode = GET_MODE (temp);
              if (tmode == VOIDmode)
                tmode = value_mode;
              count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
              temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
              return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
            }
          return extract_bit_field (target, bitsize, bitpos, unsignedp,
                                    NULL_RTX, value_mode, 0, align,
                                    total_size);
        }
      return const0_rtx;
    }
  else
    {
      rtx addr = XEXP (target, 0);
      rtx to_rtx;

      /* If a value is wanted, it must be the lhs;
         so make the address stable for multiple use.  */

      if (value_mode != VOIDmode && GET_CODE (addr) != REG
          && ! CONSTANT_ADDRESS_P (addr)
          /* A frame-pointer reference is already stable.  */
          && ! (GET_CODE (addr) == PLUS
                && GET_CODE (XEXP (addr, 1)) == CONST_INT
                && (XEXP (addr, 0) == virtual_incoming_args_rtx
                    || XEXP (addr, 0) == virtual_stack_vars_rtx)))
        addr = copy_to_reg (addr);

      /* Now build a reference to just the desired component.  */

      to_rtx = copy_rtx (change_address (target, mode,
                                         plus_constant (addr,
                                                        (bitpos
                                                         / BITS_PER_UNIT))));
      MEM_SET_IN_STRUCT_P (to_rtx, 1);
      MEM_ALIAS_SET (to_rtx) = alias_set;

      return store_expr (exp, to_rtx, value_mode != VOIDmode);
    }
}
\f
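/* Illustrative sketch (hypothetical, not used by expr.c): how store_field
   above hands back the value just stored into a bit-field without
   refetching it.  An unsigned field is masked down to BITSIZE bits, as
   expand_and does with width_mask; a signed field is sign-extended by a
   left shift to the top of the word followed by an arithmetic right
   shift, matching the expand_shift pair above.  Assumes bitsize <
   word_bits and that signed right shifts are arithmetic, which is what
   the generated RTL guarantees on its side.  */

static long
refetch_bitfield_sketch (long temp, int bitsize, int word_bits, int unsignedp)
{
  if (unsignedp)
    /* expand_and with width_mask.  */
    return (long) ((unsigned long) temp & ((1UL << bitsize) - 1));

  /* expand_shift pair: left to the top of the word, arithmetic right
     back down.  */
  return ((long) ((unsigned long) temp << (word_bits - bitsize)))
         >> (word_bits - bitsize);
}
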
4791/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4792 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 4793 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
4794
4795 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4796 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
4797 If the position of the field is variable, we store a tree
4798 giving the variable offset (in units) in *POFFSET.
4799 This offset is in addition to the bit position.
4800 If the position is not variable, we store 0 in *POFFSET.
839c4796
RK
4801 We set *PALIGNMENT to the alignment in bytes of the address that will be
4802 computed. This is the alignment of the thing we return if *POFFSET
4803 is zero, but can be more less strictly aligned if *POFFSET is nonzero.
bbf6f052
RK
4804
4805 If any of the extraction expressions is volatile,
4806 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4807
4808 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4809 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
4810 is redundant.
4811
4812 If the field describes a variable-sized object, *PMODE is set to
4813 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
839c4796 4814 this case, but the address of the object can be found. */
bbf6f052
RK
4815
tree
get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
                     punsignedp, pvolatilep, palignment)
     tree exp;
     int *pbitsize;
     int *pbitpos;
     tree *poffset;
     enum machine_mode *pmode;
     int *punsignedp;
     int *pvolatilep;
     int *palignment;
{
  tree orig_exp = exp;
  tree size_tree = 0;
  enum machine_mode mode = VOIDmode;
  tree offset = integer_zero_node;
  unsigned int alignment = BIGGEST_ALIGNMENT;

  if (TREE_CODE (exp) == COMPONENT_REF)
    {
      size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
      if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
        mode = DECL_MODE (TREE_OPERAND (exp, 1));
      *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
    }
  else if (TREE_CODE (exp) == BIT_FIELD_REF)
    {
      size_tree = TREE_OPERAND (exp, 1);
      *punsignedp = TREE_UNSIGNED (exp);
    }
  else
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (mode == BLKmode)
        size_tree = TYPE_SIZE (TREE_TYPE (exp));

      *pbitsize = GET_MODE_BITSIZE (mode);
      *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
    }

  if (size_tree)
    {
      if (TREE_CODE (size_tree) != INTEGER_CST)
        mode = BLKmode, *pbitsize = -1;
      else
        *pbitsize = TREE_INT_CST_LOW (size_tree);
    }

  /* Compute cumulative bit-offset for nested component-refs and array-refs,
     and find the ultimate containing object.  */

  *pbitpos = 0;

  while (1)
    {
      if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
        {
          tree pos = (TREE_CODE (exp) == COMPONENT_REF
                      ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
                      : TREE_OPERAND (exp, 2));
          tree constant = integer_zero_node, var = pos;

          /* If this field hasn't been filled in yet, don't go
             past it.  This should only happen when folding expressions
             made during type construction.  */
          if (pos == 0)
            break;

          /* Assume here that the offset is a multiple of a unit.
             If not, there should be an explicitly added constant.  */
          if (TREE_CODE (pos) == PLUS_EXPR
              && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
            constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
          else if (TREE_CODE (pos) == INTEGER_CST)
            constant = pos, var = integer_zero_node;

          *pbitpos += TREE_INT_CST_LOW (constant);
          offset = size_binop (PLUS_EXPR, offset,
                               size_binop (EXACT_DIV_EXPR, var,
                                           size_int (BITS_PER_UNIT)));
        }

      else if (TREE_CODE (exp) == ARRAY_REF)
        {
          /* This code is based on the code in case ARRAY_REF in expand_expr
             below.  We assume here that the size of an array element is
             always an integral multiple of BITS_PER_UNIT.  */

          tree index = TREE_OPERAND (exp, 1);
          tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
          tree low_bound
            = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
          tree index_type = TREE_TYPE (index);
          tree xindex;

          if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
            {
              index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
                               index);
              index_type = TREE_TYPE (index);
            }

          /* Optimize the special-case of a zero lower bound.

             We convert the low_bound to sizetype to avoid some problems
             with constant folding.  (E.g. suppose the lower bound is 1,
             and its mode is QI.  Without the conversion, (ARRAY
             +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
             +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

             But sizetype isn't quite right either (especially if
             the low bound is negative).  FIXME */

          if (! integer_zerop (low_bound))
            index = fold (build (MINUS_EXPR, index_type, index,
                                 convert (sizetype, low_bound)));

          if (TREE_CODE (index) == INTEGER_CST)
            {
              index = convert (sbitsizetype, index);
              index_type = TREE_TYPE (index);
            }

          xindex = fold (build (MULT_EXPR, sbitsizetype, index,
                                convert (sbitsizetype,
                                         TYPE_SIZE (TREE_TYPE (exp)))));

          if (TREE_CODE (xindex) == INTEGER_CST
              && TREE_INT_CST_HIGH (xindex) == 0)
            *pbitpos += TREE_INT_CST_LOW (xindex);
          else
            {
              /* Either the bit offset calculated above is not constant, or
                 it overflowed.  In either case, redo the multiplication
                 against the size in units.  This is especially important
                 in the non-constant case to avoid a division at runtime.  */
              xindex = fold (build (MULT_EXPR, ssizetype, index,
                                    convert (ssizetype,
                                             TYPE_SIZE_UNIT (TREE_TYPE (exp)))));

              if (contains_placeholder_p (xindex))
                xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);

              offset = size_binop (PLUS_EXPR, offset, xindex);
            }
        }
      else if (TREE_CODE (exp) != NON_LVALUE_EXPR
               && ! ((TREE_CODE (exp) == NOP_EXPR
                      || TREE_CODE (exp) == CONVERT_EXPR)
                     && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
                           && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
                               != UNION_TYPE))
                     && (TYPE_MODE (TREE_TYPE (exp))
                         == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
        break;

      /* If any reference in the chain is volatile, the effect is volatile.  */
      if (TREE_THIS_VOLATILE (exp))
        *pvolatilep = 1;

      /* If the offset is non-constant already, then we can't assume any
         alignment more than the alignment here.  */
      if (! integer_zerop (offset))
        alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

      exp = TREE_OPERAND (exp, 0);
    }

  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
    alignment = MIN (alignment, DECL_ALIGN (exp));
  else if (TREE_TYPE (exp) != 0)
    alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));

  if (integer_zerop (offset))
    offset = 0;

  if (offset != 0 && contains_placeholder_p (offset))
    offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);

  *pmode = mode;
  *poffset = offset;
  *palignment = alignment / BITS_PER_UNIT;
  return exp;
}
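
/* A minimal usage sketch, not part of the original file: this is the
   calling convention that the COMPONENT_REF case of expand_expr below
   relies on.  The function name and locals are hypothetical.  */

static tree
example_decompose_ref (exp)
     tree exp;
{
  int bitsize, bitpos, unsignedp;
  int volatilep = 0;            /* Only ever set to 1, never cleared.  */
  int alignment;
  tree offset;
  enum machine_mode mode;
  /* INNER is the ultimate containing object.  BITSIZE/BITPOS locate the
     field within it; OFFSET, if nonzero, is an additional variable byte
     offset to apply before BITPOS.  */
  tree inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
                                    &mode, &unsignedp, &volatilep,
                                    &alignment);
  return inner;
}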

/* Subroutine of expand_expr: compute memory_usage from modifier.  */
static enum memory_use_mode
get_memory_usage_from_modifier (modifier)
     enum expand_modifier modifier;
{
  switch (modifier)
    {
    case EXPAND_NORMAL:
    case EXPAND_SUM:
      return MEMORY_USE_RO;
    case EXPAND_MEMORY_USE_WO:
      return MEMORY_USE_WO;
    case EXPAND_MEMORY_USE_RW:
      return MEMORY_USE_RW;
    case EXPAND_MEMORY_USE_DONT:
      /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
         MEMORY_USE_DONT, because they are modifiers to a call of
         expand_expr in the ADDR_EXPR case of expand_expr.  */
    case EXPAND_CONST_ADDRESS:
    case EXPAND_INITIALIZER:
      return MEMORY_USE_DONT;
    case EXPAND_MEMORY_USE_BAD:
    default:
      abort ();
    }
}

/* Given an rtx VALUE that may contain additions and multiplications,
   return an equivalent value that just refers to a register or memory.
   This is done by generating instructions to perform the arithmetic
   and returning a pseudo-register containing the value.

   The returned value may be a REG, SUBREG, MEM or constant.  */

rtx
force_operand (value, target)
     rtx value, target;
{
  register optab binoptab = 0;
  /* Use a temporary to force order of execution of calls to
     `force_operand'.  */
  rtx tmp;
  register rtx op2;
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);

  /* Check for a PIC address load.  */
  if (flag_pic
      && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
      && XEXP (value, 0) == pic_offset_table_rtx
      && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
          || GET_CODE (XEXP (value, 1)) == LABEL_REF
          || GET_CODE (XEXP (value, 1)) == CONST))
    {
      if (!subtarget)
        subtarget = gen_reg_rtx (GET_MODE (value));
      emit_move_insn (subtarget, value);
      return subtarget;
    }

  if (GET_CODE (value) == PLUS)
    binoptab = add_optab;
  else if (GET_CODE (value) == MINUS)
    binoptab = sub_optab;
  else if (GET_CODE (value) == MULT)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
          && !(GET_CODE (op2) == REG && op2 != subtarget))
        subtarget = 0;
      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_mult (GET_MODE (value), tmp,
                          force_operand (op2, NULL_RTX),
                          target, 0);
    }

  if (binoptab)
    {
      op2 = XEXP (value, 1);
      if (!CONSTANT_P (op2)
          && !(GET_CODE (op2) == REG && op2 != subtarget))
        subtarget = 0;
      if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
        {
          binoptab = add_optab;
          op2 = negate_rtx (GET_MODE (value), op2);
        }

      /* Check for an addition with OP2 a constant integer and our first
         operand a PLUS of a virtual register and something else.  In that
         case, we want to emit the sum of the virtual register and the
         constant first and then add the other value.  This allows virtual
         register instantiation to simply modify the constant rather than
         creating another one around this addition.  */
      if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
          && GET_CODE (XEXP (value, 0)) == PLUS
          && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
          && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
          && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
        {
          rtx temp = expand_binop (GET_MODE (value), binoptab,
                                   XEXP (XEXP (value, 0), 0), op2,
                                   subtarget, 0, OPTAB_LIB_WIDEN);
          return expand_binop (GET_MODE (value), binoptab, temp,
                               force_operand (XEXP (XEXP (value, 0), 1), 0),
                               target, 0, OPTAB_LIB_WIDEN);
        }

      tmp = force_operand (XEXP (value, 0), subtarget);
      return expand_binop (GET_MODE (value), binoptab, tmp,
                           force_operand (op2, NULL_RTX),
                           target, 0, OPTAB_LIB_WIDEN);
      /* We give UNSIGNEDP = 0 to expand_binop
         because the only operations we are expanding here are signed ones.  */
    }
  return value;
}
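
/* A minimal usage sketch, not part of the original file: force the sum
   of a register and a constant into an operand that a recognizer will
   accept.  The function name is hypothetical.  */

static rtx
example_force_sum (reg, offset)
     rtx reg;
     HOST_WIDE_INT offset;
{
  /* Build (plus:M REG (const_int OFFSET)) and let force_operand emit
     the needed add insns; with NULL_RTX as the target it chooses a
     fresh pseudo-register when one is required.  */
  rtx sum = gen_rtx_PLUS (GET_MODE (reg), reg, GEN_INT (offset));
  return force_operand (sum, NULL_RTX);
}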

/* Subroutine of expand_expr:
   save the non-copied parts (LIST) of an expr (LHS), and return a list
   which can restore these values to their previous values,
   should something modify their storage.  */

static tree
save_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
    else
      {
        tree part = TREE_VALUE (tail);
        tree part_type = TREE_TYPE (part);
        tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
        rtx target = assign_temp (part_type, 0, 1, 1);
        if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
          target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
        parts = tree_cons (to_be_saved,
                           build (RTL_EXPR, part_type, NULL_TREE,
                                  (tree) target),
                           parts);
        store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
      }
  return parts;
}

/* Subroutine of expand_expr:
   record the non-copied parts (LIST) of an expr (LHS), and return a list
   which specifies the initial values of these parts.  */

static tree
init_noncopied_parts (lhs, list)
     tree lhs;
     tree list;
{
  tree tail;
  tree parts = 0;

  for (tail = list; tail; tail = TREE_CHAIN (tail))
    if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
      parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
    else if (TREE_PURPOSE (tail))
      {
        tree part = TREE_VALUE (tail);
        tree part_type = TREE_TYPE (part);
        tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
        parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
      }
  return parts;
}

/* Subroutine of expand_expr: return nonzero iff there is no way that
   EXP can reference X, which is being modified.  TOP_P is nonzero if this
   call is going to be used to determine whether we need a temporary
   for EXP, as opposed to a recursive call to this function.

   It is always safe for this routine to return zero since it merely
   searches for optimization opportunities.  */

static int
safe_from_p (x, exp, top_p)
     rtx x;
     tree exp;
     int top_p;
{
  rtx exp_rtl = 0;
  int i, nops;
  static int save_expr_count;
  static int save_expr_size = 0;
  static tree *save_expr_rewritten;
  static tree save_expr_trees[256];

  if (x == 0
      /* If EXP has varying size, we MUST use a target since we currently
         have no way of allocating temporaries of variable size
         (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
         So we assume here that something at a higher level has prevented a
         clash.  This is somewhat bogus, but the best we can do.  Only
         do this when X is BLKmode and when we are at the top level.  */
      || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
          && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
              || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
              || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
              != INTEGER_CST)
          && GET_MODE (x) == BLKmode))
    return 1;

  if (top_p && save_expr_size == 0)
    {
      int rtn;

      save_expr_count = 0;
      save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
      save_expr_rewritten = &save_expr_trees[0];

      rtn = safe_from_p (x, exp, 1);

      for (i = 0; i < save_expr_count; ++i)
        {
          if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
            abort ();
          TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
        }

      save_expr_size = 0;

      return rtn;
    }

  /* If this is a subreg of a hard register, declare it unsafe, otherwise,
     find the underlying pseudo.  */
  if (GET_CODE (x) == SUBREG)
    {
      x = SUBREG_REG (x);
      if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
        return 0;
    }

  /* If X is a location in the outgoing argument area, it is always safe.  */
  if (GET_CODE (x) == MEM
      && (XEXP (x, 0) == virtual_outgoing_args_rtx
          || (GET_CODE (XEXP (x, 0)) == PLUS
              && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
    return 1;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case 'd':
      exp_rtl = DECL_RTL (exp);
      break;

    case 'c':
      return 1;

    case 'x':
      if (TREE_CODE (exp) == TREE_LIST)
        return ((TREE_VALUE (exp) == 0
                 || safe_from_p (x, TREE_VALUE (exp), 0))
                && (TREE_CHAIN (exp) == 0
                    || safe_from_p (x, TREE_CHAIN (exp), 0)));
      else if (TREE_CODE (exp) == ERROR_MARK)
        return 1;       /* An already-visited SAVE_EXPR? */
      else
        return 0;

    case '1':
      return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

    case '2':
    case '<':
      return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
              && safe_from_p (x, TREE_OPERAND (exp, 1), 0));

    case 'e':
    case 'r':
      /* Now do code-specific tests.  EXP_RTL is set to any rtx we find in
         the expression.  If it is set, we conflict iff we are that rtx or
         both are in memory.  Otherwise, we check all operands of the
         expression recursively.  */

      switch (TREE_CODE (exp))
        {
        case ADDR_EXPR:
          return (staticp (TREE_OPERAND (exp, 0))
                  || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
                  || TREE_STATIC (exp));

        case INDIRECT_REF:
          if (GET_CODE (x) == MEM)
            return 0;
          break;

        case CALL_EXPR:
          exp_rtl = CALL_EXPR_RTL (exp);
          if (exp_rtl == 0)
            {
              /* Assume that the call will clobber all hard registers and
                 all of memory.  */
              if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
                  || GET_CODE (x) == MEM)
                return 0;
            }

          break;

        case RTL_EXPR:
          /* If a sequence exists, we would have to scan every instruction
             in the sequence to see if it was safe.  This is probably not
             worthwhile.  */
          if (RTL_EXPR_SEQUENCE (exp))
            return 0;

          exp_rtl = RTL_EXPR_RTL (exp);
          break;

        case WITH_CLEANUP_EXPR:
          exp_rtl = RTL_EXPR_RTL (exp);
          break;

        case CLEANUP_POINT_EXPR:
          return safe_from_p (x, TREE_OPERAND (exp, 0), 0);

        case SAVE_EXPR:
          exp_rtl = SAVE_EXPR_RTL (exp);
          if (exp_rtl)
            break;

          /* This SAVE_EXPR might appear many times in the top-level
             safe_from_p() expression, and if it has a complex
             subexpression, examining it multiple times could result
             in a combinatorial explosion.  E.g. on an Alpha
             running at least 200MHz, a Fortran test case compiled with
             optimization took about 28 minutes to compile -- even though
             it was only a few lines long, and the complicated line causing
             so much time to be spent in the earlier version of safe_from_p()
             had only 293 or so unique nodes.

             So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
             where it is so we can turn it back in the top-level safe_from_p()
             when we're done.  */

          /* For now, don't bother re-sizing the array.  */
          if (save_expr_count >= save_expr_size)
            return 0;
          save_expr_rewritten[save_expr_count++] = exp;

          nops = tree_code_length[(int) SAVE_EXPR];
          for (i = 0; i < nops; i++)
            {
              tree operand = TREE_OPERAND (exp, i);
              if (operand == NULL_TREE)
                continue;
              TREE_SET_CODE (exp, ERROR_MARK);
              if (!safe_from_p (x, operand, 0))
                return 0;
              TREE_SET_CODE (exp, SAVE_EXPR);
            }
          TREE_SET_CODE (exp, ERROR_MARK);
          return 1;

        case BIND_EXPR:
          /* The only operand we look at is operand 1.  The rest aren't
             part of the expression.  */
          return safe_from_p (x, TREE_OPERAND (exp, 1), 0);

        case METHOD_CALL_EXPR:
          /* This takes a rtx argument, but shouldn't appear here.  */
          abort ();

        default:
          break;
        }

      /* If we have an rtx, we do not need to scan our operands.  */
      if (exp_rtl)
        break;

      nops = tree_code_length[(int) TREE_CODE (exp)];
      for (i = 0; i < nops; i++)
        if (TREE_OPERAND (exp, i) != 0
            && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
          return 0;
    }

  /* If we have an rtl, find any enclosed object.  Then see if we conflict
     with it.  */
  if (exp_rtl)
    {
      if (GET_CODE (exp_rtl) == SUBREG)
        {
          exp_rtl = SUBREG_REG (exp_rtl);
          if (GET_CODE (exp_rtl) == REG
              && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
            return 0;
        }

      /* If the rtl is X, then it is not safe.  Otherwise, it is unless both
         are memory and EXP is not readonly.  */
      return ! (rtx_equal_p (x, exp_rtl)
                || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
                    && ! TREE_READONLY (exp)));
    }

  /* If we reach here, it is safe.  */
  return 1;
}
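
/* A minimal usage sketch, not part of the original file: the typical
   pattern used elsewhere in this file for deciding whether TARGET can
   be reused while RHS is evaluated.  The function name is hypothetical.  */

static rtx
example_choose_safe_target (target, rhs, mode)
     rtx target;
     tree rhs;
     enum machine_mode mode;
{
  /* If evaluating RHS could reference TARGET, storing into TARGET early
     would clobber an operand, so fall back to a fresh pseudo-register.  */
  if (target == 0 || ! safe_from_p (target, rhs, 1))
    target = gen_reg_rtx (mode);
  return target;
}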

/* Subroutine of expand_expr: return nonzero iff EXP is an
   expression whose type is statically determinable.  */

static int
fixed_type_p (exp)
     tree exp;
{
  if (TREE_CODE (exp) == PARM_DECL
      || TREE_CODE (exp) == VAR_DECL
      || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
      || TREE_CODE (exp) == COMPONENT_REF
      || TREE_CODE (exp) == ARRAY_REF)
    return 1;
  return 0;
}

/* Subroutine of expand_expr: return rtx if EXP is a
   variable or parameter; else return 0.  */

static rtx
var_rtx (exp)
     tree exp;
{
  STRIP_NOPS (exp);
  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
    case VAR_DECL:
      return DECL_RTL (exp);
    default:
      return 0;
    }
}

#ifdef MAX_INTEGER_COMPUTATION_MODE
void
check_max_integer_computation_mode (exp)
     tree exp;
{
  enum tree_code code;
  enum machine_mode mode;

  /* Strip any NOPs that don't change the mode.  */
  STRIP_NOPS (exp);
  code = TREE_CODE (exp);

  /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE.  */
  if (code == NOP_EXPR
      && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
    return;

  /* First check the type of the overall operation.  We need only look at
     unary, binary and relational operations.  */
  if (TREE_CODE_CLASS (code) == '1'
      || TREE_CODE_CLASS (code) == '2'
      || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (exp));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        fatal ("unsupported wide integer operation");
    }

  /* Check operand of a unary op.  */
  if (TREE_CODE_CLASS (code) == '1')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        fatal ("unsupported wide integer operation");
    }

  /* Check operands of a binary/comparison op.  */
  if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
    {
      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        fatal ("unsupported wide integer operation");

      mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        fatal ("unsupported wide integer operation");
    }
}
#endif
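
/* A minimal usage sketch, not part of the original file, for expand_expr,
   which is documented and defined just below: expand a tree expression
   and make sure the value ends up in a register.  The function name is
   hypothetical.  */

static rtx
example_expand_to_reg (exp)
     tree exp;
{
  /* Passing NULL_RTX and VOIDmode leaves the choice of target and mode
     to expand_expr; the result may be a MEM or a constant, so copy it
     into a pseudo-register if it is not in one already.  */
  rtx val = expand_expr (exp, NULL_RTX, VOIDmode, EXPAND_NORMAL);
  if (GET_CODE (val) != REG)
    val = copy_to_reg (val);
  return val;
}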

/* expand_expr: generate code for computing expression EXP.
   An rtx for the computed value is returned.  The value is never null.
   In the case of a void EXP, const0_rtx is returned.

   The value may be stored in TARGET if TARGET is nonzero.
   TARGET is just a suggestion; callers must assume that
   the rtx returned may not be the same as TARGET.

   If TARGET is CONST0_RTX, it means that the value will be ignored.

   If TMODE is not VOIDmode, it suggests generating the
   result in mode TMODE.  But this is done only when convenient.
   Otherwise, TMODE is ignored and the value is generated in its natural mode.
   TMODE is just a suggestion; callers must assume that
   the rtx returned may not have mode TMODE.

   Note that TARGET may have neither TMODE nor MODE.  In that case, it
   probably will not be used.

   If MODIFIER is EXPAND_SUM then when EXP is an addition
   we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
   or a nest of (PLUS ...) and (MINUS ...) where the terms are
   products as above, or REG or MEM, or constant.
   Ordinarily in such cases we would output mul or add instructions
   and then return a pseudo reg containing the sum.

   EXPAND_INITIALIZER is much like EXPAND_SUM except that
   it also marks a label as absolutely required (it can't be dead).
   It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
   This is used for outputting expressions used in initializers.

   EXPAND_CONST_ADDRESS says that it is okay to return a MEM
   with a constant address even if that address is not normally legitimate.
   EXPAND_INITIALIZER and EXPAND_SUM also have this effect.  */

rtx
expand_expr (exp, target, tmode, modifier)
     register tree exp;
     rtx target;
     enum machine_mode tmode;
     enum expand_modifier modifier;
{
  /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
     This is static so it will be accessible to our recursive callees.  */
  static tree placeholder_list = 0;
  register rtx op0, op1, temp;
  tree type = TREE_TYPE (exp);
  int unsignedp = TREE_UNSIGNED (type);
  register enum machine_mode mode;
  register enum tree_code code = TREE_CODE (exp);
  optab this_optab;
  rtx subtarget, original_target;
  int ignore;
  tree context;
  /* Used by check-memory-usage to make modifier read only.  */
  enum expand_modifier ro_modifier;

  /* Handle ERROR_MARK before anybody tries to access its type.  */
  if (TREE_CODE (exp) == ERROR_MARK)
    {
      op0 = CONST0_RTX (tmode);
      if (op0 != 0)
        return op0;
      return const0_rtx;
    }

  mode = TYPE_MODE (type);
  /* Use subtarget as the target for operand 0 of a binary operation.  */
  subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
  original_target = target;
  ignore = (target == const0_rtx
            || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
                 || code == CONVERT_EXPR || code == REFERENCE_EXPR
                 || code == COND_EXPR)
                && TREE_CODE (type) == VOID_TYPE));

  /* Make a read-only version of the modifier.  */
  if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
      || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
    ro_modifier = modifier;
  else
    ro_modifier = EXPAND_NORMAL;

  /* Don't use hard regs as subtargets, because the combiner
     can only handle pseudo regs.  */
  if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
    subtarget = 0;
  /* Avoid subtargets inside loops,
     since they hide some invariant expressions.  */
  if (preserve_subexpressions_p ())
    subtarget = 0;

  /* If we are going to ignore this result, we need only do something
     if there is a side-effect somewhere in the expression.  If there
     is, short-circuit the most common cases here.  Note that we must
     not call expand_expr with anything but const0_rtx in case this
     is an initial expansion of a size that contains a PLACEHOLDER_EXPR.  */

  if (ignore)
    {
      if (! TREE_SIDE_EFFECTS (exp))
        return const0_rtx;

      /* Ensure we reference a volatile object even if value is ignored.  */
      if (TREE_THIS_VOLATILE (exp)
          && TREE_CODE (exp) != FUNCTION_DECL
          && mode != VOIDmode && mode != BLKmode)
        {
          temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
          if (GET_CODE (temp) == MEM)
            temp = copy_to_reg (temp);
          return const0_rtx;
        }

      if (TREE_CODE_CLASS (code) == '1')
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
                            VOIDmode, ro_modifier);
      else if (TREE_CODE_CLASS (code) == '2'
               || TREE_CODE_CLASS (code) == '<')
        {
          expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
          expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
          return const0_rtx;
        }
      else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
               && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
        /* If the second operand has no side effects, just evaluate
           the first.  */
        return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
                            VOIDmode, ro_modifier);

      target = 0;
    }

#ifdef MAX_INTEGER_COMPUTATION_MODE
  /* Only check stuff here if the mode we want is different from the mode
     of the expression; if it's the same, check_max_integer_computation_mode
     will handle it.  Do we really need to check this stuff at all?  */

  if (target
      && GET_MODE (target) != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != RTL_EXPR)
    {
      enum machine_mode mode = GET_MODE (target);

      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode > MAX_INTEGER_COMPUTATION_MODE)
        fatal ("unsupported wide integer operation");
    }

  if (tmode != mode
      && TREE_CODE (exp) != INTEGER_CST
      && TREE_CODE (exp) != PARM_DECL
      && TREE_CODE (exp) != ARRAY_REF
      && TREE_CODE (exp) != COMPONENT_REF
      && TREE_CODE (exp) != BIT_FIELD_REF
      && TREE_CODE (exp) != INDIRECT_REF
      && TREE_CODE (exp) != VAR_DECL
      && TREE_CODE (exp) != CALL_EXPR
      && TREE_CODE (exp) != RTL_EXPR
      && GET_MODE_CLASS (tmode) == MODE_INT
      && tmode > MAX_INTEGER_COMPUTATION_MODE)
    fatal ("unsupported wide integer operation");

  check_max_integer_computation_mode (exp);
#endif

  /* If we will do cse, generate all results into pseudo registers
     since 1) that allows cse to find more things
     and 2) otherwise cse could produce an insn the machine
     cannot support.  */

  if (! cse_not_expected && mode != BLKmode && target
      && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
    target = subtarget;

  switch (code)
    {
    case LABEL_DECL:
      {
        tree function = decl_function_context (exp);
        /* Handle using a label in a containing function.  */
        if (function != current_function_decl
            && function != inline_function_decl && function != 0)
          {
            struct function *p = find_function_data (function);
            /* Allocate in the memory associated with the function
               that the label is in.  */
            push_obstacks (p->function_obstack,
                           p->function_maybepermanent_obstack);

            p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
                                                  label_rtx (exp),
                                                  p->forced_labels);
            pop_obstacks ();
          }
        else
          {
            if (modifier == EXPAND_INITIALIZER)
              forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
                                                 label_rtx (exp),
                                                 forced_labels);
          }
        temp = gen_rtx_MEM (FUNCTION_MODE,
                            gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
        if (function != current_function_decl
            && function != inline_function_decl && function != 0)
          LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
        return temp;
      }

    case PARM_DECL:
      if (DECL_RTL (exp) == 0)
        {
          error_with_decl (exp, "prior parameter's size depends on `%s'");
          return CONST0_RTX (mode);
        }

      /* ... fall through ...  */

    case VAR_DECL:
      /* If a static var's type was incomplete when the decl was written,
         but the type is complete now, lay out the decl now.  */
      if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
          && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
        {
          push_obstacks_nochange ();
          end_temporary_allocation ();
          layout_decl (exp, 0);
          PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
          pop_obstacks ();
        }

      /* Although static-storage variables start off initialized, according to
         ANSI C, a memcpy could overwrite them with uninitialized values.  So
         we check them too.  This also lets us check for read-only variables
         accessed via a non-const declaration, in case it won't be detected
         any other way (e.g., in an embedded system or OS kernel without
         memory protection).

         Aggregates are not checked here; they're handled elsewhere.  */
      if (current_function_check_memory_usage && code == VAR_DECL
          && GET_CODE (DECL_RTL (exp)) == MEM
          && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
        {
          enum memory_use_mode memory_usage;
          memory_usage = get_memory_usage_from_modifier (modifier);

          if (memory_usage != MEMORY_USE_DONT)
            emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                               XEXP (DECL_RTL (exp), 0), Pmode,
                               GEN_INT (int_size_in_bytes (type)),
                               TYPE_MODE (sizetype),
                               GEN_INT (memory_usage),
                               TYPE_MODE (integer_type_node));
        }

      /* ... fall through ...  */

    case FUNCTION_DECL:
    case RESULT_DECL:
      if (DECL_RTL (exp) == 0)
        abort ();

      /* Ensure the variable is marked as used even if it doesn't go through
         a parser.  If it hasn't been used yet, write out an external
         definition.  */
      if (! TREE_USED (exp))
        {
          assemble_external (exp);
          TREE_USED (exp) = 1;
        }

      /* Show we haven't gotten RTL for this yet.  */
      temp = 0;

      /* Handle variables inherited from containing functions.  */
      context = decl_function_context (exp);

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */

      if (context != 0 && context != current_function_decl
          && context != inline_function_decl
          /* If var is static, we don't need a static chain to access it.  */
          && ! (GET_CODE (DECL_RTL (exp)) == MEM
                && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
        {
          rtx addr;

          /* Mark as non-local and addressable.  */
          DECL_NONLOCAL (exp) = 1;
          if (DECL_NO_STATIC_CHAIN (current_function_decl))
            abort ();
          mark_addressable (exp);
          if (GET_CODE (DECL_RTL (exp)) != MEM)
            abort ();
          addr = XEXP (DECL_RTL (exp), 0);
          if (GET_CODE (addr) == MEM)
            addr = gen_rtx_MEM (Pmode,
                                fix_lexical_addr (XEXP (addr, 0), exp));
          else
            addr = fix_lexical_addr (addr, exp);
          temp = change_address (DECL_RTL (exp), mode, addr);
        }

      /* This is the case of an array whose size is to be determined
         from its initializer, while the initializer is still being parsed.
         See expand_decl.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
        temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
                               XEXP (DECL_RTL (exp), 0));

      /* If DECL_RTL is memory, we are in the normal case and either
         the address is not valid or it is not a register and -fforce-addr
         is specified, get the address into a register.  */

      else if (GET_CODE (DECL_RTL (exp)) == MEM
               && modifier != EXPAND_CONST_ADDRESS
               && modifier != EXPAND_SUM
               && modifier != EXPAND_INITIALIZER
               && (! memory_address_p (DECL_MODE (exp),
                                       XEXP (DECL_RTL (exp), 0))
                   || (flag_force_addr
                       && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
        temp = change_address (DECL_RTL (exp), VOIDmode,
                               copy_rtx (XEXP (DECL_RTL (exp), 0)));

      /* If we got something, return it.  But first, set the alignment
         if the address is a register.  */
      if (temp != 0)
        {
          if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
            mark_reg_pointer (XEXP (temp, 0),
                              DECL_ALIGN (exp) / BITS_PER_UNIT);

          return temp;
        }

      /* If the mode of DECL_RTL does not match that of the decl, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (DECL_RTL (exp)) == REG
          && GET_MODE (DECL_RTL (exp)) != mode)
        {
          /* Get the signedness used for this variable.  Ensure we get the
             same mode we got when the variable was declared.  */
          if (GET_MODE (DECL_RTL (exp))
              != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
            abort ();

          temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return DECL_RTL (exp);

    case INTEGER_CST:
      return immed_double_const (TREE_INT_CST_LOW (exp),
                                 TREE_INT_CST_HIGH (exp),
                                 mode);

    case CONST_DECL:
      return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
                          EXPAND_MEMORY_USE_BAD);

    case REAL_CST:
      /* If optimized, generate immediate CONST_DOUBLE
         which will be turned into memory by reload if necessary.

         We used to force a register so that loop.c could see it.  But
         this does not allow gen_* patterns to perform optimizations with
         the constants.  It also produces two insns in cases like "x = 1.0;".
         On most machines, floating-point constants are not permitted in
         many insns, so we'd end up copying it to a register in any case.

         Now, we do the copying in expand_binop, if appropriate.  */
      return immed_real_const (exp);

    case COMPLEX_CST:
    case STRING_CST:
      if (! TREE_CST_RTL (exp))
        output_constant_def (exp);

      /* TREE_CST_RTL probably contains a constant address.
         On RISC machines where a constant address isn't valid,
         make some insns to get that address into a register.  */
      if (GET_CODE (TREE_CST_RTL (exp)) == MEM
          && modifier != EXPAND_CONST_ADDRESS
          && modifier != EXPAND_INITIALIZER
          && modifier != EXPAND_SUM
          && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
              || (flag_force_addr
                  && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
        return change_address (TREE_CST_RTL (exp), VOIDmode,
                               copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
      return TREE_CST_RTL (exp);

    case EXPR_WITH_FILE_LOCATION:
      {
        rtx to_return;
        char *saved_input_filename = input_filename;
        int saved_lineno = lineno;
        input_filename = EXPR_WFL_FILENAME (exp);
        lineno = EXPR_WFL_LINENO (exp);
        if (EXPR_WFL_EMIT_LINE_NOTE (exp))
          emit_line_note (input_filename, lineno);
        /* Possibly avoid switching back and forth here.  */
        to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
        input_filename = saved_input_filename;
        lineno = saved_lineno;
        return to_return;
      }

    case SAVE_EXPR:
      context = decl_function_context (exp);

      /* If this SAVE_EXPR was at global context, assume we are an
         initialization function and move it into our context.  */
      if (context == 0)
        SAVE_EXPR_CONTEXT (exp) = current_function_decl;

      /* We treat inline_function_decl as an alias for the current function
         because that is the inline function whose vars, types, etc.
         are being merged into the current function.
         See expand_inline_function.  */
      if (context == current_function_decl || context == inline_function_decl)
        context = 0;

      /* If this is non-local, handle it.  */
      if (context)
        {
          /* The following call just exists to abort if the context is
             not of a containing function.  */
          find_function_data (context);

          temp = SAVE_EXPR_RTL (exp);
          if (temp && GET_CODE (temp) == REG)
            {
              put_var_into_stack (exp);
              temp = SAVE_EXPR_RTL (exp);
            }
          if (temp == 0 || GET_CODE (temp) != MEM)
            abort ();
          return change_address (temp, mode,
                                 fix_lexical_addr (XEXP (temp, 0), exp));
        }
      if (SAVE_EXPR_RTL (exp) == 0)
        {
          if (mode == VOIDmode)
            temp = const0_rtx;
          else
            temp = assign_temp (type, 3, 0, 0);

          SAVE_EXPR_RTL (exp) = temp;
          if (!optimize && GET_CODE (temp) == REG)
            save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
                                                save_expr_regs);

          /* If the mode of TEMP does not match that of the expression, it
             must be a promoted value.  We pass store_expr a SUBREG of the
             wanted mode but mark it so that we know that it was already
             extended.  Note that `unsignedp' was modified above in
             this case.  */

          if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
            {
              temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
              SUBREG_PROMOTED_VAR_P (temp) = 1;
              SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
            }

          if (temp == const0_rtx)
            expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
                         EXPAND_MEMORY_USE_BAD);
          else
            store_expr (TREE_OPERAND (exp, 0), temp, 0);

          TREE_USED (exp) = 1;
        }

      /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
         must be a promoted value.  We return a SUBREG of the wanted mode,
         but mark it so that we know that it was already extended.  */

      if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
          && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
        {
          /* Compute the signedness and make the proper SUBREG.  */
          promote_mode (type, mode, &unsignedp, 0);
          temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
          SUBREG_PROMOTED_VAR_P (temp) = 1;
          SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
          return temp;
        }

      return SAVE_EXPR_RTL (exp);

    case UNSAVE_EXPR:
      {
        rtx temp;
        temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
        TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
        return temp;
      }

    case PLACEHOLDER_EXPR:
      {
        tree placeholder_expr;

        /* If there is an object on the head of the placeholder list,
           see if some object in it is of type TYPE or a pointer to it.  For
           further information, see tree.def.  */
        for (placeholder_expr = placeholder_list;
             placeholder_expr != 0;
             placeholder_expr = TREE_CHAIN (placeholder_expr))
          {
            tree need_type = TYPE_MAIN_VARIANT (type);
            tree object = 0;
            tree old_list = placeholder_list;
            tree elt;

            /* Find the outermost reference that is of the type we want.
               If none, see if any object has a type that is a pointer to
               the type we want.  */
            for (elt = TREE_PURPOSE (placeholder_expr);
                 elt != 0 && object == 0;
                 elt
                 = ((TREE_CODE (elt) == COMPOUND_EXPR
                     || TREE_CODE (elt) == COND_EXPR)
                    ? TREE_OPERAND (elt, 1)
                    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
                    ? TREE_OPERAND (elt, 0) : 0))
              if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
                object = elt;

            for (elt = TREE_PURPOSE (placeholder_expr);
                 elt != 0 && object == 0;
                 elt
                 = ((TREE_CODE (elt) == COMPOUND_EXPR
                     || TREE_CODE (elt) == COND_EXPR)
                    ? TREE_OPERAND (elt, 1)
                    : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
                       || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
                    ? TREE_OPERAND (elt, 0) : 0))
              if (POINTER_TYPE_P (TREE_TYPE (elt))
                  && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
                      == need_type))
                object = build1 (INDIRECT_REF, need_type, elt);

            if (object != 0)
              {
                /* Expand this object skipping the list entries before
                   it was found in case it is also a PLACEHOLDER_EXPR.
                   In that case, we want to translate it using subsequent
                   entries.  */
                placeholder_list = TREE_CHAIN (placeholder_expr);
                temp = expand_expr (object, original_target, tmode,
                                    ro_modifier);
                placeholder_list = old_list;
                return temp;
              }
          }
      }

      /* We can't find the object or there was a missing WITH_RECORD_EXPR.  */
      abort ();

    case WITH_RECORD_EXPR:
      /* Put the object on the placeholder list, expand our first operand,
         and pop the list.  */
      placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
                                    placeholder_list);
      target = expand_expr (TREE_OPERAND (exp, 0), original_target,
                            tmode, ro_modifier);
      placeholder_list = TREE_CHAIN (placeholder_list);
      return target;

    case GOTO_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == LABEL_DECL)
        expand_goto (TREE_OPERAND (exp, 0));
      else
        expand_computed_goto (TREE_OPERAND (exp, 0));
      return const0_rtx;

    case EXIT_EXPR:
      expand_exit_loop_if_false (NULL_PTR,
                                 invert_truthvalue (TREE_OPERAND (exp, 0)));
      return const0_rtx;

    case LABELED_BLOCK_EXPR:
      if (LABELED_BLOCK_BODY (exp))
        expand_expr_stmt (LABELED_BLOCK_BODY (exp));
      emit_label (label_rtx (LABELED_BLOCK_LABEL (exp)));
      return const0_rtx;

    case EXIT_BLOCK_EXPR:
      if (EXIT_BLOCK_RETURN (exp))
        sorry ("returned value in block_exit_expr");
      expand_goto (LABELED_BLOCK_LABEL (EXIT_BLOCK_LABELED_BLOCK (exp)));
      return const0_rtx;

    case LOOP_EXPR:
      push_temp_slots ();
      expand_start_loop (1);
      expand_expr_stmt (TREE_OPERAND (exp, 0));
      expand_end_loop ();
      pop_temp_slots ();

      return const0_rtx;

    case BIND_EXPR:
      {
        tree vars = TREE_OPERAND (exp, 0);
        int vars_need_expansion = 0;

        /* Need to open a binding contour here because
           if there are any cleanups they must be contained here.  */
        expand_start_bindings (0);

        /* Mark the corresponding BLOCK for output in its proper place.  */
        if (TREE_OPERAND (exp, 2) != 0
            && ! TREE_USED (TREE_OPERAND (exp, 2)))
          insert_block (TREE_OPERAND (exp, 2));

        /* If VARS have not yet been expanded, expand them now.  */
        while (vars)
          {
            if (DECL_RTL (vars) == 0)
              {
                vars_need_expansion = 1;
                expand_decl (vars);
              }
            expand_decl_init (vars);
            vars = TREE_CHAIN (vars);
          }

        temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);

        expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);

        return temp;
      }

    case RTL_EXPR:
      if (RTL_EXPR_SEQUENCE (exp))
        {
          if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
            abort ();
          emit_insns (RTL_EXPR_SEQUENCE (exp));
          RTL_EXPR_SEQUENCE (exp) = const0_rtx;
        }
      preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
      free_temps_for_rtl_expr (exp);
      return RTL_EXPR_RTL (exp);

    case CONSTRUCTOR:
      /* If we don't need the result, just ensure we evaluate any
         subexpressions.  */
      if (ignore)
        {
          tree elt;
          for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
            expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
                         EXPAND_MEMORY_USE_BAD);
          return const0_rtx;
        }

      /* All elts simple constants => refer to a constant in memory.  But
         if this is a non-BLKmode mode, let it store a field at a time
         since that should make a CONST_INT or CONST_DOUBLE when we
         fold.  Likewise, if we have a target we can use, it is best to
         store directly into the target unless the type is large enough
         that memcpy will be used.  If we are making an initializer and
         all operands are constant, put it in memory as well.  */
      else if ((TREE_STATIC (exp)
                && ((mode == BLKmode
                     && ! (target != 0 && safe_from_p (target, exp, 1)))
                    || TREE_ADDRESSABLE (exp)
                    || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
                        && (!MOVE_BY_PIECES_P
                            (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
                             TYPE_ALIGN (type) / BITS_PER_UNIT))
                        && ! mostly_zeros_p (exp))))
               || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
        {
          rtx constructor = output_constant_def (exp);
          if (modifier != EXPAND_CONST_ADDRESS
              && modifier != EXPAND_INITIALIZER
              && modifier != EXPAND_SUM
              && (! memory_address_p (GET_MODE (constructor),
                                      XEXP (constructor, 0))
                  || (flag_force_addr
                      && GET_CODE (XEXP (constructor, 0)) != REG)))
            constructor = change_address (constructor, VOIDmode,
                                          XEXP (constructor, 0));
          return constructor;
        }

      else
        {
          /* Handle calls that pass values in multiple non-contiguous
             locations.  The Irix 6 ABI has examples of this.  */
          if (target == 0 || ! safe_from_p (target, exp, 1)
              || GET_CODE (target) == PARALLEL)
            {
              if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
                target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
              else
                target = assign_temp (type, 0, 1, 1);
            }

          if (TREE_READONLY (exp))
            {
              if (GET_CODE (target) == MEM)
                target = copy_rtx (target);

              RTX_UNCHANGING_P (target) = 1;
            }

          store_constructor (exp, target, 0);
          return target;
        }

    case INDIRECT_REF:
      {
        tree exp1 = TREE_OPERAND (exp, 0);
        tree exp2;
        tree index;
        tree string = string_constant (exp1, &index);
        int i;

        /* Try to optimize reads from const strings.  */
        if (string
            && TREE_CODE (string) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && !TREE_INT_CST_HIGH (index)
            && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
            && GET_MODE_CLASS (mode) == MODE_INT
            && GET_MODE_SIZE (mode) == 1
            && modifier != EXPAND_MEMORY_USE_WO)
          return GEN_INT (TREE_STRING_POINTER (string)[i]);

        op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
        op0 = memory_address (mode, op0);

        if (current_function_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
          {
            enum memory_use_mode memory_usage;
            memory_usage = get_memory_usage_from_modifier (modifier);

            if (memory_usage != MEMORY_USE_DONT)
              {
                in_check_memory_usage = 1;
                emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
                                   op0, Pmode,
                                   GEN_INT (int_size_in_bytes (type)),
                                   TYPE_MODE (sizetype),
                                   GEN_INT (memory_usage),
                                   TYPE_MODE (integer_type_node));
                in_check_memory_usage = 0;
              }
          }

        temp = gen_rtx_MEM (mode, op0);
        /* If address was computed by addition,
           mark this as an element of an aggregate.  */
        if (TREE_CODE (exp1) == PLUS_EXPR
            || (TREE_CODE (exp1) == SAVE_EXPR
                && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
            || AGGREGATE_TYPE_P (TREE_TYPE (exp))
            || (TREE_CODE (exp1) == ADDR_EXPR
                && (exp2 = TREE_OPERAND (exp1, 0))
                && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
          MEM_SET_IN_STRUCT_P (temp, 1);

        MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
        MEM_ALIAS_SET (temp) = get_alias_set (exp);

        /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
           here, because, in C and C++, the fact that a location is accessed
           through a pointer to const does not mean that the value there can
           never change.  Languages where it can never change should
           also set TREE_STATIC.  */
        RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
        return temp;
      }

    case ARRAY_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
        abort ();

      {
        tree array = TREE_OPERAND (exp, 0);
        tree domain = TYPE_DOMAIN (TREE_TYPE (array));
        tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
        tree index = TREE_OPERAND (exp, 1);
        tree index_type = TREE_TYPE (index);
        HOST_WIDE_INT i;

        /* Optimize the special-case of a zero lower bound.

           We convert the low_bound to sizetype to avoid some problems
           with constant folding.  (E.g. suppose the lower bound is 1,
           and its mode is QI.  Without the conversion, (ARRAY
           +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
           +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)

           But sizetype isn't quite right either (especially if
           the low bound is negative).  FIXME */

        if (! integer_zerop (low_bound))
          index = fold (build (MINUS_EXPR, index_type, index,
                               convert (sizetype, low_bound)));

        /* Fold an expression like: "foo"[2].
           This is not done in fold so it won't happen inside &.
           Don't fold if this is for wide characters since it's too
           difficult to do correctly and this is a very rare case.  */

        if (TREE_CODE (array) == STRING_CST
            && TREE_CODE (index) == INTEGER_CST
            && !TREE_INT_CST_HIGH (index)
            && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
            && GET_MODE_CLASS (mode) == MODE_INT
            && GET_MODE_SIZE (mode) == 1)
          return GEN_INT (TREE_STRING_POINTER (array)[i]);

        /* If this is a constant index into a constant array,
           just get the value from the array.  Handle both the cases when
           we have an explicit constructor and when our operand is a variable
           that was declared const.  */

        if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
          {
            if (TREE_CODE (index) == INTEGER_CST
                && TREE_INT_CST_HIGH (index) == 0)
              {
                tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));

                i = TREE_INT_CST_LOW (index);
                while (elem && i--)
                  elem = TREE_CHAIN (elem);
                if (elem)
                  return expand_expr (fold (TREE_VALUE (elem)), target,
                                      tmode, ro_modifier);
              }
          }

        else if (optimize >= 1
                 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
                 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
                 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
          {
            if (TREE_CODE (index) == INTEGER_CST)
              {
                tree init = DECL_INITIAL (array);

                i = TREE_INT_CST_LOW (index);
                if (TREE_CODE (init) == CONSTRUCTOR)
                  {
                    tree elem = CONSTRUCTOR_ELTS (init);

                    while (elem
                           && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
                      elem = TREE_CHAIN (elem);
                    if (elem)
                      return expand_expr (fold (TREE_VALUE (elem)), target,
                                          tmode, ro_modifier);
                  }
                else if (TREE_CODE (init) == STRING_CST
                         && TREE_INT_CST_HIGH (index) == 0
                         && (TREE_INT_CST_LOW (index)
                             < TREE_STRING_LENGTH (init)))
                  return (GEN_INT
                          (TREE_STRING_POINTER
                           (init)[TREE_INT_CST_LOW (index)]));
              }
          }
      }

      /* ... fall through ...  */

6409 case COMPONENT_REF:
6410 case BIT_FIELD_REF:
4af3895e 6411 /* If the operand is a CONSTRUCTOR, we can just extract the
7a0b7b9a
RK
6412 appropriate field if it is present. Don't do this if we have
6413 already written the data since we want to refer to that copy
6414 and varasm.c assumes that's what we'll do. */
4af3895e 6415 if (code != ARRAY_REF
7a0b7b9a
RK
6416 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6417 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
4af3895e
JVA
6418 {
6419 tree elt;
6420
6421 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6422 elt = TREE_CHAIN (elt))
86b5812c
RK
6423 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6424 /* We can normally use the value of the field in the
6425 CONSTRUCTOR. However, if this is a bitfield in
6426 an integral mode that we can fit in a HOST_WIDE_INT,
6427 we must mask only the number of bits in the bitfield,
6428 since this is done implicitly by the constructor. If
6429 the bitfield does not meet either of those conditions,
6430 we can't do this optimization. */
6431 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6432 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6433 == MODE_INT)
6434 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6435 <= HOST_BITS_PER_WIDE_INT))))
6436 {
6437 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6438 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6439 {
6440 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
86b5812c
RK
6441
6442 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6443 {
6444 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6445 op0 = expand_and (op0, op1, target);
6446 }
6447 else
6448 {
e5e809f4
JL
6449 enum machine_mode imode
6450 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c 6451 tree count
e5e809f4
JL
6452 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6453 0);
86b5812c
RK
6454
6455 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6456 target, 0);
6457 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6458 target, 0);
6459 }
6460 }
6461
6462 return op0;
6463 }
4af3895e
JVA
6464 }
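
      /* Sketch of the bitfield masking above: for an unsigned 5-bit
	 field the value is ANDed with (1 << 5) - 1 == 0x1f; for a
	 signed 5-bit field held in SImode it is shifted left by
	 32 - 5 == 27 and then arithmetically right by 27, which
	 sign-extends the field's top bit through the whole word.  */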

      {
	enum machine_mode mode1;
	int bitsize;
	int bitpos;
	tree offset;
	int volatilep = 0;
	int alignment;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep,
					&alignment);

	/* If we got back the original object, something is wrong.  Perhaps
	   we are evaluating an expression too early.  In any event, don't
	   infinitely recurse.  */
	if (tem == exp)
	  abort ();

	/* If TEM's type is a union of variable size, pass TARGET to the
	   inner computation, since it will need a temporary and TARGET is
	   known to be adequate.  This occurs in unchecked conversion in
	   Ada.  */

	op0 = expand_expr (tem,
			   (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
			    && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
				!= INTEGER_CST)
			    ? target : NULL_RTX),
			   VOIDmode,
			   modifier == EXPAND_INITIALIZER
			   ? modifier : EXPAND_NORMAL);

	/* If this is a constant, put it into a register if it is a
	   legitimate constant and memory if it isn't.  */
	if (CONSTANT_P (op0))
	  {
	    enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
	    if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
	      op0 = force_reg (mode, op0);
	    else
	      op0 = validize_mem (force_const_mem (mode, op0));
	  }

	if (offset != 0)
	  {
	    rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);

	    if (GET_CODE (op0) != MEM)
	      abort ();

	    if (GET_MODE (offset_rtx) != ptr_mode)
	      {
#ifdef POINTERS_EXTEND_UNSIGNED
		offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
#else
		offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
#endif
	      }

	    /* A constant address in OP0 can have VOIDmode; we must not try
	       to call force_reg for that case.  Avoid that case.  */
	    if (GET_CODE (op0) == MEM
		&& GET_MODE (op0) == BLKmode
		&& GET_MODE (XEXP (op0, 0)) != VOIDmode
		&& bitsize
		&& (bitpos % bitsize) == 0
		&& (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
		&& (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
	      {
		rtx temp = change_address (op0, mode1,
					   plus_constant (XEXP (op0, 0),
							  (bitpos /
							   BITS_PER_UNIT)));
		if (GET_CODE (XEXP (temp, 0)) == REG)
		  op0 = temp;
		else
		  op0 = change_address (op0, mode1,
					force_reg (GET_MODE (XEXP (temp, 0)),
						   XEXP (temp, 0)));
		bitpos = 0;
	      }

	    op0 = change_address (op0, VOIDmode,
				  gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
						force_reg (ptr_mode,
							   offset_rtx)));
	  }

	/* Don't forget about volatility even if this is a bitfield.  */
	if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
	  {
	    op0 = copy_rtx (op0);
	    MEM_VOLATILE_P (op0) = 1;
	  }

	/* Check the access.  */
	if (current_function_check_memory_usage && GET_CODE (op0) == MEM)
	  {
	    enum memory_use_mode memory_usage;
	    memory_usage = get_memory_usage_from_modifier (modifier);

	    if (memory_usage != MEMORY_USE_DONT)
	      {
		rtx to;
		int size;

		to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
		size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;

		/* Check the access right of the pointer.  */
		if (size > BITS_PER_UNIT)
		  emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
				     to, Pmode,
				     GEN_INT (size / BITS_PER_UNIT),
				     TYPE_MODE (sizetype),
				     GEN_INT (memory_usage),
				     TYPE_MODE (integer_type_node));
	      }
	  }

	/* In cases where an aligned union has an unaligned object
	   as a field, we might be extracting a BLKmode value from
	   an integer-mode (e.g., SImode) object.  Handle this case
	   by doing the extract into an object as wide as the field
	   (which we know to be the width of a basic mode), then
	   storing into memory, and changing the mode to BLKmode.
	   If we ultimately want the address (EXPAND_CONST_ADDRESS or
	   EXPAND_INITIALIZER), then we must not copy to a temporary.  */
	if (mode1 == VOIDmode
	    || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
	    || (modifier != EXPAND_CONST_ADDRESS
		&& modifier != EXPAND_INITIALIZER
		&& ((mode1 != BLKmode && ! direct_load[(int) mode1]
		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
		     && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
		    /* If the field isn't aligned enough to fetch as a
		       memref, fetch it as a bit field.  */
		    || (SLOW_UNALIGNED_ACCESS
			&& ((TYPE_ALIGN (TREE_TYPE (tem))
			     < (unsigned int) GET_MODE_ALIGNMENT (mode))
			    || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
	  {
	    enum machine_mode ext_mode = mode;

	    if (ext_mode == BLKmode)
	      ext_mode = mode_for_size (bitsize, MODE_INT, 1);

	    if (ext_mode == BLKmode)
	      {
		/* In this case, BITPOS must start at a byte boundary and
		   TARGET, if specified, must be a MEM.  */
		if (GET_CODE (op0) != MEM
		    || (target != 0 && GET_CODE (target) != MEM)
		    || bitpos % BITS_PER_UNIT != 0)
		  abort ();

		op0 = change_address (op0, VOIDmode,
				      plus_constant (XEXP (op0, 0),
						     bitpos / BITS_PER_UNIT));
		if (target == 0)
		  target = assign_temp (type, 0, 1, 1);

		emit_block_move (target, op0,
				 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
					  / BITS_PER_UNIT),
				 1);

		return target;
	      }

	    op0 = validize_mem (op0);

	    if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
	      mark_reg_pointer (XEXP (op0, 0), alignment);

	    op0 = extract_bit_field (op0, bitsize, bitpos,
				     unsignedp, target, ext_mode, ext_mode,
				     alignment,
				     int_size_in_bytes (TREE_TYPE (tem)));

	    /* If the result is a record type and BITSIZE is narrower than
	       the mode of OP0, an integral mode, and this is a big endian
	       machine, we must put the field into the high-order bits.  */
	    if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
		&& GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
		&& bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
	      op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
				  size_int (GET_MODE_BITSIZE (GET_MODE (op0))
					    - bitsize),
				  op0, 1);

	    if (mode == BLKmode)
	      {
		rtx new = assign_stack_temp (ext_mode,
					     bitsize / BITS_PER_UNIT, 0);

		emit_move_insn (new, op0);
		op0 = copy_rtx (new);
		PUT_MODE (op0, BLKmode);
		MEM_SET_IN_STRUCT_P (op0, 1);
	      }

	    return op0;
	  }
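
	/* Example of the big-endian adjustment above (sketch): a 3-bit
	   field extracted into an SImode OP0 for a RECORD_TYPE result
	   sits in the low-order bits; the LSHIFT_EXPR by 32 - 3 == 29
	   moves it to the high-order end, where big-endian targets
	   expect the leading bytes of a BLKmode value.  */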

	/* If the result is BLKmode, use that to access the object
	   now as well.  */
	if (mode == BLKmode)
	  mode1 = BLKmode;

	/* Get a reference to just this component.  */
	if (modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
						   (bitpos / BITS_PER_UNIT)));
	else
	  op0 = change_address (op0, mode1,
				plus_constant (XEXP (op0, 0),
					       (bitpos / BITS_PER_UNIT)));

	if (GET_CODE (op0) == MEM)
	  MEM_ALIAS_SET (op0) = get_alias_set (exp);

	if (GET_CODE (XEXP (op0, 0)) == REG)
	  mark_reg_pointer (XEXP (op0, 0), alignment);

	MEM_SET_IN_STRUCT_P (op0, 1);
	MEM_VOLATILE_P (op0) |= volatilep;
	if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
	    || modifier == EXPAND_CONST_ADDRESS
	    || modifier == EXPAND_INITIALIZER)
	  return op0;
	else if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	convert_move (target, op0, unsignedp);
	return target;
      }

      /* Intended for a reference to a buffer of a file-object in Pascal.
	 But it's not certain that a special tree code will really be
	 necessary for these.  INDIRECT_REF might work for them.  */
    case BUFFER_REF:
      abort ();

    case IN_EXPR:
      {
	/* Pascal set IN expression.

	   Algorithm:
	       rlo       = set_low - (set_low % bits_per_word);
	       the_word  = set [(index - rlo) / bits_per_word];
	       bit_index = index % bits_per_word;
	       bitmask   = 1 << bit_index;
	       return !!(the_word & bitmask);  */
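
	/* Worked example of the algorithm, assuming 8-bit units: for
	   set_low == 0 and index == 10, the word fetched is
	   set[10 / 8] == set[1], bit_index is 10 % 8 == 2, and the mask
	   tested is 1 << 2.  */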

	tree set = TREE_OPERAND (exp, 0);
	tree index = TREE_OPERAND (exp, 1);
	int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
	tree set_type = TREE_TYPE (set);
	tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
	tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
	rtx index_val = expand_expr (index, 0, VOIDmode, 0);
	rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
	rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
	rtx setval = expand_expr (set, 0, VOIDmode, 0);
	rtx setaddr = XEXP (setval, 0);
	enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
	rtx rlow;
	rtx diff, quo, rem, addr, bit, result;

	preexpand_calls (exp);

	/* If domain is empty, answer is no.  Likewise if index is constant
	   and out of bounds.  */
	if (((TREE_CODE (set_high_bound) == INTEGER_CST
	      && TREE_CODE (set_low_bound) == INTEGER_CST
	      && tree_int_cst_lt (set_high_bound, set_low_bound))
	     || (TREE_CODE (index) == INTEGER_CST
		 && TREE_CODE (set_low_bound) == INTEGER_CST
		 && tree_int_cst_lt (index, set_low_bound))
	     || (TREE_CODE (set_high_bound) == INTEGER_CST
		 && TREE_CODE (index) == INTEGER_CST
		 && tree_int_cst_lt (set_high_bound, index))))
	  return const0_rtx;

	if (target == 0)
	  target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

	/* If we get here, we have to generate the code for both cases
	   (in range and out of range).  */

	op0 = gen_label_rtx ();
	op1 = gen_label_rtx ();

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (lo_r) == CONST_INT))
	  {
	    emit_cmp_and_jump_insns (index_val, lo_r, LT, NULL_RTX,
				     GET_MODE (index_val), iunsignedp, 0, op1);
	  }

	if (! (GET_CODE (index_val) == CONST_INT
	       && GET_CODE (hi_r) == CONST_INT))
	  {
	    emit_cmp_and_jump_insns (index_val, hi_r, GT, NULL_RTX,
				     GET_MODE (index_val), iunsignedp, 0, op1);
	  }

	/* Calculate the element number of bit zero in the first word
	   of the set.  */
	if (GET_CODE (lo_r) == CONST_INT)
	  rlow = GEN_INT (INTVAL (lo_r)
			  & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
	else
	  rlow = expand_binop (index_mode, and_optab, lo_r,
			       GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
			       NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	diff = expand_binop (index_mode, sub_optab, index_val, rlow,
			     NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);

	quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
	rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
			     GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);

	addr = memory_address (byte_mode,
			       expand_binop (index_mode, add_optab, diff,
					     setaddr, NULL_RTX, iunsignedp,
					     OPTAB_LIB_WIDEN));

	/* Extract the bit we want to examine.  */
	bit = expand_shift (RSHIFT_EXPR, byte_mode,
			    gen_rtx_MEM (byte_mode, addr),
			    make_tree (TREE_TYPE (index), rem),
			    NULL_RTX, 1);
	result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
			       GET_MODE (target) == byte_mode ? target : 0,
			       1, OPTAB_LIB_WIDEN);

	if (result != target)
	  convert_move (target, result, 1);

	/* Output the code to handle the out-of-range case.  */
	emit_jump (op0);
	emit_label (op1);
	emit_move_insn (target, const0_rtx);
	emit_label (op0);
	return target;
      }

    case WITH_CLEANUP_EXPR:
      if (RTL_EXPR_RTL (exp) == 0)
	{
	  RTL_EXPR_RTL (exp)
	    = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
	  expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));

	  /* That's it for this cleanup.  */
	  TREE_OPERAND (exp, 2) = 0;
	}
      return RTL_EXPR_RTL (exp);

    case CLEANUP_POINT_EXPR:
      {
	/* Start a new binding layer that will keep track of all cleanup
	   actions to be performed.  */
	expand_start_bindings (0);

	target_temp_slot_level = temp_slot_level;

	op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
	/* If we're going to use this value, load it up now.  */
	if (! ignore)
	  op0 = force_not_mem (op0);
	preserve_temp_slots (op0);
	expand_end_bindings (NULL_TREE, 0, 0);
      }
      return op0;

    case CALL_EXPR:
      /* Check for a built-in function.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
	  && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
	      == FUNCTION_DECL)
	  && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	return expand_builtin (exp, target, subtarget, tmode, ignore);

      /* If this call was expanded already by preexpand_calls,
	 just return the result we got.  */
      if (CALL_EXPR_RTL (exp) != 0)
	return CALL_EXPR_RTL (exp);

      return expand_call (exp, target, ignore);

    case NON_LVALUE_EXPR:
    case NOP_EXPR:
    case CONVERT_EXPR:
    case REFERENCE_EXPR:
      if (TREE_CODE (type) == UNION_TYPE)
	{
	  tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
	  if (target == 0)
	    {
	      if (mode != BLKmode)
		target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
	      else
		target = assign_temp (type, 0, 1, 1);
	    }

	  if (GET_CODE (target) == MEM)
	    /* Store data into beginning of memory target.  */
	    store_expr (TREE_OPERAND (exp, 0),
			change_address (target, TYPE_MODE (valtype), 0), 0);

	  else if (GET_CODE (target) == REG)
	    /* Store this field into a union of the proper type.  */
	    store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
			 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
			 VOIDmode, 0, 1,
			 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))),
			 0);
	  else
	    abort ();

	  /* Return the entire union.  */
	  return target;
	}

      if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
			     ro_modifier);

	  /* If the signedness of the conversion differs and OP0 is
	     a promoted SUBREG, clear that indication since we now
	     have to do the proper extension.  */
	  if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
	      && GET_CODE (op0) == SUBREG)
	    SUBREG_PROMOTED_VAR_P (op0) = 0;

	  return op0;
	}

      op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
      if (GET_MODE (op0) == mode)
	return op0;

      /* If OP0 is a constant, just convert it into the proper mode.  */
      if (CONSTANT_P (op0))
	return
	  convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
			 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));

      if (modifier == EXPAND_INITIALIZER)
	return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);

      if (target == 0)
	return
	  convert_to_mode (mode, op0,
			   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      else
	convert_move (target, op0,
		      TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
      return target;

    case PLUS_EXPR:
      /* We come here from MINUS_EXPR when the second operand is a
	 constant.  */
    plus_expr:
      this_optab = add_optab;

      /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
	 something else, make sure we add the register to the constant and
	 then to the other thing.  This case can occur during strength
	 reduction and doing it this way will produce better code if the
	 frame pointer or argument pointer is eliminated.

	 fold-const.c will ensure that the constant is always in the inner
	 PLUS_EXPR, so the only case we need to do anything about is if
	 sp, ap, or fp is our second argument, in which case we must swap
	 the innermost first argument and our second argument.  */

      if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
	  && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
	      || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
	{
	  tree t = TREE_OPERAND (exp, 1);

	  TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
	  TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
	}

      /* If the result is to be ptr_mode and we are adding an integer to
	 something, we might be forming a constant.  So try to use
	 plus_constant.  If it produces a sum and we can't accept it,
	 use force_operand.  This allows P = &ARR[const] to generate
	 efficient code on machines where a SYMBOL_REF is not a valid
	 address.

	 If this is an EXPAND_SUM call, always return the sum.  */
      if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
	  || mode == ptr_mode)
	{
	  if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
	      && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
	      && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
	    {
	      op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
				 EXPAND_SUM);
	      op1 = plus_constant (op1,
				   TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op1 = force_operand (op1, target);
	      return op1;
	    }

	  else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
		   && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_INT
		   && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
	    {
	      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
				 EXPAND_SUM);
	      if (! CONSTANT_P (op0))
		{
		  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				     VOIDmode, modifier);
		  /* Don't go to both_summands if modifier
		     says it's not right to return a PLUS.  */
		  if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		    goto binop2;
		  goto both_summands;
		}
	      op0 = plus_constant (op0,
				   TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
	      if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
		op0 = force_operand (op0, target);
	      return op0;
	    }
	}
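
      /* Illustration of the folding above: for P = &ARR[5] with 4-byte
	 elements, plus_constant folds the offset so the result is the
	 address (const (plus (symbol_ref ARR) (const_int 20))) rather
	 than an add instruction executed at run time.  */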

      /* No sense saving up arithmetic to be done
	 if it's all in the wrong mode to form part of an address.
	 And force_operand won't know whether to sign-extend or
	 zero-extend.  */
      if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
	  || mode != ptr_mode)
	goto binop;

      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			 ro_modifier);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
			 ro_modifier);

    both_summands:
      /* Make sure any term that's a sum with a constant comes last.  */
      if (GET_CODE (op0) == PLUS
	  && CONSTANT_P (XEXP (op0, 1)))
	{
	  temp = op0;
	  op0 = op1;
	  op1 = temp;
	}
      /* If adding to a sum including a constant,
	 associate it to put the constant outside.  */
      if (GET_CODE (op1) == PLUS
	  && CONSTANT_P (XEXP (op1, 1)))
	{
	  rtx constant_term = const0_rtx;

	  temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
	  if (temp != 0)
	    op0 = temp;
	  /* Ensure that MULT comes first if there is one.  */
	  else if (GET_CODE (op0) == MULT)
	    op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
	  else
	    op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);

	  /* Let's also eliminate constants from op0 if possible.  */
	  op0 = eliminate_constant_term (op0, &constant_term);

	  /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
	     their sum should be a constant.  Form it into OP1, since the
	     result we want will then be OP0 + OP1.  */

	  temp = simplify_binary_operation (PLUS, mode, constant_term,
					    XEXP (op1, 1));
	  if (temp != 0)
	    op1 = temp;
	  else
	    op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
	}

      /* Put a constant term last and put a multiplication first.  */
      if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
	temp = op1, op1 = op0, op0 = temp;

      temp = simplify_binary_operation (PLUS, mode, op0, op1);
      return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
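
      /* Net effect of the reassociation above (sketch): with op0 == R1
	 and op1 == (plus R2 (const_int 8)), the constant is pulled
	 outward and the result is (plus (plus R1 R2) (const_int 8)),
	 the shape that address recognition handles best.  */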

    case MINUS_EXPR:
      /* For initializers, we are allowed to return a MINUS of two
	 symbolic constants.  Here we handle all cases when both operands
	 are constant.  */
      if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
	  && really_constant_p (TREE_OPERAND (exp, 0))
	  && really_constant_p (TREE_OPERAND (exp, 1)))
	{
	  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
				 VOIDmode, ro_modifier);
	  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				 VOIDmode, ro_modifier);

	  /* If the last operand is a CONST_INT, use plus_constant of
	     the negated constant.  Else make the MINUS.  */
	  if (GET_CODE (op1) == CONST_INT)
	    return plus_constant (op0, - INTVAL (op1));
	  else
	    return gen_rtx_MINUS (mode, op0, op1);
	}
      /* Convert A - const to A + (-const).  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	{
	  tree negated = fold (build1 (NEGATE_EXPR, type,
				       TREE_OPERAND (exp, 1)));

	  /* Deal with the case where we can't negate the constant
	     in TYPE.  */
	  if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
	    {
	      tree newtype = signed_type (type);
	      tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
	      tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
	      tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));

	      if (! TREE_OVERFLOW (newneg))
		return expand_expr (convert (type,
					     build (PLUS_EXPR, newtype,
						    newop0, newneg)),
				    target, tmode, ro_modifier);
	    }
	  else
	    {
	      exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
	      goto plus_expr;
	    }
	}
      this_optab = sub_optab;
      goto binop;

    case MULT_EXPR:
      preexpand_calls (exp);
      /* If first operand is constant, swap them.
	 Thus the following special case checks need only
	 check the second operand.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
	{
	  register tree t1 = TREE_OPERAND (exp, 0);
	  TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
	  TREE_OPERAND (exp, 1) = t1;
	}

      /* Attempt to return something suitable for generating an
	 indexed address, for machines that support that.  */

      if (modifier == EXPAND_SUM && mode == ptr_mode
	  && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	  && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	{
	  op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
			     EXPAND_SUM);

	  /* Apply distributive law if OP0 is x+c.  */
	  if (GET_CODE (op0) == PLUS
	      && GET_CODE (XEXP (op0, 1)) == CONST_INT)
	    return gen_rtx_PLUS (mode,
				 gen_rtx_MULT (mode, XEXP (op0, 0),
					       GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
				 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
					  * INTVAL (XEXP (op0, 1))));

	  if (GET_CODE (op0) != REG)
	    op0 = force_operand (op0, NULL_RTX);
	  if (GET_CODE (op0) != REG)
	    op0 = copy_to_mode_reg (mode, op0);

	  return gen_rtx_MULT (mode, op0,
			       GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
	}
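
      /* Distributive-law sketch: if OP0 expands to (plus x (const_int 4))
	 and the constant multiplier is 8, the value returned is
	 (plus (mult x (const_int 8)) (const_int 32)), folding the
	 constant part of the index computation at compile time.  */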

      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;

      /* Check for multiplying things that have been extended
	 from a narrower type.  If this machine supports multiplying
	 in that narrower type with a result in the desired type,
	 do it that way, and avoid the explicit type-conversion.  */
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
	  && TREE_CODE (type) == INTEGER_TYPE
	  && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	      < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
	  && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
	       && int_fits_type_p (TREE_OPERAND (exp, 1),
				   TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
	       /* Don't use a widening multiply if a shift will do.  */
	       && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
		    > HOST_BITS_PER_WIDE_INT)
		   || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
	      ||
	      (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	       && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
	       /* If both operands are extended, they must either both
		  be zero-extended or both be sign-extended.  */
	       && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
		   ==
		   TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
	{
	  enum machine_mode innermode
	    = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
	  optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			       ? smul_widen_optab : umul_widen_optab);
	  this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
			? umul_widen_optab : smul_widen_optab);
	  if (mode == GET_MODE_WIDER_MODE (innermode))
	    {
	      if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
		{
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  goto binop2;
		}
	      else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
		       && innermode == word_mode)
		{
		  rtx htem;
		  op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     NULL_RTX, VOIDmode, 0);
		  if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
		    op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
				       VOIDmode, 0);
		  else
		    op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
				       NULL_RTX, VOIDmode, 0);
		  temp = expand_binop (mode, other_optab, op0, op1, target,
				       unsignedp, OPTAB_LIB_WIDEN);
		  htem = expand_mult_highpart_adjust (innermode,
						      gen_highpart (innermode, temp),
						      op0, op1,
						      gen_highpart (innermode, temp),
						      unsignedp);
		  emit_move_insn (gen_highpart (innermode, temp), htem);
		  return temp;
		}
	    }
	}
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      return expand_mult (mode, op0, op1, target, unsignedp);
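
      /* Example of the widening path (sketch): on a 32-bit target,
	 multiplying two HImode values that were NOP_EXPR-extended to
	 SImode can use the widening multiply pattern (typically
	 mulhisi3) directly, instead of extending both operands and
	 doing a full SImode multiply.  */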
7236
7237 case TRUNC_DIV_EXPR:
7238 case FLOOR_DIV_EXPR:
7239 case CEIL_DIV_EXPR:
7240 case ROUND_DIV_EXPR:
7241 case EXACT_DIV_EXPR:
7242 preexpand_calls (exp);
e5e809f4 7243 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7244 subtarget = 0;
7245 /* Possible optimization: compute the dividend with EXPAND_SUM
7246 then if the divisor is constant can optimize the case
7247 where some terms of the dividend have coeffs divisible by it. */
7248 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7249 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7250 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
7251
7252 case RDIV_EXPR:
7253 this_optab = flodiv_optab;
7254 goto binop;
7255
7256 case TRUNC_MOD_EXPR:
7257 case FLOOR_MOD_EXPR:
7258 case CEIL_MOD_EXPR:
7259 case ROUND_MOD_EXPR:
7260 preexpand_calls (exp);
e5e809f4 7261 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7262 subtarget = 0;
7263 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7264 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7265 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
7266
7267 case FIX_ROUND_EXPR:
7268 case FIX_FLOOR_EXPR:
7269 case FIX_CEIL_EXPR:
7270 abort (); /* Not used for C. */
7271
7272 case FIX_TRUNC_EXPR:
906c4e36 7273 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7274 if (target == 0)
7275 target = gen_reg_rtx (mode);
7276 expand_fix (target, op0, unsignedp);
7277 return target;
7278
7279 case FLOAT_EXPR:
906c4e36 7280 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7281 if (target == 0)
7282 target = gen_reg_rtx (mode);
7283 /* expand_float can't figure out what to do if FROM has VOIDmode.
7284 So give it the correct mode. With -O, cse will optimize this. */
7285 if (GET_MODE (op0) == VOIDmode)
7286 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7287 op0);
7288 expand_float (target, op0,
7289 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
7290 return target;

    case NEGATE_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, neg_optab, op0, target, 0);
      if (temp == 0)
	abort ();
      return temp;

    case ABS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);

      /* Handle complex values specially.  */
      if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
	return expand_complex_abs (mode, op0, target, unsignedp);

      /* Unsigned abs is simply the operand.  Testing here means we don't
	 risk generating incorrect code below.  */
      if (TREE_UNSIGNED (type))
	return op0;

      return expand_abs (mode, op0, target,
			 safe_from_p (target, TREE_OPERAND (exp, 0), 1));

    case MAX_EXPR:
    case MIN_EXPR:
      target = original_target;
      if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
	  || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
	  || GET_MODE (target) != mode
	  || (GET_CODE (target) == REG
	      && REGNO (target) < FIRST_PSEUDO_REGISTER))
	target = gen_reg_rtx (mode);
      op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);

      /* First try to do it with a special MIN or MAX instruction.
	 If that does not win, use a conditional jump to select the proper
	 value.  */
      this_optab = (TREE_UNSIGNED (type)
		    ? (code == MIN_EXPR ? umin_optab : umax_optab)
		    : (code == MIN_EXPR ? smin_optab : smax_optab));

      temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
			   OPTAB_WIDEN);
      if (temp != 0)
	return temp;

      /* At this point, a MEM target is no longer useful; we will get better
	 code without it.  */

      if (GET_CODE (target) == MEM)
	target = gen_reg_rtx (mode);

      if (target != op0)
	emit_move_insn (target, op0);

      op0 = gen_label_rtx ();

      /* If this mode is an integer too wide to compare properly,
	 compare word by word.  Rely on cse to optimize constant cases.  */
      if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
	{
	  if (code == MAX_EXPR)
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  target, op1, NULL_RTX, op0);
	  else
	    do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
					  op1, target, NULL_RTX, op0);
	  emit_move_insn (target, op1);
	}
      else
	{
	  if (code == MAX_EXPR)
	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
		    ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
		    : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
	  else
	    temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
		    ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
		    : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
	  if (temp == const0_rtx)
	    emit_move_insn (target, op1);
	  else if (temp != const_true_rtx)
	    {
	      if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
		emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
	      else
		abort ();
	      emit_move_insn (target, op1);
	    }
	}
      emit_label (op0);
      return target;
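
      /* Shape of the jump fallback for MAX_EXPR (sketch):

	     target = op0;
	     if (target >= op1) goto lab;   (GE, or GEU if unsigned)
	     target = op1;
	   lab:

	 so the move from OP1 happens only when OP0 was not the max.  */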

    case BIT_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

    case FFS_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      temp = expand_unop (mode, ffs_optab, op0, target, 1);
      if (temp == 0)
	abort ();
      return temp;

      /* ??? Can optimize bitwise operations with one arg constant.
	 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
	 and (a bitwise1 b) bitwise2 b (etc)
	 but that is probably not worth while.  */

      /* BIT_AND_EXPR is for bitwise anding.  TRUTH_AND_EXPR is for anding two
	 boolean values when we want in all cases to compute both of them.  In
	 general it is fastest to do TRUTH_AND_EXPR by computing both operands
	 as actual zero-or-1 values and then bitwise anding.  In cases where
	 there cannot be any side effects, better code would be made by
	 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
	 how to recognize those cases.  */

    case TRUTH_AND_EXPR:
    case BIT_AND_EXPR:
      this_optab = and_optab;
      goto binop;

    case TRUTH_OR_EXPR:
    case BIT_IOR_EXPR:
      this_optab = ior_optab;
      goto binop;

    case TRUTH_XOR_EXPR:
    case BIT_XOR_EXPR:
      this_optab = xor_optab;
      goto binop;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      preexpand_calls (exp);
      if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
	subtarget = 0;
      op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
      return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
			   unsignedp);

      /* Could determine the answer when only additive constants differ.
	 Also, the addition of one can be handled by changing the
	 condition.  */
    case LT_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case GE_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
      preexpand_calls (exp);
      temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
      if (temp != 0)
	return temp;

      /* For foo != 0, load foo, and if it is nonzero load 1 instead.  */
      if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
	  && original_target
	  && GET_CODE (original_target) == REG
	  && (GET_MODE (original_target)
	      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
	{
	  temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
			      VOIDmode, 0);

	  if (temp != original_target)
	    temp = copy_to_reg (temp);

	  op1 = gen_label_rtx ();
	  emit_cmp_and_jump_insns (temp, const0_rtx, EQ, NULL_RTX,
				   GET_MODE (temp), unsignedp, 0, op1);
	  emit_move_insn (temp, const1_rtx);
	  emit_label (op1);
	  return temp;
	}

      /* If no set-flag instruction, must generate a conditional
	 store into a temporary variable.  Drop through
	 and handle this like && and ||.  */

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      if (! ignore
	  && (target == 0 || ! safe_from_p (target, exp, 1)
	      /* Make sure we don't have a hard reg (such as function's
		 return value) live across basic blocks, if not
		 optimizing.  */
	      || (!optimize && GET_CODE (target) == REG
		  && REGNO (target) < FIRST_PSEUDO_REGISTER)))
	target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);

      if (target)
	emit_clr_insn (target);

      op1 = gen_label_rtx ();
      jumpifnot (exp, op1);

      if (target)
	emit_0_to_1_insn (target);

      emit_label (op1);
      return ignore ? const0_rtx : target;
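
      /* The fallback here materializes the truth value explicitly:
	 TARGET is cleared, the whole expression is jump-expanded so a
	 false result branches over the store, and TARGET becomes 1 only
	 on the fall-through (true) path.  */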

    case TRUTH_NOT_EXPR:
      op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
      /* The parser is careful to generate TRUTH_NOT_EXPR
	 only with operands that are always zero or one.  */
      temp = expand_binop (mode, xor_optab, op0, const1_rtx,
			   target, 1, OPTAB_LIB_WIDEN);
      if (temp == 0)
	abort ();
      return temp;

    case COMPOUND_EXPR:
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      emit_queue ();
      return expand_expr (TREE_OPERAND (exp, 1),
			  (ignore ? const0_rtx : target),
			  VOIDmode, 0);

    case COND_EXPR:
      /* If we would have a "singleton" (see below) were it not for a
	 conversion in each arm, bring that conversion back out.  */
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
	  && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
	  && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
	      == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
	{
	  tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
	  tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);

	  if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
	       && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
		  && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
	      || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
		  && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
	    return expand_expr (build1 (NOP_EXPR, type,
					build (COND_EXPR, TREE_TYPE (true),
					       TREE_OPERAND (exp, 0),
					       true, false)),
				target, tmode, modifier);
	}

      {
	/* Note that COND_EXPRs whose type is a structure or union
	   are required to be constructed to contain assignments of
	   a temporary variable, so that we can evaluate them here
	   for side effect only.  If type is void, we must do likewise.  */

	/* If an arm of the branch requires a cleanup,
	   only that cleanup is performed.  */

	tree singleton = 0;
	tree binary_op = 0, unary_op = 0;

	/* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
	   convert it to our mode, if necessary.  */
	if (integer_onep (TREE_OPERAND (exp, 1))
	    && integer_zerop (TREE_OPERAND (exp, 2))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    if (ignore)
	      {
		expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
			     ro_modifier);
		return const0_rtx;
	      }

	    op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode,
			       ro_modifier);
	    if (GET_MODE (op0) == mode)
	      return op0;

	    if (target == 0)
	      target = gen_reg_rtx (mode);
	    convert_move (target, op0, unsignedp);
	    return target;
	  }

	/* Check for X ? A + B : A.  If we have this, we can copy A to the
	   output and conditionally add B.  Similarly for unary operations.
	   Don't do this if X has side-effects because those side effects
	   might affect A or B and the "?" operation is a sequence point in
	   ANSI.  (operand_equal_p tests for side effects.)  */

	if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
	    && operand_equal_p (TREE_OPERAND (exp, 2),
				TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 2),
				     TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
	  singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
	else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
		 && operand_equal_p (TREE_OPERAND (exp, 1),
				     TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
	  singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);

	/* If we are not to produce a result, we have no target.  Otherwise,
	   if a target was specified use it; it will not be used as an
	   intermediate target unless it is safe.  If no target, use a
	   temporary.  */

	if (ignore)
	  temp = 0;
	else if (original_target
		 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
		     || (singleton && GET_CODE (original_target) == REG
			 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
			 && original_target == var_rtx (singleton)))
		 && GET_MODE (original_target) == mode
#ifdef HAVE_conditional_move
		 && (! can_conditionally_move_p (mode)
		     || GET_CODE (original_target) == REG
		     || TREE_ADDRESSABLE (type))
#endif
		 && ! (GET_CODE (original_target) == MEM
		       && MEM_VOLATILE_P (original_target)))
	  temp = original_target;
	else if (TREE_ADDRESSABLE (type))
	  abort ();
	else
	  temp = assign_temp (type, 0, 0, 1);

	/* If we had X ? A + C : A, with C a constant power of 2, and we can
	   do the test of X as a store-flag operation, do this as
	   A + ((X != 0) << log C).  Similarly for other simple binary
	   operators.  Only do for C == 1 if BRANCH_COST is low.  */
	if (temp && singleton && binary_op
	    && (TREE_CODE (binary_op) == PLUS_EXPR
		|| TREE_CODE (binary_op) == MINUS_EXPR
		|| TREE_CODE (binary_op) == BIT_IOR_EXPR
		|| TREE_CODE (binary_op) == BIT_XOR_EXPR)
	    && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
		: integer_onep (TREE_OPERAND (binary_op, 1)))
	    && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
	  {
	    rtx result;
	    optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
			    : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
			    : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
			    : xor_optab);

	    /* If we had X ? A : A + 1, do this as A + (X == 0).

	       We have to invert the truth value here and then put it
	       back later if do_store_flag fails.  We cannot simply copy
	       TREE_OPERAND (exp, 0) to another variable and modify that
	       because invert_truthvalue can modify the tree pointed to
	       by its argument.  */
	    if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));

	    result = do_store_flag (TREE_OPERAND (exp, 0),
				    (safe_from_p (temp, singleton, 1)
				     ? temp : NULL_RTX),
				    mode, BRANCH_COST <= 1);

	    if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
	      result = expand_shift (LSHIFT_EXPR, mode, result,
				     build_int_2 (tree_log2
						  (TREE_OPERAND
						   (binary_op, 1)),
						  0),
				     (safe_from_p (temp, singleton, 1)
				      ? temp : NULL_RTX), 0);

	    if (result)
	      {
		op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
		return expand_binop (mode, boptab, op1, result, temp,
				     unsignedp, OPTAB_LIB_WIDEN);
	      }
	    else if (singleton == TREE_OPERAND (exp, 1))
	      TREE_OPERAND (exp, 0)
		= invert_truthvalue (TREE_OPERAND (exp, 0));
	  }
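
	/* Worked example: for X ? A + 4 : A, RESULT holds the store-flag
	   value (X != 0), the shift turns it into (X != 0) << 2, and the
	   expand_binop above then computes A + ((X != 0) << 2), so no
	   branch is emitted at all.  */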

	do_pending_stack_adjust ();
	NO_DEFER_POP;
	op0 = gen_label_rtx ();

	if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
	  {
	    if (temp != 0)
	      {
		/* If the target conflicts with the other operand of the
		   binary op, we can't use it.  Also, we can't use the target
		   if it is a hard register, because evaluating the condition
		   might clobber it.  */
		if ((binary_op
		     && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
		    || (GET_CODE (temp) == REG
			&& REGNO (temp) < FIRST_PSEUDO_REGISTER))
		  temp = gen_reg_rtx (mode);
		store_expr (singleton, temp, 0);
	      }
	    else
	      expand_expr (singleton,
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    if (singleton == TREE_OPERAND (exp, 1))
	      jumpif (TREE_OPERAND (exp, 0), op0);
	    else
	      jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (binary_op && temp == 0)
	      /* Just touch the other operand.  */
	      expand_expr (TREE_OPERAND (binary_op, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    else if (binary_op)
	      store_expr (build (TREE_CODE (binary_op), type,
				 make_tree (type, temp),
				 TREE_OPERAND (binary_op, 1)),
			  temp, 0);
	    else
	      store_expr (build1 (TREE_CODE (unary_op), type,
				  make_tree (type, temp)),
			  temp, 0);
	    op1 = op0;
	  }
	/* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
	   comparison operator.  If we have one of these cases, set the
	   output to A, branch on A (cse will merge these two references),
	   then set the output to FOO.  */
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 1), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
	  {
	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    jumpif (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    op1 = op0;
	  }
	else if (temp
		 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
		 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
		 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
				     TREE_OPERAND (exp, 2), 0)
		 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
		     || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
		 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
	  {
	    if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
	      temp = gen_reg_rtx (mode);
	    store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    op1 = op0;
	  }
	else
	  {
	    op1 = gen_label_rtx ();
	    jumpifnot (TREE_OPERAND (exp, 0), op0);

	    start_cleanup_deferral ();
	    if (temp != 0)
	      store_expr (TREE_OPERAND (exp, 1), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 1),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	    end_cleanup_deferral ();
	    emit_queue ();
	    emit_jump_insn (gen_jump (op1));
	    emit_barrier ();
	    emit_label (op0);
	    start_cleanup_deferral ();
	    if (temp != 0)
	      store_expr (TREE_OPERAND (exp, 2), temp, 0);
	    else
	      expand_expr (TREE_OPERAND (exp, 2),
			   ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
	  }

	end_cleanup_deferral ();

	emit_queue ();
	emit_label (op1);
	OK_DEFER_POP;

	return temp;
      }

    case TARGET_EXPR:
      {
	/* Something needs to be initialized, but we didn't know
	   where that thing was when building the tree.  For example,
	   it could be the return value of a function, or a parameter
	   to a function which is laid down on the stack, or a temporary
	   variable which must be passed by reference.

	   We guarantee that the expression will either be constructed
	   or copied into our original target.  */

	tree slot = TREE_OPERAND (exp, 0);
	tree cleanups = NULL_TREE;
	tree exp1;

	if (TREE_CODE (slot) != VAR_DECL)
	  abort ();

	if (! ignore)
	  target = original_target;

	if (target == 0)
	  {
	    if (DECL_RTL (slot) != 0)
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		target = assign_temp (type, 2, 0, 1);
		/* All temp slots at this level must not conflict.  */
		preserve_temp_slots (target);
		DECL_RTL (slot) = target;
		if (TREE_ADDRESSABLE (slot))
		  {
		    TREE_ADDRESSABLE (slot) = 0;
		    mark_addressable (slot);
		  }

		/* Since SLOT is not known to the called function
		   to belong to its stack frame, we must build an explicit
		   cleanup.  This case occurs when we must build up a reference
		   to pass the reference as an argument.  In this case,
		   it is very likely that such a reference need not be
		   built here.  */

		if (TREE_OPERAND (exp, 2) == 0)
		  TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
		cleanups = TREE_OPERAND (exp, 2);
	      }
	  }
	else
	  {
	    /* This case does occur, when expanding a parameter which
	       needs to be constructed on the stack.  The target
	       is the actual stack address that we want to initialize.
	       The function we call will perform the cleanup in this case.  */

	    /* If we have already assigned it space, use that space,
	       not the target that we were passed, as our target
	       parameter is only a hint.  */
	    if (DECL_RTL (slot) != 0)
	      {
		target = DECL_RTL (slot);
		/* If we have already expanded the slot, don't do
		   it again.  (mrs)  */
		if (TREE_OPERAND (exp, 1) == NULL_TREE)
		  return target;
	      }
	    else
	      {
		DECL_RTL (slot) = target;
		/* If we must have an addressable slot, then make sure that
		   the RTL that we just stored in slot is OK.  */
		if (TREE_ADDRESSABLE (slot))
		  {
		    TREE_ADDRESSABLE (slot) = 0;
		    mark_addressable (slot);
		  }
	      }
	  }

	exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
	/* Mark it as expanded.  */
	TREE_OPERAND (exp, 1) = NULL_TREE;

	TREE_USED (slot) = 1;
	store_expr (exp1, target, 0);

	expand_decl_cleanup (NULL_TREE, cleanups);

	return target;
      }

    case INIT_EXPR:
      {
	tree lhs = TREE_OPERAND (exp, 0);
	tree rhs = TREE_OPERAND (exp, 1);
	tree noncopied_parts = 0;
	tree lhs_type = TREE_TYPE (lhs);

	temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
	if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
	  noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
						  TYPE_NONCOPIED_PARTS (lhs_type));
	while (noncopied_parts != 0)
	  {
	    expand_assignment (TREE_VALUE (noncopied_parts),
			       TREE_PURPOSE (noncopied_parts), 0, 0);
	    noncopied_parts = TREE_CHAIN (noncopied_parts);
	  }
	return temp;
      }
7914
7915 case MODIFY_EXPR:
7916 {
7917 /* If lhs is complex, expand calls in rhs before computing it.
7918 That's so we don't compute a pointer and save it over a call.
7919 If lhs is simple, compute it first so we can give it as a
7920 target if the rhs is just a call. This avoids an extra temp and copy
7921 and that prevents a partial-subsumption which makes bad code.
7922 Actually we could treat component_ref's of vars like vars. */
7923
7924 tree lhs = TREE_OPERAND (exp, 0);
7925 tree rhs = TREE_OPERAND (exp, 1);
7926 tree noncopied_parts = 0;
7927 tree lhs_type = TREE_TYPE (lhs);
7928
7929 temp = 0;
7930
7931 if (TREE_CODE (lhs) != VAR_DECL
7932 && TREE_CODE (lhs) != RESULT_DECL
b60334e8
RK
7933 && TREE_CODE (lhs) != PARM_DECL
7934 && ! (TREE_CODE (lhs) == INDIRECT_REF
7935 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
bbf6f052
RK
7936 preexpand_calls (exp);
7937
7938 /* Check for |= or &= of a bitfield of size one into another bitfield
7939 of size 1. In this case, (unless we need the result of the
7940 assignment) we can do this more efficiently with a
7941 test followed by an assignment, if necessary.
7942
7943 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7944 things change so we do, this code should be enhanced to
7945 support it. */
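	/* Illustrative example (not part of the original source): for
	   one-bit bitfields F and G, the statement

		x.f |= y.g;

	   is expanded by the code below as

		if (y.g) x.f = 1;

	   and `x.f &= y.g;' as `if (! y.g) x.f = 0;', i.e. a conditional
	   jump around a store of a constant instead of a read-modify-write
	   of the destination bitfield.  */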
7946 if (ignore
7947 && TREE_CODE (lhs) == COMPONENT_REF
7948 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7949 || TREE_CODE (rhs) == BIT_AND_EXPR)
7950 && TREE_OPERAND (rhs, 0) == lhs
7951 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7952 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7953 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7954 {
7955 rtx label = gen_label_rtx ();
7956
7957 do_jump (TREE_OPERAND (rhs, 1),
7958 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7959 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7960 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7961 (TREE_CODE (rhs) == BIT_IOR_EXPR
7962 ? integer_one_node
7963 : integer_zero_node)),
7964 0, 0);
e7c33f54 7965 do_pending_stack_adjust ();
bbf6f052
RK
7966 emit_label (label);
7967 return const0_rtx;
7968 }
7969
7970 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7971 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7972 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7973 TYPE_NONCOPIED_PARTS (lhs_type));
7974
7975 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7976 while (noncopied_parts != 0)
7977 {
7978 expand_assignment (TREE_PURPOSE (noncopied_parts),
7979 TREE_VALUE (noncopied_parts), 0, 0);
7980 noncopied_parts = TREE_CHAIN (noncopied_parts);
7981 }
7982 return temp;
7983 }
7984
6e7f84a7
APB
7985 case RETURN_EXPR:
7986 if (!TREE_OPERAND (exp, 0))
7987 expand_null_return ();
7988 else
7989 expand_return (TREE_OPERAND (exp, 0));
7990 return const0_rtx;
7991
bbf6f052
RK
7992 case PREINCREMENT_EXPR:
7993 case PREDECREMENT_EXPR:
7b8b9722 7994 return expand_increment (exp, 0, ignore);
bbf6f052
RK
7995
7996 case POSTINCREMENT_EXPR:
7997 case POSTDECREMENT_EXPR:
7998 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 7999 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
8000
8001 case ADDR_EXPR:
987c71d9 8002 /* If nonzero, TEMP will be set to the address of something that might
0f41302f 8003 be a MEM corresponding to a stack slot. */
987c71d9
RK
8004 temp = 0;
8005
bbf6f052
RK
8006 /* Are we taking the address of a nested function? */
8007 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 8008 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
8009 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
8010 && ! TREE_STATIC (exp))
bbf6f052
RK
8011 {
8012 op0 = trampoline_address (TREE_OPERAND (exp, 0));
8013 op0 = force_operand (op0, target);
8014 }
682ba3a6
RK
8015 /* If we are taking the address of something erroneous, just
8016 return a zero. */
8017 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
8018 return const0_rtx;
bbf6f052
RK
8019 else
8020 {
e287fd6e
RK
8021 /* We make sure to pass const0_rtx down if we came in with
8022 ignore set, to avoid doing the cleanups twice for something. */
8023 op0 = expand_expr (TREE_OPERAND (exp, 0),
8024 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
8025 (modifier == EXPAND_INITIALIZER
8026 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 8027
119af78a
RK
8028 /* If we are going to ignore the result, OP0 will have been set
8029 to const0_rtx, so just return it. Don't get confused and
8030 think we are taking the address of the constant. */
8031 if (ignore)
8032 return op0;
8033
3539e816
MS
8034 op0 = protect_from_queue (op0, 0);
8035
896102d0
RK
8036 /* We would like the object in memory. If it is a constant,
8037 we can have it be statically allocated into memory. For
682ba3a6 8038 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
896102d0
RK
8039 memory and store the value into it. */
8040
8041 if (CONSTANT_P (op0))
8042 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
8043 op0);
987c71d9 8044 else if (GET_CODE (op0) == MEM)
af5b53ed
RK
8045 {
8046 mark_temp_addr_taken (op0);
8047 temp = XEXP (op0, 0);
8048 }
896102d0 8049
682ba3a6 8050 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6c8538cc 8051 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
896102d0
RK
8052 {
8053	    /* If this object is in a register, it must not
0f41302f 8054 be BLKmode. */
896102d0 8055 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
06089a8b 8056 rtx memloc = assign_temp (inner_type, 1, 1, 1);
896102d0 8057
7a0b7b9a 8058 mark_temp_addr_taken (memloc);
896102d0
RK
8059 emit_move_insn (memloc, op0);
8060 op0 = memloc;
8061 }
8062
bbf6f052
RK
8063 if (GET_CODE (op0) != MEM)
8064 abort ();
8065
8066 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77
RK
8067 {
8068 temp = XEXP (op0, 0);
8069#ifdef POINTERS_EXTEND_UNSIGNED
8070 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
8071 && mode == ptr_mode)
9fcfcce7 8072 temp = convert_memory_address (ptr_mode, temp);
88f63c77
RK
8073#endif
8074 return temp;
8075 }
987c71d9 8076
bbf6f052
RK
8077 op0 = force_operand (XEXP (op0, 0), target);
8078 }
987c71d9 8079
bbf6f052 8080 if (flag_force_addr && GET_CODE (op0) != REG)
987c71d9
RK
8081 op0 = force_reg (Pmode, op0);
8082
dc6d66b3
RK
8083 if (GET_CODE (op0) == REG
8084 && ! REG_USERVAR_P (op0))
8085 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
987c71d9
RK
8086
8087 /* If we might have had a temp slot, add an equivalent address
8088 for it. */
8089 if (temp != 0)
8090 update_temp_slot_address (temp, op0);
8091
88f63c77
RK
8092#ifdef POINTERS_EXTEND_UNSIGNED
8093 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
8094 && mode == ptr_mode)
9fcfcce7 8095 op0 = convert_memory_address (ptr_mode, op0);
88f63c77
RK
8096#endif
8097
bbf6f052
RK
8098 return op0;
8099
8100 case ENTRY_VALUE_EXPR:
8101 abort ();
8102
7308a047
RS
8103 /* COMPLEX type for Extended Pascal & Fortran */
8104 case COMPLEX_EXPR:
8105 {
8106 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 8107 rtx insns;
7308a047
RS
8108
8109 /* Get the rtx code of the operands. */
8110 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8111 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
8112
8113 if (! target)
8114 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8115
6551fa4d 8116 start_sequence ();
7308a047
RS
8117
8118 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
8119 emit_move_insn (gen_realpart (mode, target), op0);
8120 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 8121
6551fa4d
JW
8122 insns = get_insns ();
8123 end_sequence ();
8124
7308a047 8125 /* Complex construction should appear as a single unit. */
6551fa4d
JW
8126 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
8127 each with a separate pseudo as destination.
8128 It's not correct for flow to treat them as a unit. */
6d6e61ce 8129 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8130 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
8131 else
8132 emit_insns (insns);
7308a047
RS
8133
8134 return target;
8135 }
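	/* Rough shape of the sequence built above (an illustrative sketch,
	   not part of the original source): for a pseudo TARGET the two
	   moves are approximately

		(set (real part of TARGET) op0)
		(set (imag part of TARGET) op1)

	   and emit_no_conflict_block groups them so later passes see one
	   complex-valued write rather than two unrelated stores.  */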
8136
8137 case REALPART_EXPR:
2d7050fd
RS
8138 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8139 return gen_realpart (mode, op0);
7308a047
RS
8140
8141 case IMAGPART_EXPR:
2d7050fd
RS
8142 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8143 return gen_imagpart (mode, op0);
7308a047
RS
8144
8145 case CONJ_EXPR:
8146 {
62acb978 8147 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 8148 rtx imag_t;
6551fa4d 8149 rtx insns;
7308a047
RS
8150
8151 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8152
8153 if (! target)
d6a5ac33 8154 target = gen_reg_rtx (mode);
7308a047 8155
6551fa4d 8156 start_sequence ();
7308a047
RS
8157
8158 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
8159 emit_move_insn (gen_realpart (partmode, target),
8160 gen_realpart (partmode, op0));
7308a047 8161
62acb978
RK
8162 imag_t = gen_imagpart (partmode, target);
8163 temp = expand_unop (partmode, neg_optab,
8164 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
8165 if (temp != imag_t)
8166 emit_move_insn (imag_t, temp);
8167
6551fa4d
JW
8168 insns = get_insns ();
8169 end_sequence ();
8170
d6a5ac33
RK
8171	  /* Conjugate should appear as a single unit.
8172 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
8173 each with a separate pseudo as destination.
8174 It's not correct for flow to treat them as a unit. */
6d6e61ce 8175 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
8176 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
8177 else
8178 emit_insns (insns);
7308a047
RS
8179
8180 return target;
8181 }
8182
e976b8b2
MS
8183 case TRY_CATCH_EXPR:
8184 {
8185 tree handler = TREE_OPERAND (exp, 1);
8186
8187 expand_eh_region_start ();
8188
8189 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
8190
8191 expand_eh_region_end (handler);
8192
8193 return op0;
8194 }
8195
b335b813
PB
8196 case TRY_FINALLY_EXPR:
8197 {
8198 tree try_block = TREE_OPERAND (exp, 0);
8199 tree finally_block = TREE_OPERAND (exp, 1);
8200 rtx finally_label = gen_label_rtx ();
8201 rtx done_label = gen_label_rtx ();
8202 rtx return_link = gen_reg_rtx (Pmode);
8203 tree cleanup = build (GOTO_SUBROUTINE_EXPR, void_type_node,
8204 (tree) finally_label, (tree) return_link);
8205 TREE_SIDE_EFFECTS (cleanup) = 1;
8206
8207 /* Start a new binding layer that will keep track of all cleanup
8208 actions to be performed. */
8209 expand_start_bindings (0);
8210
8211 target_temp_slot_level = temp_slot_level;
8212
8213 expand_decl_cleanup (NULL_TREE, cleanup);
8214 op0 = expand_expr (try_block, target, tmode, modifier);
8215
8216 preserve_temp_slots (op0);
8217 expand_end_bindings (NULL_TREE, 0, 0);
8218 emit_jump (done_label);
8219 emit_label (finally_label);
8220 expand_expr (finally_block, const0_rtx, VOIDmode, 0);
8221 emit_indirect_jump (return_link);
8222 emit_label (done_label);
8223 return op0;
8224 }
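	/* A sketch of the control flow emitted for TRY_FINALLY_EXPR above
	   (illustrative only, not part of the original source):

		<try_block>
		return_link = &&resume; goto finally_label;   <- the cleanup
	    resume:
		goto done_label;
	    finally_label:
		<finally_block>
		goto *return_link;
	    done_label:

	   The cleanup is the GOTO_SUBROUTINE_EXPR expanded in the case
	   below: every exit from the binding contour enters the finally
	   block like a subroutine and returns through RETURN_LINK.  */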
8225
8226 case GOTO_SUBROUTINE_EXPR:
8227 {
8228 rtx subr = (rtx) TREE_OPERAND (exp, 0);
8229 rtx return_link = *(rtx *) &TREE_OPERAND (exp, 1);
8230 rtx return_address = gen_label_rtx ();
8231 emit_move_insn (return_link, gen_rtx_LABEL_REF (Pmode, return_address));
8232 emit_jump (subr);
8233 emit_label (return_address);
8234 return const0_rtx;
8235 }
8236
e976b8b2
MS
8237 case POPDCC_EXPR:
8238 {
8239 rtx dcc = get_dynamic_cleanup_chain ();
38a448ca 8240 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
e976b8b2
MS
8241 return const0_rtx;
8242 }
8243
8244 case POPDHC_EXPR:
8245 {
8246 rtx dhc = get_dynamic_handler_chain ();
38a448ca 8247 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
e976b8b2
MS
8248 return const0_rtx;
8249 }
8250
bbf6f052 8251 default:
90764a87 8252 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
8253 }
8254
8255 /* Here to do an ordinary binary operator, generating an instruction
8256 from the optab already placed in `this_optab'. */
8257 binop:
8258 preexpand_calls (exp);
e5e809f4 8259 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
8260 subtarget = 0;
8261 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 8262 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
8263 binop2:
8264 temp = expand_binop (mode, this_optab, op0, op1, target,
8265 unsignedp, OPTAB_LIB_WIDEN);
8266 if (temp == 0)
8267 abort ();
8268 return temp;
8269}
bbf6f052 8270
bbf6f052 8271
b93a436e
JL
8272\f
8273/* Return the alignment in bits of EXP, a pointer valued expression.
8274 But don't return more than MAX_ALIGN no matter what.
8275 The alignment returned is, by default, the alignment of the thing that
8276 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
8277
8278 Otherwise, look at the expression to see if we can do better, i.e., if the
8279 expression is actually pointing at an object whose alignment is tighter. */
0f41302f 8280
b93a436e
JL
8281static int
8282get_pointer_alignment (exp, max_align)
8283 tree exp;
8284 unsigned max_align;
bbf6f052 8285{
b93a436e
JL
8286 unsigned align, inner;
8287
8288 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8289 return 0;
8290
8291 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8292 align = MIN (align, max_align);
8293
8294 while (1)
bbf6f052 8295 {
b93a436e 8296 switch (TREE_CODE (exp))
bbf6f052 8297 {
b93a436e
JL
8298 case NOP_EXPR:
8299 case CONVERT_EXPR:
8300 case NON_LVALUE_EXPR:
8301 exp = TREE_OPERAND (exp, 0);
8302 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
8303 return align;
8304 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
8305 align = MIN (inner, max_align);
8306 break;
8307
8308 case PLUS_EXPR:
8309 /* If sum of pointer + int, restrict our maximum alignment to that
8310 imposed by the integer. If not, we can't do any better than
8311 ALIGN. */
8312 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
8313 return align;
8314
8315 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
8316 & (max_align - 1))
8317 != 0)
8318 max_align >>= 1;
8319
8320 exp = TREE_OPERAND (exp, 0);
8321 break;
8322
8323 case ADDR_EXPR:
8324 /* See what we are pointing at and look at its alignment. */
8325 exp = TREE_OPERAND (exp, 0);
8326 if (TREE_CODE (exp) == FUNCTION_DECL)
8327 align = FUNCTION_BOUNDARY;
8328 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
8329 align = DECL_ALIGN (exp);
8330#ifdef CONSTANT_ALIGNMENT
8331 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
8332 align = CONSTANT_ALIGNMENT (exp, align);
c02bd5d9 8333#endif
b93a436e 8334 return MIN (align, max_align);
c02bd5d9 8335
b93a436e
JL
8336 default:
8337 return align;
8338 }
8339 }
8340}
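/* Worked example for the PLUS_EXPR case above (illustrative, not part
   of the original source): with MAX_ALIGN == 64 bits, a constant byte
   offset of 2 (16 bits) halves MAX_ALIGN 64 -> 32 -> 16 until
   16 & (16 - 1) == 0, so the base pointer's alignment is capped at
   16 bits; an offset of 8 bytes (64 bits) leaves MAX_ALIGN at 64.  */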
8341\f
8342/* Return the tree node and offset if a given argument corresponds to
8343 a string constant. */
8344
8345static tree
8346string_constant (arg, ptr_offset)
8347 tree arg;
8348 tree *ptr_offset;
8349{
8350 STRIP_NOPS (arg);
8351
8352 if (TREE_CODE (arg) == ADDR_EXPR
8353 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8354 {
8355 *ptr_offset = integer_zero_node;
8356 return TREE_OPERAND (arg, 0);
8357 }
8358 else if (TREE_CODE (arg) == PLUS_EXPR)
8359 {
8360 tree arg0 = TREE_OPERAND (arg, 0);
8361 tree arg1 = TREE_OPERAND (arg, 1);
8362
8363 STRIP_NOPS (arg0);
8364 STRIP_NOPS (arg1);
8365
8366 if (TREE_CODE (arg0) == ADDR_EXPR
8367 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 8368 {
b93a436e
JL
8369 *ptr_offset = arg1;
8370 return TREE_OPERAND (arg0, 0);
bbf6f052 8371 }
b93a436e
JL
8372 else if (TREE_CODE (arg1) == ADDR_EXPR
8373 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 8374 {
b93a436e
JL
8375 *ptr_offset = arg0;
8376 return TREE_OPERAND (arg1, 0);
bbf6f052 8377 }
b93a436e 8378 }
ca695ac9 8379
b93a436e
JL
8380 return 0;
8381}
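/* For example (illustrative, not part of the original source): given
   the tree for `"hello" + 2', the PLUS_EXPR arm above returns the
   STRING_CST "hello" and sets *PTR_OFFSET to the INTEGER_CST 2; for
   the address of a plain string literal the offset returned is
   integer_zero_node.  */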
ca695ac9 8382
b93a436e
JL
8383/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
8384 way, because it could contain a zero byte in the middle.
8385 TREE_STRING_LENGTH is the size of the character array, not the string.
ca695ac9 8386
b93a436e
JL
8387 Unfortunately, string_constant can't access the values of const char
8388	   arrays with initializers, so neither can we here.  */
e87b4f3f 8389
b93a436e
JL
8390static tree
8391c_strlen (src)
8392 tree src;
8393{
8394 tree offset_node;
8395 int offset, max;
8396 char *ptr;
e7c33f54 8397
b93a436e
JL
8398 src = string_constant (src, &offset_node);
8399 if (src == 0)
8400 return 0;
8401 max = TREE_STRING_LENGTH (src);
8402 ptr = TREE_STRING_POINTER (src);
8403 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8404 {
8405 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8406 compute the offset to the following null if we don't know where to
8407 start searching for it. */
8408 int i;
8409 for (i = 0; i < max; i++)
8410 if (ptr[i] == 0)
8411 return 0;
8412 /* We don't know the starting offset, but we do know that the string
8413 has no internal zero bytes. We can assume that the offset falls
8414 within the bounds of the string; otherwise, the programmer deserves
8415 what he gets. Subtract the offset from the length of the string,
8416 and return that. */
8417 /* This would perhaps not be valid if we were dealing with named
8418 arrays in addition to literal string constants. */
8419 return size_binop (MINUS_EXPR, size_int (max), offset_node);
8420 }
e7c33f54 8421
b93a436e
JL
8422 /* We have a known offset into the string. Start searching there for
8423 a null character. */
8424 if (offset_node == 0)
8425 offset = 0;
8426 else
8427 {
8428 /* Did we get a long long offset? If so, punt. */
8429 if (TREE_INT_CST_HIGH (offset_node) != 0)
8430 return 0;
8431 offset = TREE_INT_CST_LOW (offset_node);
8432 }
8433 /* If the offset is known to be out of bounds, warn, and call strlen at
8434 runtime. */
8435 if (offset < 0 || offset > max)
8436 {
8437 warning ("offset outside bounds of constant string");
8438 return 0;
8439 }
8440 /* Use strlen to search for the first zero byte. Since any strings
8441 constructed with build_string will have nulls appended, we win even
8442 if we get handed something like (char[4])"abcd".
e7c33f54 8443
b93a436e
JL
8444 Since OFFSET is our starting index into the string, no further
8445 calculation is needed. */
8446 return size_int (strlen (ptr + offset));
8447}
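/* Examples (illustrative, not part of the original source): for
   `"hello" + 1' the offset is a known constant, so the result is
   size_int (4); for `"foo\0bar" + i' with a non-constant I the
   embedded NUL forces a return of 0, leaving the strlen call to run
   time; a constant offset outside the string's bounds draws the
   warning above and likewise returns 0.  */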
1bbddf11 8448
b93a436e
JL
8449rtx
8450expand_builtin_return_addr (fndecl_code, count, tem)
8451 enum built_in_function fndecl_code;
8452 int count;
8453 rtx tem;
8454{
8455 int i;
e7c33f54 8456
b93a436e
JL
8457 /* Some machines need special handling before we can access
8458 arbitrary frames. For example, on the sparc, we must first flush
8459 all register windows to the stack. */
8460#ifdef SETUP_FRAME_ADDRESSES
8461 if (count > 0)
8462 SETUP_FRAME_ADDRESSES ();
8463#endif
e87b4f3f 8464
b93a436e
JL
8465 /* On the sparc, the return address is not in the frame, it is in a
8466 register. There is no way to access it off of the current frame
8467 pointer, but it can be accessed off the previous frame pointer by
8468 reading the value from the register window save area. */
8469#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8470 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8471 count--;
8472#endif
60bac6ea 8473
b93a436e
JL
8474 /* Scan back COUNT frames to the specified frame. */
8475 for (i = 0; i < count; i++)
8476 {
8477 /* Assume the dynamic chain pointer is in the word that the
8478 frame address points to, unless otherwise specified. */
8479#ifdef DYNAMIC_CHAIN_ADDRESS
8480 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8481#endif
8482 tem = memory_address (Pmode, tem);
8483 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8484 }
ca695ac9 8485
b93a436e
JL
8486 /* For __builtin_frame_address, return what we've got. */
8487 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8488 return tem;
e9a25f70 8489
b93a436e
JL
8490 /* For __builtin_return_address, Get the return address from that
8491 frame. */
8492#ifdef RETURN_ADDR_RTX
8493 tem = RETURN_ADDR_RTX (count, tem);
8494#else
8495 tem = memory_address (Pmode,
8496 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8497 tem = gen_rtx_MEM (Pmode, tem);
8498#endif
8499 return tem;
8500}
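/* Usage sketch (illustrative, not part of the original source):
   __builtin_return_address (0) executes zero iterations of the loop
   above and reads the return address slot of the current frame, while
   __builtin_return_address (2) first follows the dynamic chain twice;
   __builtin_frame_address (N) does the same walk but returns TEM
   itself instead of performing the final RETURN_ADDR_RTX load.  */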
e9a25f70 8501
b93a436e
JL
8502/* __builtin_setjmp is passed a pointer to an array of five words (not
8503 all will be used on all machines). It operates similarly to the C
8504 library function of the same name, but is more efficient. Much of
8505 the code below (and for longjmp) is copied from the handling of
8506 non-local gotos.
ca695ac9 8507
b93a436e
JL
8508 NOTE: This is intended for use by GNAT and the exception handling
8509 scheme in the compiler and will only work in the method used by
8510 them. */
e9a25f70 8511
b93a436e 8512rtx
6fd1c67b 8513expand_builtin_setjmp (buf_addr, target, first_label, next_label)
b93a436e
JL
8514 rtx buf_addr;
8515 rtx target;
6fd1c67b 8516 rtx first_label, next_label;
b93a436e 8517{
6fd1c67b 8518 rtx lab1 = gen_label_rtx ();
a260abc9
DE
8519 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8520 enum machine_mode value_mode;
b93a436e 8521 rtx stack_save;
e9a25f70 8522
b93a436e 8523 value_mode = TYPE_MODE (integer_type_node);
ca695ac9 8524
b93a436e
JL
8525#ifdef POINTERS_EXTEND_UNSIGNED
8526 buf_addr = convert_memory_address (Pmode, buf_addr);
8527#endif
d7f21d63 8528
b93a436e 8529 buf_addr = force_reg (Pmode, buf_addr);
d7f21d63 8530
b93a436e
JL
8531 if (target == 0 || GET_CODE (target) != REG
8532 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8533 target = gen_reg_rtx (value_mode);
d7f21d63 8534
b93a436e 8535 emit_queue ();
d7f21d63 8536
9ec36da5
JL
8537 /* We store the frame pointer and the address of lab1 in the buffer
8538 and use the rest of it for the stack save area, which is
8539 machine-dependent. */
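  /* Illustrative layout implied by the stores below (not part of the
     original source); each slot is one Pmode word of the five-word
     buffer:

	buf[0]		frame pointer (BUILTIN_SETJMP_FRAME_VALUE)
	buf[1]		label to resume at (LAB1)
	buf[2]..buf[4]	stack save area, accessed in SA_MODE  */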
8540
0bc02db4
MS
8541#ifndef BUILTIN_SETJMP_FRAME_VALUE
8542#define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
8543#endif
8544
b93a436e 8545 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
0bc02db4 8546 BUILTIN_SETJMP_FRAME_VALUE);
6fd1c67b
RH
8547 emit_move_insn (validize_mem
8548 (gen_rtx_MEM (Pmode,
b93a436e
JL
8549 plus_constant (buf_addr,
8550 GET_MODE_SIZE (Pmode)))),
89c84672 8551 force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, lab1)));
d7f21d63 8552
b93a436e
JL
8553 stack_save = gen_rtx_MEM (sa_mode,
8554 plus_constant (buf_addr,
8555 2 * GET_MODE_SIZE (Pmode)));
8556 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
e9a25f70 8557
6fd1c67b
RH
8558 /* If there is further processing to do, do it. */
8559#ifdef HAVE_builtin_setjmp_setup
8560 if (HAVE_builtin_setjmp_setup)
8561 emit_insn (gen_builtin_setjmp_setup (buf_addr));
b93a436e 8562#endif
d7f21d63 8563
6fd1c67b 8564 /* Set TARGET to zero and branch to the first-time-through label. */
b93a436e 8565 emit_move_insn (target, const0_rtx);
6fd1c67b 8566 emit_jump_insn (gen_jump (first_label));
b93a436e
JL
8567 emit_barrier ();
8568 emit_label (lab1);
d7f21d63 8569
89c84672
RH
8570 /* Tell flow about the strange goings on. Putting `lab1' on
8571	     `nonlocal_goto_handler_labels' indicates that function
8572 calls may traverse the arc back to this label. */
8573
6fd1c67b 8574 current_function_has_nonlocal_label = 1;
89c84672
RH
8575 nonlocal_goto_handler_labels =
8576 gen_rtx_EXPR_LIST (VOIDmode, lab1, nonlocal_goto_handler_labels);
6fd1c67b
RH
8577
8578 /* Clobber the FP when we get here, so we have to make sure it's
8579 marked as used by this function. */
b93a436e 8580 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
e9a25f70 8581
b93a436e
JL
8582 /* Mark the static chain as clobbered here so life information
8583 doesn't get messed up for it. */
8584 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
d7f21d63 8585
b93a436e
JL
8586 /* Now put in the code to restore the frame pointer, and argument
8587 pointer, if needed. The code below is from expand_end_bindings
8588 in stmt.c; see detailed documentation there. */
8589#ifdef HAVE_nonlocal_goto
8590 if (! HAVE_nonlocal_goto)
8591#endif
8592 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
ca695ac9 8593
b93a436e
JL
8594#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8595 if (fixed_regs[ARG_POINTER_REGNUM])
8596 {
8597#ifdef ELIMINABLE_REGS
c84e2712 8598 size_t i;
b93a436e 8599 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
ca695ac9 8600
b93a436e
JL
8601 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8602 if (elim_regs[i].from == ARG_POINTER_REGNUM
8603 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8604 break;
ca695ac9 8605
b93a436e
JL
8606 if (i == sizeof elim_regs / sizeof elim_regs [0])
8607#endif
8608 {
8609 /* Now restore our arg pointer from the address at which it
8610 was saved in our stack frame.
8611	       If there hasn't been space allocated for it yet, make
8612 some now. */
8613 if (arg_pointer_save_area == 0)
8614 arg_pointer_save_area
8615 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8616 emit_move_insn (virtual_incoming_args_rtx,
8617 copy_to_reg (arg_pointer_save_area));
8618 }
8619 }
8620#endif
ca695ac9 8621
6fd1c67b
RH
8622#ifdef HAVE_builtin_setjmp_receiver
8623 if (HAVE_builtin_setjmp_receiver)
8624 emit_insn (gen_builtin_setjmp_receiver (lab1));
8625 else
8626#endif
b93a436e 8627#ifdef HAVE_nonlocal_goto_receiver
6fd1c67b
RH
8628 if (HAVE_nonlocal_goto_receiver)
8629 emit_insn (gen_nonlocal_goto_receiver ());
8630 else
b93a436e 8631#endif
081f5e7e
KG
8632 {
8633 ; /* Nothing */
8634 }
6fd1c67b
RH
8635
8636 /* Set TARGET, and branch to the next-time-through label. */
3e2b9a3d 8637 emit_move_insn (target, const1_rtx);
6fd1c67b
RH
8638 emit_jump_insn (gen_jump (next_label));
8639 emit_barrier ();
ca695ac9 8640
6fd1c67b
RH
8641 return target;
8642}
ca695ac9 8643
6fd1c67b
RH
8644void
8645expand_builtin_longjmp (buf_addr, value)
8646 rtx buf_addr, value;
8647{
8648 rtx fp, lab, stack;
a260abc9 8649 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
ca695ac9 8650
6fd1c67b
RH
8651#ifdef POINTERS_EXTEND_UNSIGNED
8652 buf_addr = convert_memory_address (Pmode, buf_addr);
b93a436e 8653#endif
6fd1c67b
RH
8654 buf_addr = force_reg (Pmode, buf_addr);
8655
3e2b9a3d
JW
8656 /* We used to store value in static_chain_rtx, but that fails if pointers
8657 are smaller than integers. We instead require that the user must pass
8658 a second argument of 1, because that is what builtin_setjmp will
8659 return. This also makes EH slightly more efficient, since we are no
8660 longer copying around a value that we don't care about. */
8661 if (value != const1_rtx)
8662 abort ();
6fd1c67b
RH
8663
8664#ifdef HAVE_builtin_longjmp
8665 if (HAVE_builtin_longjmp)
3e2b9a3d 8666 emit_insn (gen_builtin_longjmp (buf_addr));
6fd1c67b 8667 else
b93a436e 8668#endif
6fd1c67b
RH
8669 {
8670 fp = gen_rtx_MEM (Pmode, buf_addr);
8671 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
8672 GET_MODE_SIZE (Pmode)));
e9a25f70 8673
6fd1c67b
RH
8674 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
8675 2 * GET_MODE_SIZE (Pmode)));
8676
8677 /* Pick up FP, label, and SP from the block and jump. This code is
8678 from expand_goto in stmt.c; see there for detailed comments. */
8679#if HAVE_nonlocal_goto
8680 if (HAVE_nonlocal_goto)
3e2b9a3d
JW
8681 /* We have to pass a value to the nonlocal_goto pattern that will
8682 get copied into the static_chain pointer, but it does not matter
8683 what that value is, because builtin_setjmp does not use it. */
6fd1c67b
RH
8684 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
8685 else
b93a436e 8686#endif
6fd1c67b
RH
8687 {
8688 lab = copy_to_reg (lab);
60bac6ea 8689
6fd1c67b
RH
8690 emit_move_insn (hard_frame_pointer_rtx, fp);
8691 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8692
8693 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8694 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
6fd1c67b
RH
8695 emit_indirect_jump (lab);
8696 }
8697 }
b93a436e 8698}
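/* Usage note (illustrative, not part of the original source): a
   matched pair looks like

	if (__builtin_setjmp (buf) == 0)
	  ... first-time path ...
	else
	  ... path resumed by __builtin_longjmp (buf, 1) ...

   where BUF is the five-word buffer described above and the second
   argument of __builtin_longjmp must be the constant 1, as enforced
   by the abort () check above.  */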
60bac6ea 8699
55a6ba9f
JC
8700static rtx
8701get_memory_rtx (exp)
8702 tree exp;
8703{
8704 rtx mem;
8705 int is_aggregate;
8706
8707 mem = gen_rtx_MEM (BLKmode,
8708 memory_address (BLKmode,
8709 expand_expr (exp, NULL_RTX,
8710 ptr_mode, EXPAND_SUM)));
8711
8712 RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);
8713
8714 /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P
8715 if the value is the address of a structure or if the expression is
8716 cast to a pointer to structure type. */
8717 is_aggregate = 0;
8718
8719 while (TREE_CODE (exp) == NOP_EXPR)
8720 {
8721 tree cast_type = TREE_TYPE (exp);
8722 if (TREE_CODE (cast_type) == POINTER_TYPE
8723 && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
8724 {
8725 is_aggregate = 1;
8726 break;
8727 }
8728 exp = TREE_OPERAND (exp, 0);
8729 }
8730
8731 if (is_aggregate == 0)
8732 {
8733 tree type;
8734
8735 if (TREE_CODE (exp) == ADDR_EXPR)
8736 /* If this is the address of an object, check whether the
8737 object is an array. */
8738 type = TREE_TYPE (TREE_OPERAND (exp, 0));
8739 else
8740 type = TREE_TYPE (TREE_TYPE (exp));
8741 is_aggregate = AGGREGATE_TYPE_P (type);
8742 }
8743
c6df88cb 8744 MEM_SET_IN_STRUCT_P (mem, is_aggregate);
55a6ba9f
JC
8745 return mem;
8746}
8747
b93a436e
JL
8748\f
8749/* Expand an expression EXP that calls a built-in function,
8750 with result going to TARGET if that's convenient
8751 (and in mode MODE if that's convenient).
8752 SUBTARGET may be used as the target for computing one of EXP's operands.
8753 IGNORE is nonzero if the value is to be ignored. */
60bac6ea 8754
b93a436e
JL
8755#define CALLED_AS_BUILT_IN(NODE) \
8756 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
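/* For example (illustrative, not part of the original source), a call
   spelled `__builtin_memcpy (d, s, n)' satisfies CALLED_AS_BUILT_IN
   and may be expanded inline even when not optimizing, while a plain
   `memcpy (d, s, n)' mapped to the same function code only qualifies
   under -O; see the `!optimize && ! CALLED_AS_BUILT_IN (fndecl)'
   tests below.  */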
60bac6ea 8757
b93a436e
JL
8758static rtx
8759expand_builtin (exp, target, subtarget, mode, ignore)
8760 tree exp;
8761 rtx target;
8762 rtx subtarget;
8763 enum machine_mode mode;
8764 int ignore;
8765{
8766 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8767 tree arglist = TREE_OPERAND (exp, 1);
8768 rtx op0;
8769 rtx lab1, insns;
8770 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8771 optab builtin_optab;
60bac6ea 8772
b93a436e
JL
8773 switch (DECL_FUNCTION_CODE (fndecl))
8774 {
8775 case BUILT_IN_ABS:
8776 case BUILT_IN_LABS:
8777 case BUILT_IN_FABS:
8778 /* build_function_call changes these into ABS_EXPR. */
8779 abort ();
4ed67205 8780
b93a436e
JL
8781 case BUILT_IN_SIN:
8782 case BUILT_IN_COS:
8783 /* Treat these like sqrt, but only if the user asks for them. */
8784 if (! flag_fast_math)
8785 break;
8786 case BUILT_IN_FSQRT:
8787 /* If not optimizing, call the library function. */
8788 if (! optimize)
8789 break;
4ed67205 8790
b93a436e
JL
8791 if (arglist == 0
8792 /* Arg could be wrong type if user redeclared this fcn wrong. */
8793 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
4ed67205
RK
8794 break;
8795
b93a436e
JL
8796 /* Stabilize and compute the argument. */
8797 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8798 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8799 {
8800 exp = copy_node (exp);
8801 arglist = copy_node (arglist);
8802 TREE_OPERAND (exp, 1) = arglist;
8803 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8804 }
8805 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
b089937a 8806
b93a436e
JL
8807 /* Make a suitable register to place result in. */
8808 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7565a035 8809
b93a436e
JL
8810 emit_queue ();
8811 start_sequence ();
7565a035 8812
b93a436e
JL
8813 switch (DECL_FUNCTION_CODE (fndecl))
8814 {
8815 case BUILT_IN_SIN:
8816 builtin_optab = sin_optab; break;
8817 case BUILT_IN_COS:
8818 builtin_optab = cos_optab; break;
8819 case BUILT_IN_FSQRT:
8820 builtin_optab = sqrt_optab; break;
8821 default:
8822 abort ();
8823 }
4ed67205 8824
b93a436e
JL
8825 /* Compute into TARGET.
8826 Set TARGET to wherever the result comes back. */
8827 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8828 builtin_optab, op0, target, 0);
8829
8830 /* If we were unable to expand via the builtin, stop the
8831 sequence (without outputting the insns) and break, causing
38e01259 8832 a call to the library function. */
b93a436e 8833 if (target == 0)
4ed67205 8834 {
b93a436e
JL
8835 end_sequence ();
8836 break;
8837 }
4ed67205 8838
b93a436e
JL
8839 /* Check the results by default. But if flag_fast_math is turned on,
8840 then assume sqrt will always be called with valid arguments. */
4ed67205 8841
41af162c 8842 if (flag_errno_math && ! flag_fast_math)
b93a436e
JL
8843 {
8844 /* Don't define the builtin FP instructions
8845 if your machine is not IEEE. */
8846 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8847 abort ();
4ed67205 8848
b93a436e 8849 lab1 = gen_label_rtx ();
ca55abae 8850
b93a436e
JL
8851 /* Test the result; if it is NaN, set errno=EDOM because
8852 the argument was not in the domain. */
c5d5d461
JL
8853 emit_cmp_and_jump_insns (target, target, EQ, 0, GET_MODE (target),
8854 0, 0, lab1);
b93a436e
JL
8855
8856#ifdef TARGET_EDOM
8857 {
8858#ifdef GEN_ERRNO_RTX
8859 rtx errno_rtx = GEN_ERRNO_RTX;
8860#else
8861 rtx errno_rtx
8862 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8863#endif
e87b4f3f 8864
b93a436e
JL
8865 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8866 }
8867#else
8868 /* We can't set errno=EDOM directly; let the library call do it.
8869 Pop the arguments right away in case the call gets deleted. */
8870 NO_DEFER_POP;
8871 expand_call (exp, target, 0);
8872 OK_DEFER_POP;
8873#endif
e7c33f54 8874
b93a436e
JL
8875 emit_label (lab1);
8876 }
0006469d 8877
b93a436e
JL
8878 /* Output the entire sequence. */
8879 insns = get_insns ();
8880 end_sequence ();
8881 emit_insns (insns);
8882
8883 return target;
0006469d 8884
b93a436e
JL
8885 case BUILT_IN_FMOD:
8886 break;
0006469d 8887
b93a436e
JL
8888	    /* __builtin_apply_args returns a block of memory allocated on
8889 the stack into which is stored the arg pointer, structure
8890 value address, static chain, and all the registers that might
8891 possibly be used in performing a function call. The code is
8892 moved to the start of the function so the incoming values are
8893 saved. */
8894 case BUILT_IN_APPLY_ARGS:
8895 /* Don't do __builtin_apply_args more than once in a function.
8896 Save the result of the first call and reuse it. */
8897 if (apply_args_value != 0)
8898 return apply_args_value;
8899 {
8900 /* When this function is called, it means that registers must be
8901 saved on entry to this function. So we migrate the
8902 call to the first insn of this function. */
8903 rtx temp;
8904 rtx seq;
0006469d 8905
b93a436e
JL
8906 start_sequence ();
8907 temp = expand_builtin_apply_args ();
8908 seq = get_insns ();
8909 end_sequence ();
0006469d 8910
b93a436e 8911 apply_args_value = temp;
0006469d 8912
b93a436e
JL
8913 /* Put the sequence after the NOTE that starts the function.
8914 If this is inside a SEQUENCE, make the outer-level insn
8915 chain current, so the code is placed at the start of the
8916 function. */
8917 push_topmost_sequence ();
8918 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8919 pop_topmost_sequence ();
8920 return temp;
8921 }
0006469d 8922
b93a436e
JL
8923 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8924 FUNCTION with a copy of the parameters described by
8925 ARGUMENTS, and ARGSIZE. It returns a block of memory
8926 allocated on the stack into which is stored all the registers
8927 that might possibly be used for returning the result of a
8928 function. ARGUMENTS is the value returned by
8929 __builtin_apply_args. ARGSIZE is the number of bytes of
8930 arguments that must be copied. ??? How should this value be
8931 computed? We'll also need a safe worst case value for varargs
8932 functions. */
8933 case BUILT_IN_APPLY:
8934 if (arglist == 0
8935 /* Arg could be non-pointer if user redeclared this fcn wrong. */
e5e809f4 8936 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
b93a436e
JL
8937 || TREE_CHAIN (arglist) == 0
8938 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8939 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8940 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8941 return const0_rtx;
8942 else
8943 {
8944 int i;
8945 tree t;
8946 rtx ops[3];
0006469d 8947
b93a436e
JL
8948 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8949 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
bbf6f052 8950
b93a436e
JL
8951 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8952 }
bbf6f052 8953
b93a436e
JL
8954 /* __builtin_return (RESULT) causes the function to return the
8955	       value described by RESULT.  RESULT is the address of the block of
8956 memory returned by __builtin_apply. */
8957 case BUILT_IN_RETURN:
8958 if (arglist
8959 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8960 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8961 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8962 NULL_RTX, VOIDmode, 0));
8963 return const0_rtx;
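	    /* Illustrative use of the three builtins handled above (a
	       sketch, not part of the original source; FN and the
	       128-byte argument size are arbitrary):

		   void *args = __builtin_apply_args ();
		   void *ret = __builtin_apply ((void (*)()) fn, args, 128);
		   __builtin_return (ret);

	       This forwards the current function's incoming arguments to
	       FN and returns whatever FN returned.  */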
ca695ac9 8964
b93a436e
JL
8965 case BUILT_IN_SAVEREGS:
8966 /* Don't do __builtin_saveregs more than once in a function.
8967 Save the result of the first call and reuse it. */
8968 if (saveregs_value != 0)
8969 return saveregs_value;
8970 {
8971 /* When this function is called, it means that registers must be
8972 saved on entry to this function. So we migrate the
8973 call to the first insn of this function. */
8974 rtx temp;
8975 rtx seq;
ca695ac9 8976
b93a436e
JL
8977 /* Now really call the function. `expand_call' does not call
8978 expand_builtin, so there is no danger of infinite recursion here. */
8979 start_sequence ();
ca695ac9 8980
b93a436e
JL
8981#ifdef EXPAND_BUILTIN_SAVEREGS
8982 /* Do whatever the machine needs done in this case. */
8983 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8984#else
8985 /* The register where the function returns its value
8986 is likely to have something else in it, such as an argument.
8987 So preserve that register around the call. */
ca695ac9 8988
b93a436e
JL
8989 if (value_mode != VOIDmode)
8990 {
8991 rtx valreg = hard_libcall_value (value_mode);
8992 rtx saved_valreg = gen_reg_rtx (value_mode);
ca695ac9 8993
b93a436e
JL
8994 emit_move_insn (saved_valreg, valreg);
8995 temp = expand_call (exp, target, ignore);
8996 emit_move_insn (valreg, saved_valreg);
ca695ac9
JB
8997 }
8998 else
b93a436e
JL
8999 /* Generate the call, putting the value in a pseudo. */
9000 temp = expand_call (exp, target, ignore);
9001#endif
bbf6f052 9002
b93a436e
JL
9003 seq = get_insns ();
9004 end_sequence ();
bbf6f052 9005
b93a436e 9006 saveregs_value = temp;
bbf6f052 9007
b93a436e
JL
9008 /* Put the sequence after the NOTE that starts the function.
9009 If this is inside a SEQUENCE, make the outer-level insn
9010 chain current, so the code is placed at the start of the
9011 function. */
9012 push_topmost_sequence ();
9013 emit_insns_before (seq, NEXT_INSN (get_insns ()));
9014 pop_topmost_sequence ();
9015 return temp;
9016 }
bbf6f052 9017
b93a436e
JL
9018 /* __builtin_args_info (N) returns word N of the arg space info
9019 for the current function. The number and meanings of words
9020	       are controlled by the definition of CUMULATIVE_ARGS.  */
9021 case BUILT_IN_ARGS_INFO:
9022 {
9023 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
b93a436e 9024 int *word_ptr = (int *) &current_function_args_info;
381127e8
RL
9025#if 0
9026 /* These are used by the code below that is if 0'ed away */
9027 int i;
b93a436e 9028 tree type, elts, result;
381127e8 9029#endif
bbf6f052 9030
b93a436e
JL
9031 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
9032 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
9033 __FILE__, __LINE__);
bbf6f052 9034
b93a436e
JL
9035 if (arglist != 0)
9036 {
9037 tree arg = TREE_VALUE (arglist);
9038 if (TREE_CODE (arg) != INTEGER_CST)
9039 error ("argument of `__builtin_args_info' must be constant");
9040 else
9041 {
9042 int wordnum = TREE_INT_CST_LOW (arg);
bbf6f052 9043
b93a436e
JL
9044 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
9045 error ("argument of `__builtin_args_info' out of range");
9046 else
9047 return GEN_INT (word_ptr[wordnum]);
9048 }
bbf6f052
RK
9049 }
9050 else
b93a436e 9051 error ("missing argument in `__builtin_args_info'");
bbf6f052 9052
b93a436e 9053 return const0_rtx;
bbf6f052 9054
b93a436e
JL
9055#if 0
9056 for (i = 0; i < nwords; i++)
9057 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
bbf6f052 9058
b93a436e
JL
9059 type = build_array_type (integer_type_node,
9060 build_index_type (build_int_2 (nwords, 0)));
9061 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
9062 TREE_CONSTANT (result) = 1;
9063 TREE_STATIC (result) = 1;
9064 result = build (INDIRECT_REF, build_pointer_type (type), result);
9065 TREE_CONSTANT (result) = 1;
9066 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
9067#endif
9068 }
9069
9070 /* Return the address of the first anonymous stack arg. */
9071 case BUILT_IN_NEXT_ARG:
ca695ac9 9072 {
b93a436e
JL
9073 tree fntype = TREE_TYPE (current_function_decl);
9074
9075 if ((TYPE_ARG_TYPES (fntype) == 0
9076 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
9077 == void_type_node))
9078 && ! current_function_varargs)
9079 {
9080 error ("`va_start' used in function with fixed args");
9081 return const0_rtx;
9082 }
9083
9084 if (arglist)
9085 {
9086 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
9087 tree arg = TREE_VALUE (arglist);
9088
9089 /* Strip off all nops for the sake of the comparison. This
9090 is not quite the same as STRIP_NOPS. It does more.
9091 We must also strip off INDIRECT_EXPR for C++ reference
9092 parameters. */
9093 while (TREE_CODE (arg) == NOP_EXPR
9094 || TREE_CODE (arg) == CONVERT_EXPR
9095 || TREE_CODE (arg) == NON_LVALUE_EXPR
9096 || TREE_CODE (arg) == INDIRECT_REF)
9097 arg = TREE_OPERAND (arg, 0);
9098 if (arg != last_parm)
9099 warning ("second parameter of `va_start' not last named argument");
9100 }
9101 else if (! current_function_varargs)
9102 /* Evidently an out of date version of <stdarg.h>; can't validate
9103 va_start's second argument, but can still work as intended. */
9104 warning ("`__builtin_next_arg' called without an argument");
bbf6f052
RK
9105 }
9106
b93a436e
JL
9107 return expand_binop (Pmode, add_optab,
9108 current_function_internal_arg_pointer,
9109 current_function_arg_offset_rtx,
9110 NULL_RTX, 0, OPTAB_LIB_WIDEN);
ca695ac9 9111
b93a436e
JL
9112 case BUILT_IN_CLASSIFY_TYPE:
9113 if (arglist != 0)
9114 {
9115 tree type = TREE_TYPE (TREE_VALUE (arglist));
9116 enum tree_code code = TREE_CODE (type);
9117 if (code == VOID_TYPE)
9118 return GEN_INT (void_type_class);
9119 if (code == INTEGER_TYPE)
9120 return GEN_INT (integer_type_class);
9121 if (code == CHAR_TYPE)
9122 return GEN_INT (char_type_class);
9123 if (code == ENUMERAL_TYPE)
9124 return GEN_INT (enumeral_type_class);
9125 if (code == BOOLEAN_TYPE)
9126 return GEN_INT (boolean_type_class);
9127 if (code == POINTER_TYPE)
9128 return GEN_INT (pointer_type_class);
9129 if (code == REFERENCE_TYPE)
9130 return GEN_INT (reference_type_class);
9131 if (code == OFFSET_TYPE)
9132 return GEN_INT (offset_type_class);
9133 if (code == REAL_TYPE)
9134 return GEN_INT (real_type_class);
9135 if (code == COMPLEX_TYPE)
9136 return GEN_INT (complex_type_class);
9137 if (code == FUNCTION_TYPE)
9138 return GEN_INT (function_type_class);
9139 if (code == METHOD_TYPE)
9140 return GEN_INT (method_type_class);
9141 if (code == RECORD_TYPE)
9142 return GEN_INT (record_type_class);
9143 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
9144 return GEN_INT (union_type_class);
9145 if (code == ARRAY_TYPE)
9146 {
9147 if (TYPE_STRING_FLAG (type))
9148 return GEN_INT (string_type_class);
9149 else
9150 return GEN_INT (array_type_class);
9151 }
9152 if (code == SET_TYPE)
9153 return GEN_INT (set_type_class);
9154 if (code == FILE_TYPE)
9155 return GEN_INT (file_type_class);
9156 if (code == LANG_TYPE)
9157 return GEN_INT (lang_type_class);
9158 }
9159 return GEN_INT (no_type_class);
ca695ac9 9160
b93a436e
JL
9161 case BUILT_IN_CONSTANT_P:
9162 if (arglist == 0)
9163 return const0_rtx;
9164 else
9165 {
9166 tree arg = TREE_VALUE (arglist);
185ebd6c 9167 rtx tmp;
ca695ac9 9168
185ebd6c
RH
9169 /* We return 1 for a numeric type that's known to be a constant
9170 value at compile-time or for an aggregate type that's a
9171 literal constant. */
b93a436e 9172 STRIP_NOPS (arg);
185ebd6c
RH
9173
9174 /* If we know this is a constant, emit the constant of one. */
9175 if (TREE_CODE_CLASS (TREE_CODE (arg)) == 'c'
9176 || (TREE_CODE (arg) == CONSTRUCTOR
9177 && TREE_CONSTANT (arg))
cff48d8f
RH
9178 || (TREE_CODE (arg) == ADDR_EXPR
9179 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
9180 return const1_rtx;
9181
185ebd6c
RH
9182 /* If we aren't going to be running CSE or this expression
9183 has side effects, show we don't know it to be a constant.
9184 Likewise if it's a pointer or aggregate type since in those
9185 case we only want literals, since those are only optimized
9186 when generating RTL, not later. */
9187 if (TREE_SIDE_EFFECTS (arg) || cse_not_expected
9188 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
9189 || POINTER_TYPE_P (TREE_TYPE (arg)))
9190 return const0_rtx;
cff48d8f 9191
ee5332b8
RH
9192 /* Otherwise, emit (constant_p_rtx (ARG)) and let CSE get a
9193 chance to see if it can deduce whether ARG is constant. */
185ebd6c
RH
9194
9195 tmp = expand_expr (arg, NULL_RTX, VOIDmode, 0);
ee5332b8 9196 tmp = gen_rtx_CONSTANT_P_RTX (value_mode, tmp);
185ebd6c 9197 return tmp;
b93a436e 9198 }
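	    /* Examples of the cases above (illustrative, not part of the
	       original source): `__builtin_constant_p (42)' and
	       `__builtin_constant_p ("abc")' fold to 1 immediately;
	       `__builtin_constant_p (i++)' folds to 0 because of the side
	       effect; and `__builtin_constant_p (i)' for a plain int I
	       becomes a CONSTANT_P_RTX for CSE to resolve later, or 0
	       outright when no CSE pass is expected.  */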
ca695ac9 9199
b93a436e
JL
9200 case BUILT_IN_FRAME_ADDRESS:
9201 /* The argument must be a nonnegative integer constant.
9202 It counts the number of frames to scan up the stack.
9203 The value is the address of that frame. */
9204 case BUILT_IN_RETURN_ADDRESS:
9205 /* The argument must be a nonnegative integer constant.
9206 It counts the number of frames to scan up the stack.
9207 The value is the return address saved in that frame. */
9208 if (arglist == 0)
9209 /* Warning about missing arg was already issued. */
9210 return const0_rtx;
9211 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
9212 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
9213 {
9214 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9215 error ("invalid arg to `__builtin_frame_address'");
9216 else
9217 error ("invalid arg to `__builtin_return_address'");
9218 return const0_rtx;
9219 }
9220 else
9221 {
9222 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
9223 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
9224 hard_frame_pointer_rtx);
ee33823f 9225
b93a436e
JL
9226 /* Some ports cannot access arbitrary stack frames. */
9227 if (tem == NULL)
9228 {
9229 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9230 warning ("unsupported arg to `__builtin_frame_address'");
9231 else
9232 warning ("unsupported arg to `__builtin_return_address'");
9233 return const0_rtx;
9234 }
ee33823f 9235
b93a436e
JL
9236 /* For __builtin_frame_address, return what we've got. */
9237 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
9238 return tem;
ee33823f 9239
d22cba0a
RH
9240 if (GET_CODE (tem) != REG
9241 && ! CONSTANT_P (tem))
9242 tem = copy_to_mode_reg (Pmode, tem);
b93a436e
JL
9243 return tem;
9244 }
ee33823f 9245
b93a436e
JL
9246 /* Returns the address of the area where the structure is returned.
9247 0 otherwise. */
9248 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9249 if (arglist != 0
9250 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9251 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
9252 return const0_rtx;
9253 else
9254 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
ee33823f 9255
b93a436e
JL
9256 case BUILT_IN_ALLOCA:
9257 if (arglist == 0
9258 /* Arg could be non-integer if user redeclared this fcn wrong. */
9259 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9260 break;
bbf6f052 9261
b93a436e
JL
9262 /* Compute the argument. */
9263 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
bbf6f052 9264
b93a436e
JL
9265 /* Allocate the desired space. */
9266 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
ca695ac9 9267
b93a436e
JL
9268 case BUILT_IN_FFS:
9269 /* If not optimizing, call the library function. */
9270 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9271 break;
ca695ac9 9272
b93a436e
JL
9273 if (arglist == 0
9274 /* Arg could be non-integer if user redeclared this fcn wrong. */
9275 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
9276 break;
ca695ac9 9277
b93a436e
JL
9278 /* Compute the argument. */
9279 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
9280 /* Compute ffs, into TARGET if possible.
9281 Set TARGET to wherever the result comes back. */
9282 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
9283 ffs_optab, op0, target, 1);
9284 if (target == 0)
9285 abort ();
9286 return target;
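	    /* For example (illustrative, not part of the original source),
	       ffs (12) is 3, since the least significant set bit of binary
	       1100 is bit 2, and ffs (0) is 0; the ffs_optab expansion
	       above follows the same library semantics.  */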
bbf6f052 9287
b93a436e
JL
9288 case BUILT_IN_STRLEN:
9289 /* If not optimizing, call the library function. */
9290 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9291 break;
bbf6f052 9292
b93a436e
JL
9293 if (arglist == 0
9294 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9295 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9296 break;
9297 else
9298 {
9299 tree src = TREE_VALUE (arglist);
9300 tree len = c_strlen (src);
bbf6f052 9301
b93a436e
JL
9302 int align
9303 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
46b68a37 9304
b93a436e
JL
9305 rtx result, src_rtx, char_rtx;
9306 enum machine_mode insn_mode = value_mode, char_mode;
9307 enum insn_code icode;
46b68a37 9308
b93a436e
JL
9309 /* If the length is known, just return it. */
9310 if (len != 0)
9311 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
956d6950 9312
b93a436e
JL
9313 /* If SRC is not a pointer type, don't do this operation inline. */
9314 if (align == 0)
9315 break;
bbf6f052 9316
b93a436e 9317 /* Call a function if we can't compute strlen in the right mode. */
bbf6f052 9318
b93a436e
JL
9319 while (insn_mode != VOIDmode)
9320 {
9321 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
9322 if (icode != CODE_FOR_nothing)
9323 break;
ca695ac9 9324
b93a436e
JL
9325 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
9326 }
9327 if (insn_mode == VOIDmode)
9328 break;
ca695ac9 9329
b93a436e
JL
9330 /* Make a place to write the result of the instruction. */
9331 result = target;
9332 if (! (result != 0
9333 && GET_CODE (result) == REG
9334 && GET_MODE (result) == insn_mode
9335 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9336 result = gen_reg_rtx (insn_mode);
ca695ac9 9337
b93a436e 9338 /* Make sure the operands are acceptable to the predicates. */
ca695ac9 9339
b93a436e
JL
9340 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
9341 result = gen_reg_rtx (insn_mode);
9342 src_rtx = memory_address (BLKmode,
9343 expand_expr (src, NULL_RTX, ptr_mode,
9344 EXPAND_NORMAL));
bbf6f052 9345
b93a436e
JL
9346 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
9347 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
bbf6f052 9348
b93a436e 9349	      /* Check that the string is readable and has an end.  */
7d384cc0 9350 if (current_function_check_memory_usage)
b93a436e 9351 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
6a9c4aed 9352 src_rtx, Pmode,
b93a436e
JL
9353 GEN_INT (MEMORY_USE_RO),
9354 TYPE_MODE (integer_type_node));
bbf6f052 9355
b93a436e
JL
9356 char_rtx = const0_rtx;
9357 char_mode = insn_operand_mode[(int)icode][2];
9358 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
9359 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
bbf6f052 9360
b93a436e
JL
9361 emit_insn (GEN_FCN (icode) (result,
9362 gen_rtx_MEM (BLKmode, src_rtx),
9363 char_rtx, GEN_INT (align)));
bbf6f052 9364
b93a436e
JL
9365 /* Return the value in the proper mode for this function. */
9366 if (GET_MODE (result) == value_mode)
9367 return result;
9368 else if (target != 0)
9369 {
9370 convert_move (target, result, 0);
9371 return target;
9372 }
9373 else
9374 return convert_to_mode (value_mode, result, 0);
9375 }
bbf6f052 9376
b93a436e
JL
9377 case BUILT_IN_STRCPY:
9378 /* If not optimizing, call the library function. */
9379 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9380 break;
bbf6f052 9381
b93a436e
JL
9382 if (arglist == 0
9383 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9384 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9385 || TREE_CHAIN (arglist) == 0
9386 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9387 break;
9388 else
9389 {
9390 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
bbf6f052 9391
b93a436e
JL
9392 if (len == 0)
9393 break;
bbf6f052 9394
b93a436e 9395 len = size_binop (PLUS_EXPR, len, integer_one_node);
6d100794 9396
b93a436e
JL
9397 chainon (arglist, build_tree_list (NULL_TREE, len));
9398 }
6d100794 9399
b93a436e
JL
9400 /* Drops in. */
9401 case BUILT_IN_MEMCPY:
9402 /* If not optimizing, call the library function. */
9403 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9404 break;
e7c33f54 9405
b93a436e
JL
9406 if (arglist == 0
9407 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9408 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9409 || TREE_CHAIN (arglist) == 0
9410 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9411 != POINTER_TYPE)
9412 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9413 || (TREE_CODE (TREE_TYPE (TREE_VALUE
9414 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9415 != INTEGER_TYPE))
9416 break;
9417 else
9418 {
9419 tree dest = TREE_VALUE (arglist);
9420 tree src = TREE_VALUE (TREE_CHAIN (arglist));
9421 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
e7c33f54 9422
b93a436e
JL
9423 int src_align
9424 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9425 int dest_align
9426 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
55a6ba9f 9427 rtx dest_mem, src_mem, dest_addr, len_rtx;
e7c33f54 9428
b93a436e
JL
9429 /* If either SRC or DEST is not a pointer type, don't do
9430 this operation in-line. */
9431 if (src_align == 0 || dest_align == 0)
9432 {
9433 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9434 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9435 break;
9436 }
e7c33f54 9437
55a6ba9f
JC
9438 dest_mem = get_memory_rtx (dest);
9439 src_mem = get_memory_rtx (src);
b93a436e 9440 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
e7c33f54 9441
b93a436e 9442 /* Just copy the rights of SRC to the rights of DEST. */
7d384cc0 9443 if (current_function_check_memory_usage)
b93a436e 9444 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
6a9c4aed
MK
9445 XEXP (dest_mem, 0), Pmode,
9446 XEXP (src_mem, 0), Pmode,
b93a436e 9447 len_rtx, TYPE_MODE (sizetype));
e7c33f54 9448
b93a436e
JL
9449 /* Copy word part most expediently. */
9450 dest_addr
9451 = emit_block_move (dest_mem, src_mem, len_rtx,
9452 MIN (src_align, dest_align));
e7c33f54 9453
b93a436e 9454 if (dest_addr == 0)
55a6ba9f 9455 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
e7c33f54 9456
b93a436e
JL
9457 return dest_addr;
9458 }
e7c33f54 9459
b93a436e
JL
9460 case BUILT_IN_MEMSET:
9461 /* If not optimizing, call the library function. */
9462 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9463 break;
e7c33f54 9464
b93a436e
JL
9465 if (arglist == 0
9466 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9467 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9468 || TREE_CHAIN (arglist) == 0
9469 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9470 != INTEGER_TYPE)
9471 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9472	      || (TREE_CODE (TREE_TYPE
9473		     (TREE_VALUE
9474		      (TREE_CHAIN (TREE_CHAIN (arglist)))))
9475		  != INTEGER_TYPE))
9476 break;
9477 else
9478 {
9479 tree dest = TREE_VALUE (arglist);
9480 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9481 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
e7c33f54 9482
b93a436e
JL
9483 int dest_align
9484 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
55a6ba9f 9485 rtx dest_mem, dest_addr, len_rtx;
e7c33f54 9486
b93a436e
JL
9487 /* If DEST is not a pointer type, don't do this
9488 operation in-line. */
9489 if (dest_align == 0)
9490 break;
bbf6f052 9491
bf931ec8
JW
9492 /* If the arguments have side-effects, then we can only evaluate
9493 them at most once. The following code evaluates them twice if
9494 they are not constants because we break out to expand_call
9495 in that case. They can't be constants if they have side-effects
9496 so we can check for that first. Alternatively, we could call
9497 save_expr to make multiple evaluation safe. */
9498 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9499 break;
9500
b93a436e
JL
9501 /* If VAL is not 0, don't do this operation in-line. */
9502 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9503 break;
bbf6f052 9504
b93a436e
JL
9505 /* If LEN does not expand to a constant, don't do this
9506 operation in-line. */
9507 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9508 if (GET_CODE (len_rtx) != CONST_INT)
9509 break;
bbf6f052 9510
55a6ba9f 9511 dest_mem = get_memory_rtx (dest);
9512
9513 /* Just check that DEST is writable, and mark it as readable. */
7d384cc0 9514 if (current_function_check_memory_usage)
b93a436e 9515 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6a9c4aed 9516 XEXP (dest_mem, 0), Pmode,
9517 len_rtx, TYPE_MODE (sizetype),
9518 GEN_INT (MEMORY_USE_WO),
9519 TYPE_MODE (integer_type_node));
bbf6f052 9520
bbf6f052 9521
b93a436e 9522 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
bbf6f052 9523
b93a436e 9524 if (dest_addr == 0)
55a6ba9f 9525 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
bbf6f052 9526
9527 return dest_addr;
9528 }
bbf6f052 9529
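The upshot of the checks above: only a zero-valued fill whose length folds to a compile-time constant is expanded inline through clear_storage; everything else falls back to the library. A minimal illustration (hypothetical user code, not part of this file):

    #include <string.h>

    void demo (char *buf, unsigned n)
    {
      memset (buf, 0, 64);    /* VAL is 0 and LEN is a CONST_INT:
                                 expanded inline via clear_storage.  */
      memset (buf, 'x', 64);  /* VAL != 0: library call.  */
      memset (buf, 0, n);     /* LEN not constant: library call.  */
    }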
9530/* These comparison functions need an instruction that returns an actual
9531 index. An ordinary compare that just sets the condition codes
9532 is not enough. */
9533#ifdef HAVE_cmpstrsi
9534 case BUILT_IN_STRCMP:
9535 /* If not optimizing, call the library function. */
9536 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9537 break;
bbf6f052 9538
b93a436e 9539 /* If we need to check memory accesses, call the library function. */
7d384cc0 9540 if (current_function_check_memory_usage)
b93a436e 9541 break;
bbf6f052 9542
9543 if (arglist == 0
9544 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9545 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9546 || TREE_CHAIN (arglist) == 0
9547 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9548 break;
9549 else if (!HAVE_cmpstrsi)
9550 break;
9551 {
9552 tree arg1 = TREE_VALUE (arglist);
9553 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
b93a436e 9554 tree len, len2;
a97f5a86 9555
9556 len = c_strlen (arg1);
9557 if (len)
9558 len = size_binop (PLUS_EXPR, integer_one_node, len);
9559 len2 = c_strlen (arg2);
9560 if (len2)
9561 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
e9cdf6e4 9562
9563 /* If we don't have a constant length for the first, use the length
9564 of the second, if we know it. We don't require a constant for
9565 this case; some cost analysis could be done if both are available
9566 but neither is constant. For now, assume they're equally cheap.
e9cdf6e4 9567
9568 If both strings have constant lengths, use the smaller. This
9569 could arise if optimization results in strcmp being called with
9570 two fixed strings, or if the code was machine-generated. We should
9571 add some code to the `memcmp' handler below to deal with such
9572 situations, someday. */
9573 if (!len || TREE_CODE (len) != INTEGER_CST)
9574 {
9575 if (len2)
9576 len = len2;
9577 else if (len == 0)
9578 break;
9579 }
9580 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9581 {
9582 if (tree_int_cst_lt (len2, len))
9583 len = len2;
9584 }
bbf6f052 9585
9586 chainon (arglist, build_tree_list (NULL_TREE, len));
9587 }
bbf6f052 9588
9589 /* Falls through into the BUILT_IN_MEMCMP case. */
9590 case BUILT_IN_MEMCMP:
9591 /* If not optimizing, call the library function. */
9592 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9593 break;
bbf6f052 9594
b93a436e 9595 /* If we need to check memory accesses, call the library function. */
7d384cc0 9596 if (current_function_check_memory_usage)
b93a436e 9597 break;
bbf6f052 9598
9599 if (arglist == 0
9600 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9601 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9602 || TREE_CHAIN (arglist) == 0
9603 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9604 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9605 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9606 break;
9607 else if (!HAVE_cmpstrsi)
9608 break;
9609 {
9610 tree arg1 = TREE_VALUE (arglist);
9611 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9612 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9613 rtx result;
0842a179 9614
9615 int arg1_align
9616 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9617 int arg2_align
9618 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9619 enum machine_mode insn_mode
9620 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
0842a179 9621
9622 /* If we don't have POINTER_TYPE, call the function. */
9623 if (arg1_align == 0 || arg2_align == 0)
9624 {
9625 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9626 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9627 break;
9628 }
bbf6f052 9629
9630 /* Make a place to write the result of the instruction. */
9631 result = target;
9632 if (! (result != 0
9633 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9634 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9635 result = gen_reg_rtx (insn_mode);
bbf6f052 9636
9637 emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9638 get_memory_rtx (arg2),
9639 expand_expr (len, NULL_RTX, VOIDmode, 0),
9640 GEN_INT (MIN (arg1_align, arg2_align))));
bbf6f052 9641
9642 /* Return the value in the proper mode for this function. */
9643 mode = TYPE_MODE (TREE_TYPE (exp));
9644 if (GET_MODE (result) == mode)
9645 return result;
9646 else if (target != 0)
9647 {
9648 convert_move (target, result, 0);
9649 return target;
9650 }
9651 else
9652 return convert_to_mode (mode, result, 0);
9653 }
9654#else
9655 case BUILT_IN_STRCMP:
9656 case BUILT_IN_MEMCMP:
9657 break;
9658#endif
bbf6f052 9659
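On a target that defines the cmpstrsi pattern, the net effect of the two cases above is that a strcmp against a constant string becomes a length-bounded compare: the smaller known length plus one (for the terminating NUL) is chained onto ARGLIST, and control falls into the memcmp code, which emits the cmpstrsi insn. A sketch of source code that benefits (illustrative only):

    #include <string.h>

    int is_yes (const char *s)
    {
      /* len = strlen ("yes") + 1 == 4 is appended to the argument list,
         so this compiles like memcmp (s, "yes", 4) via gen_cmpstrsi
         on machines that provide the pattern.  */
      return strcmp (s, "yes") == 0;
    }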
9660 case BUILT_IN_SETJMP:
9661 if (arglist == 0
9662 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9663 break;
9664 else
9665 {
9666 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9667 VOIDmode, 0);
9668 rtx lab = gen_label_rtx ();
9669 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
9670 emit_label (lab);
9671 return ret;
9672 }
bbf6f052 9673
9674 /* __builtin_longjmp is passed a pointer to an array of five words.
9675 It's similar to the C library longjmp function but works with
9676 __builtin_setjmp above. */
9677 case BUILT_IN_LONGJMP:
9678 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9679 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9680 break;
b93a436e 9681 else
b93a436e 9682 {
9683 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9684 VOIDmode, 0);
9685 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
3e2b9a3d 9686 NULL_RTX, VOIDmode, 0);
9687
9688 if (value != const1_rtx)
9689 {
9690 error ("__builtin_longjmp second argument must be 1");
9691 return const0_rtx;
9692 }
9693
9694 expand_builtin_longjmp (buf_addr, value);
9695 return const0_rtx;
b93a436e 9696 }
bbf6f052 9697
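A minimal usage sketch of the builtin pair handled above (the helper names are hypothetical): the buffer must be an array of five words, and the second argument of __builtin_longjmp must be the literal 1, exactly as the error check enforces.

    static void *jb[5];           /* five words, per the comment above */
    extern void risky_operation (void);

    int guarded (void)
    {
      if (__builtin_setjmp (jb))
        return -1;                /* control lands here after the longjmp */
      risky_operation ();         /* hypothetical; may call bail () */
      return 0;
    }

    void bail (void)
    {
      __builtin_longjmp (jb, 1);  /* any second argument other than 1
                                     is rejected at compile time above */
    }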
9698 case BUILT_IN_TRAP:
9699#ifdef HAVE_trap
9700 if (HAVE_trap)
9701 emit_insn (gen_trap ());
9702 else
9703#endif
9704 error ("__builtin_trap not supported by this target");
9705 emit_barrier ();
9706 return const0_rtx;
9707
9708 /* Various hooks for the DWARF 2 __throw routine. */
9709 case BUILT_IN_UNWIND_INIT:
9710 expand_builtin_unwind_init ();
9711 return const0_rtx;
9712 case BUILT_IN_DWARF_CFA:
9713 return virtual_cfa_rtx;
9714#ifdef DWARF2_UNWIND_INFO
9715 case BUILT_IN_DWARF_FP_REGNUM:
9716 return expand_builtin_dwarf_fp_regnum ();
9717 case BUILT_IN_DWARF_REG_SIZE:
9718 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
fb2ca25a 9719#endif
9720 case BUILT_IN_FROB_RETURN_ADDR:
9721 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9722 case BUILT_IN_EXTRACT_RETURN_ADDR:
9723 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9724 case BUILT_IN_EH_RETURN:
9725 expand_builtin_eh_return (TREE_VALUE (arglist),
9726 TREE_VALUE (TREE_CHAIN (arglist)),
9727 TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))));
b93a436e 9728 return const0_rtx;
ca695ac9 9729
9730 default: /* just do library call, if unknown builtin */
9731 error ("built-in function `%s' not currently supported",
9732 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
ca695ac9 9733 }
0006469d 9734
9735 /* The switch statement above can drop through to cause the function
9736 to be called normally. */
0006469d 9737
b93a436e 9738 return expand_call (exp, target, ignore);
ca695ac9 9739}
9740\f
9741/* Built-in functions to perform an untyped call and return. */
0006469d 9742
9743/* For each register that may be used for calling a function, this
9744 gives a mode used to copy the register's value. VOIDmode indicates
9745 the register is not used for calling a function. If the machine
9746 has register windows, this gives only the outbound registers.
9747 INCOMING_REGNO gives the corresponding inbound register. */
9748static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
0006469d 9749
9750/* For each register that may be used for returning values, this gives
9751 a mode used to copy the register's value. VOIDmode indicates the
9752 register is not used for returning values. If the machine has
9753 register windows, this gives only the outbound registers.
9754 INCOMING_REGNO gives the corresponding inbound register. */
9755static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
0006469d 9756
9757/* For each register that may be used for calling a function, this
9758 gives the offset of that register into the block returned by
9759 __builtin_apply_args. 0 indicates that the register is not
9760 used for calling a function. */
9761static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9762
9763/* Return the offset of register REGNO into the block returned by
9764 __builtin_apply_args. This is not declared static, since it is
9765 needed in objc-act.c. */
0006469d 9766
9767int
9768apply_args_register_offset (regno)
9769 int regno;
9770{
9771 apply_args_size ();
0006469d 9772
9773 /* Arguments are always put in outgoing registers (in the argument
9774 block) when that makes sense. */
9775#ifdef OUTGOING_REGNO
9776 regno = OUTGOING_REGNO(regno);
9777#endif
9778 return apply_args_reg_offset[regno];
9779}
904762c8 9780
9781/* Return the size required for the block returned by __builtin_apply_args,
9782 and initialize apply_args_mode. */
9783
9784static int
9785apply_args_size ()
0006469d 9786{
9787 static int size = -1;
9788 int align, regno;
2f6e6d22 9789 enum machine_mode mode;
0006469d 9790
9791 /* The values computed by this function never change. */
9792 if (size < 0)
ca695ac9 9793 {
9794 /* The first value is the incoming arg-pointer. */
9795 size = GET_MODE_SIZE (Pmode);
0006469d 9796
9797 /* The second value is the structure value address unless this is
9798 passed as an "invisible" first argument. */
9799 if (struct_value_rtx)
9800 size += GET_MODE_SIZE (Pmode);
0006469d 9801
9802 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9803 if (FUNCTION_ARG_REGNO_P (regno))
9804 {
9805 /* Search for the proper mode for copying this register's
9806 value. I'm not sure this is right, but it works so far. */
9807 enum machine_mode best_mode = VOIDmode;
0006469d 9808
9809 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9810 mode != VOIDmode;
9811 mode = GET_MODE_WIDER_MODE (mode))
9812 if (HARD_REGNO_MODE_OK (regno, mode)
9813 && HARD_REGNO_NREGS (regno, mode) == 1)
9814 best_mode = mode;
0006469d 9815
9816 if (best_mode == VOIDmode)
9817 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9818 mode != VOIDmode;
9819 mode = GET_MODE_WIDER_MODE (mode))
9820 if (HARD_REGNO_MODE_OK (regno, mode)
9821 && (mov_optab->handlers[(int) mode].insn_code
9822 != CODE_FOR_nothing))
9823 best_mode = mode;
0006469d 9824
9825 mode = best_mode;
9826 if (mode == VOIDmode)
9827 abort ();
904762c8 9828
9829 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9830 if (size % align != 0)
9831 size = CEIL (size, align) * align;
9832 apply_args_reg_offset[regno] = size;
9833 size += GET_MODE_SIZE (mode);
9834 apply_args_mode[regno] = mode;
9835 }
9836 else
9837 {
9838 apply_args_mode[regno] = VOIDmode;
9839 apply_args_reg_offset[regno] = 0;
9840 }
9841 }
9842 return size;
9843}
0006469d 9844
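Each register slot in the block built above is placed at the natural alignment of its mode, using the CEIL macro defined at the top of this file to round the running offset up. The rounding step in isolation (an illustrative sketch):

    #define CEIL(x,y) (((x) + (y) - 1) / (y))

    /* Round SIZE up to the next multiple of ALIGN, as apply_args_size
       does before assigning each register's offset.  */
    static int round_up (int size, int align)
    {
      if (size % align != 0)
        size = CEIL (size, align) * align;   /* e.g. round_up (6, 4) == 8 */
      return size;
    }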
9845/* Return the size required for the block returned by __builtin_apply,
9846 and initialize apply_result_mode. */
904762c8 9847
9848static int
9849apply_result_size ()
9850{
9851 static int size = -1;
9852 int align, regno;
9853 enum machine_mode mode;
0006469d 9854
9855 /* The values computed by this function never change. */
9856 if (size < 0)
9857 {
9858 size = 0;
0006469d 9859
9860 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9861 if (FUNCTION_VALUE_REGNO_P (regno))
9862 {
9863 /* Search for the proper mode for copying this register's
9864 value. I'm not sure this is right, but it works so far. */
9865 enum machine_mode best_mode = VOIDmode;
0006469d 9866
9867 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9868 mode != TImode;
9869 mode = GET_MODE_WIDER_MODE (mode))
9870 if (HARD_REGNO_MODE_OK (regno, mode))
9871 best_mode = mode;
0006469d 9872
9873 if (best_mode == VOIDmode)
9874 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9875 mode != VOIDmode;
9876 mode = GET_MODE_WIDER_MODE (mode))
9877 if (HARD_REGNO_MODE_OK (regno, mode)
9878 && (mov_optab->handlers[(int) mode].insn_code
9879 != CODE_FOR_nothing))
9880 best_mode = mode;
0006469d 9881
9882 mode = best_mode;
9883 if (mode == VOIDmode)
9884 abort ();
9885
9886 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9887 if (size % align != 0)
9888 size = CEIL (size, align) * align;
9889 size += GET_MODE_SIZE (mode);
9890 apply_result_mode[regno] = mode;
9891 }
9892 else
9893 apply_result_mode[regno] = VOIDmode;
9894
9895 /* Allow targets that use untyped_call and untyped_return to override
9896 the size so that machine-specific information can be stored here. */
9897#ifdef APPLY_RESULT_SIZE
9898 size = APPLY_RESULT_SIZE;
9899#endif
9900 }
9901 return size;
9902}
0006469d 9903
9904#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9905/* Create a vector describing the result block RESULT. If SAVEP is true,
9906 the result block is used to save the values; otherwise it is used to
9907 restore the values. */
9908
9909static rtx
9910result_vector (savep, result)
9911 int savep;
9912 rtx result;
9913{
9914 int regno, size, align, nelts;
9915 enum machine_mode mode;
9916 rtx reg, mem;
9917 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9918
9919 size = nelts = 0;
9920 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9921 if ((mode = apply_result_mode[regno]) != VOIDmode)
9922 {
9923 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9924 if (size % align != 0)
9925 size = CEIL (size, align) * align;
9926 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9927 mem = change_address (result, mode,
9928 plus_constant (XEXP (result, 0), size));
9929 savevec[nelts++] = (savep
9930 ? gen_rtx_SET (VOIDmode, mem, reg)
9931 : gen_rtx_SET (VOIDmode, reg, mem));
9932 size += GET_MODE_SIZE (mode);
ca695ac9 9933 }
9934 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9935}
9936#endif /* HAVE_untyped_call or HAVE_untyped_return */
0006469d 9937
9938/* Save the state required to perform an untyped call with the same
9939 arguments as were passed to the current function. */
904762c8 9940
9941static rtx
9942expand_builtin_apply_args ()
9943{
9944 rtx registers;
9945 int size, align, regno;
9946 enum machine_mode mode;
0006469d 9947
9948 /* Create a block where the arg-pointer, structure value address,
9949 and argument registers can be saved. */
9950 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
0cb1d109 9951
9952 /* Walk past the arg-pointer and structure value address. */
9953 size = GET_MODE_SIZE (Pmode);
9954 if (struct_value_rtx)
9955 size += GET_MODE_SIZE (Pmode);
0cb1d109 9956
9957 /* Save each register used in calling a function to the block. */
9958 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9959 if ((mode = apply_args_mode[regno]) != VOIDmode)
9960 {
9961 rtx tem;
0cb1d109 9962
9963 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9964 if (size % align != 0)
9965 size = CEIL (size, align) * align;
0006469d 9966
b93a436e 9967 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
0e8c9172 9968
9969#ifdef STACK_REGS
9970 /* For reg-stack.c's stack register household.
9971 Compare with a similar piece of code in function.c. */
0006469d 9972
9973 emit_insn (gen_rtx_USE (mode, tem));
9974#endif
0e8c9172 9975
9976 emit_move_insn (change_address (registers, mode,
9977 plus_constant (XEXP (registers, 0),
9978 size)),
9979 tem);
9980 size += GET_MODE_SIZE (mode);
0e8c9172 9981 }
0006469d 9982
9983 /* Save the arg pointer to the block. */
9984 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9985 copy_to_reg (virtual_incoming_args_rtx));
9986 size = GET_MODE_SIZE (Pmode);
0006469d 9987
9988 /* Save the structure value address unless this is passed as an
9989 "invisible" first argument. */
9990 if (struct_value_incoming_rtx)
9991 {
9992 emit_move_insn (change_address (registers, Pmode,
9993 plus_constant (XEXP (registers, 0),
9994 size)),
9995 copy_to_reg (struct_value_incoming_rtx));
9996 size += GET_MODE_SIZE (Pmode);
9997 }
0006469d 9998
9999 /* Return the address of the block. */
10000 return copy_addr_to_reg (XEXP (registers, 0));
10001}
0006469d 10002
10003/* Perform an untyped call and save the state required to perform an
10004 untyped return of whatever value was returned by the given function. */
0006469d 10005
10006static rtx
10007expand_builtin_apply (function, arguments, argsize)
10008 rtx function, arguments, argsize;
10009{
10010 int size, align, regno;
10011 enum machine_mode mode;
10012 rtx incoming_args, result, reg, dest, call_insn;
10013 rtx old_stack_level = 0;
10014 rtx call_fusage = 0;
0006469d 10015
10016 /* Create a block where the return registers can be saved. */
10017 result = assign_stack_local (BLKmode, apply_result_size (), -1);
10018
10019 /* ??? The argsize value should be adjusted here. */
10020
10021 /* Fetch the arg pointer from the ARGUMENTS block. */
10022 incoming_args = gen_reg_rtx (Pmode);
10023 emit_move_insn (incoming_args,
10024 gen_rtx_MEM (Pmode, arguments));
10025#ifndef STACK_GROWS_DOWNWARD
10026 incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
10027 incoming_args, 0, OPTAB_LIB_WIDEN);
10028#endif
10029
10030 /* Perform postincrements before actually calling the function. */
ca695ac9 10031 emit_queue ();
0006469d 10032
10033 /* Push a new argument block and copy the arguments. */
10034 do_pending_stack_adjust ();
0006469d 10035
10036 /* Save the stack pointer using the nonlocal mechanism, if available. */
10037#ifdef HAVE_save_stack_nonlocal
10038 if (HAVE_save_stack_nonlocal)
10039 emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
10040 else
10041#endif
10042 emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);
0006469d 10043
10044 /* Push a block of memory onto the stack to store the memory arguments.
10045 Save the address in a register, and copy the memory arguments. ??? I
10046 haven't figured out how the calling convention macros affect this,
10047 but it's likely that the source and/or destination addresses in
10048 the block copy will need updating in machine specific ways. */
10049 dest = allocate_dynamic_stack_space (argsize, 0, 0);
10050 emit_block_move (gen_rtx_MEM (BLKmode, dest),
10051 gen_rtx_MEM (BLKmode, incoming_args),
10052 argsize,
10053 PARM_BOUNDARY / BITS_PER_UNIT);
10054
10055 /* Refer to the argument block. */
10056 apply_args_size ();
10057 arguments = gen_rtx_MEM (BLKmode, arguments);
10058
10059 /* Walk past the arg-pointer and structure value address. */
10060 size = GET_MODE_SIZE (Pmode);
10061 if (struct_value_rtx)
10062 size += GET_MODE_SIZE (Pmode);
10063
10064 /* Restore each of the registers previously saved. Make USE insns
10065 for each of these registers for use in making the call. */
10066 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10067 if ((mode = apply_args_mode[regno]) != VOIDmode)
10068 {
10069 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10070 if (size % align != 0)
10071 size = CEIL (size, align) * align;
10072 reg = gen_rtx_REG (mode, regno);
10073 emit_move_insn (reg,
10074 change_address (arguments, mode,
10075 plus_constant (XEXP (arguments, 0),
10076 size)));
10077
10078 use_reg (&call_fusage, reg);
10079 size += GET_MODE_SIZE (mode);
10080 }
10081
10082 /* Restore the structure value address unless this is passed as an
10083 "invisible" first argument. */
10084 size = GET_MODE_SIZE (Pmode);
10085 if (struct_value_rtx)
0006469d 10086 {
10087 rtx value = gen_reg_rtx (Pmode);
10088 emit_move_insn (value,
10089 change_address (arguments, Pmode,
10090 plus_constant (XEXP (arguments, 0),
10091 size)));
10092 emit_move_insn (struct_value_rtx, value);
10093 if (GET_CODE (struct_value_rtx) == REG)
10094 use_reg (&call_fusage, struct_value_rtx);
10095 size += GET_MODE_SIZE (Pmode);
ca695ac9 10096 }
0006469d 10097
10098 /* All arguments and registers used for the call are set up by now! */
10099 function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);
0006469d 10100
10101 /* Ensure the address is valid. A SYMBOL_REF is already valid, so nothing
10102 need be done, and we don't want to load it into a register as an
10103 optimization, because prepare_call_address already did that if needed. */
10104 if (GET_CODE (function) != SYMBOL_REF)
10105 function = memory_address (FUNCTION_MODE, function);
0006469d 10106
10107 /* Generate the actual call instruction and save the return value. */
10108#ifdef HAVE_untyped_call
10109 if (HAVE_untyped_call)
10110 emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
10111 result, result_vector (1, result)));
10112 else
10113#endif
10114#ifdef HAVE_call_value
10115 if (HAVE_call_value)
ca695ac9 10116 {
b93a436e 10117 rtx valreg = 0;
0006469d 10118
10119 /* Locate the unique return register. It is not possible to
10120 express a call that sets more than one return register using
10121 call_value; use untyped_call for that. In fact, untyped_call
10122 only needs to save the return registers in the given block. */
10123 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10124 if ((mode = apply_result_mode[regno]) != VOIDmode)
10125 {
10126 if (valreg)
10127 abort (); /* HAVE_untyped_call required. */
10128 valreg = gen_rtx_REG (mode, regno);
10129 }
0006469d 10130
10131 emit_call_insn (gen_call_value (valreg,
10132 gen_rtx_MEM (FUNCTION_MODE, function),
10133 const0_rtx, NULL_RTX, const0_rtx));
0006469d 10134
10135 emit_move_insn (change_address (result, GET_MODE (valreg),
10136 XEXP (result, 0)),
10137 valreg);
ca695ac9 10138 }
10139 else
10140#endif
10141 abort ();
0006469d 10142
10143 /* Find the CALL insn we just emitted. */
10144 for (call_insn = get_last_insn ();
10145 call_insn && GET_CODE (call_insn) != CALL_INSN;
10146 call_insn = PREV_INSN (call_insn))
10147 ;
0006469d 10148
10149 if (! call_insn)
10150 abort ();
0006469d 10151
10152 /* Put the register usage information on the CALL. If there is already
10153 some usage information, put ours at the end. */
10154 if (CALL_INSN_FUNCTION_USAGE (call_insn))
0006469d 10155 {
b93a436e 10156 rtx link;
0006469d 10157
10158 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
10159 link = XEXP (link, 1))
10160 ;
10161
10162 XEXP (link, 1) = call_fusage;
ca695ac9 10163 }
10164 else
10165 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
0006469d 10166
10167 /* Restore the stack. */
10168#ifdef HAVE_save_stack_nonlocal
10169 if (HAVE_save_stack_nonlocal)
10170 emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
10171 else
10172#endif
10173 emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);
10174
10175 /* Return the address of the result block. */
10176 return copy_addr_to_reg (XEXP (result, 0));
0006469d 10177}
bbf6f052 10178
b93a436e 10179/* Perform an untyped return. */
10180
10181static void
10182expand_builtin_return (result)
10183 rtx result;
bbf6f052 10184{
10185 int size, align, regno;
10186 enum machine_mode mode;
10187 rtx reg;
10188 rtx call_fusage = 0;
bbf6f052 10189
10190 apply_result_size ();
10191 result = gen_rtx_MEM (BLKmode, result);
bbf6f052 10192
10193#ifdef HAVE_untyped_return
10194 if (HAVE_untyped_return)
ca695ac9 10195 {
10196 emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
10197 emit_barrier ();
10198 return;
ca695ac9 10199 }
b93a436e 10200#endif
1499e0a8 10201
10202 /* Restore the return value and note that each value is used. */
10203 size = 0;
10204 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
10205 if ((mode = apply_result_mode[regno]) != VOIDmode)
10206 {
10207 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
10208 if (size % align != 0)
10209 size = CEIL (size, align) * align;
10210 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
10211 emit_move_insn (reg,
10212 change_address (result, mode,
10213 plus_constant (XEXP (result, 0),
10214 size)));
10215
10216 push_to_sequence (call_fusage);
10217 emit_insn (gen_rtx_USE (VOIDmode, reg));
10218 call_fusage = get_insns ();
10219 end_sequence ();
10220 size += GET_MODE_SIZE (mode);
10221 }
10222
10223 /* Put the USE insns before the return. */
10224 emit_insns (call_fusage);
10225
10226 /* Return whatever value was restored by jumping directly to the end
10227 of the function. */
10228 expand_null_return ();
10229}
10230\f
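Taken together, the three expanders above implement the GNU builtins that let a function forward its incoming arguments untouched and return whatever the callee returned. A usage sketch; the callee name and the 64-byte bound on pushed-argument size are assumptions for illustration:

    extern double target_fn ();

    double forward ()
    {
      void *args = __builtin_apply_args ();
      void *result = __builtin_apply ((void (*) ()) target_fn, args, 64);
      __builtin_return (result);   /* restores the return registers saved
                                      in the result block and returns */
    }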
10231/* Expand code for a post- or pre- increment or decrement
10232 and return the RTX for the result.
10233 POST is 1 for postinc/decrements and 0 for preinc/decrements. */
1499e0a8 10234
10235static rtx
10236expand_increment (exp, post, ignore)
10237 register tree exp;
10238 int post, ignore;
ca695ac9 10239{
10240 register rtx op0, op1;
10241 register rtx temp, value;
10242 register tree incremented = TREE_OPERAND (exp, 0);
10243 optab this_optab = add_optab;
10244 int icode;
10245 enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
10246 int op0_is_copy = 0;
10247 int single_insn = 0;
10248 /* 1 means we can't store into OP0 directly,
10249 because it is a subreg narrower than a word,
10250 and we don't dare clobber the rest of the word. */
10251 int bad_subreg = 0;
1499e0a8 10252
10253 /* Stabilize any component ref that might need to be
10254 evaluated more than once below. */
10255 if (!post
10256 || TREE_CODE (incremented) == BIT_FIELD_REF
10257 || (TREE_CODE (incremented) == COMPONENT_REF
10258 && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
10259 || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
10260 incremented = stabilize_reference (incremented);
10261 /* Nested *INCREMENT_EXPRs can happen in C++. We must force innermost
10262 ones into save exprs so that they don't accidentally get evaluated
10263 more than once by the code below. */
10264 if (TREE_CODE (incremented) == PREINCREMENT_EXPR
10265 || TREE_CODE (incremented) == PREDECREMENT_EXPR)
10266 incremented = save_expr (incremented);
e9a25f70 10267
10268 /* Compute the operands as RTX.
10269 Note whether OP0 is the actual lvalue or a copy of it:
10270 I believe it is a copy iff it is a register or subreg
10271 and insns were generated in computing it. */
e9a25f70 10272
10273 temp = get_last_insn ();
10274 op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);
e9a25f70 10275
10276 /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
10277 in place but instead must do sign- or zero-extension during assignment,
10278 so we copy it into a new register and let the code below use it as
10279 a copy.
e9a25f70 10280
10281 Note that we can safely modify this SUBREG since it is known not to be
10282 shared (it was made by the expand_expr call above). */
10283
10284 if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
10285 {
10286 if (post)
10287 SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
10288 else
10289 bad_subreg = 1;
10290 }
10291 else if (GET_CODE (op0) == SUBREG
10292 && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
10293 {
10294 /* We cannot increment this SUBREG in place. If we are
10295 post-incrementing, get a copy of the old value. Otherwise,
10296 just mark that we cannot increment in place. */
10297 if (post)
10298 op0 = copy_to_reg (op0);
10299 else
10300 bad_subreg = 1;
10301 }
10302
10303 op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
10304 && temp != get_last_insn ());
10305 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
10306 EXPAND_MEMORY_USE_BAD);
1499e0a8 10307
10308 /* Decide whether incrementing or decrementing. */
10309 if (TREE_CODE (exp) == POSTDECREMENT_EXPR
10310 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10311 this_optab = sub_optab;
10312
10313 /* Convert decrement by a constant into a negative increment. */
10314 if (this_optab == sub_optab
10315 && GET_CODE (op1) == CONST_INT)
ca695ac9 10316 {
10317 op1 = GEN_INT (- INTVAL (op1));
10318 this_optab = add_optab;
ca695ac9 10319 }
1499e0a8 10320
10321 /* For a preincrement, see if we can do this with a single instruction. */
10322 if (!post)
10323 {
10324 icode = (int) this_optab->handlers[(int) mode].insn_code;
10325 if (icode != (int) CODE_FOR_nothing
10326 /* Make sure that OP0 is valid for operands 0 and 1
10327 of the insn we want to queue. */
10328 && (*insn_operand_predicate[icode][0]) (op0, mode)
10329 && (*insn_operand_predicate[icode][1]) (op0, mode)
10330 && (*insn_operand_predicate[icode][2]) (op1, mode))
10331 single_insn = 1;
10332 }
bbf6f052 10333
10334 /* If OP0 is not the actual lvalue, but rather a copy in a register,
10335 then we cannot just increment OP0. We must therefore contrive to
10336 increment the original value. Then, for postincrement, we can return
10337 OP0 since it is a copy of the old value. For preincrement, expand here
10338 unless we can do it with a single insn.
bbf6f052 10339
10340 Likewise if storing directly into OP0 would clobber high bits
10341 we need to preserve (bad_subreg). */
10342 if (op0_is_copy || (!post && !single_insn) || bad_subreg)
a358cee0 10343 {
10344 /* This is the easiest way to increment the value wherever it is.
10345 Problems with multiple evaluation of INCREMENTED are prevented
10346 because either (1) it is a component_ref or preincrement,
10347 in which case it was stabilized above, or (2) it is an array_ref
10348 with constant index in an array in a register, which is
10349 safe to reevaluate. */
10350 tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
10351 || TREE_CODE (exp) == PREDECREMENT_EXPR)
10352 ? MINUS_EXPR : PLUS_EXPR),
10353 TREE_TYPE (exp),
10354 incremented,
10355 TREE_OPERAND (exp, 1));
a358cee0 10356
10357 while (TREE_CODE (incremented) == NOP_EXPR
10358 || TREE_CODE (incremented) == CONVERT_EXPR)
10359 {
10360 newexp = convert (TREE_TYPE (incremented), newexp);
10361 incremented = TREE_OPERAND (incremented, 0);
10362 }
bbf6f052 10363
10364 temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
10365 return post ? op0 : temp;
10366 }
bbf6f052 10367
10368 if (post)
10369 {
10370 /* We have a true reference to the value in OP0.
10371 If there is an insn to add or subtract in this mode, queue it.
10372 Queueing the increment insn avoids the register shuffling
10373 that often results if we must increment now and first save
10374 the old value for subsequent use. */
bbf6f052 10375
10376#if 0 /* Turned off to avoid making extra insn for indexed memref. */
10377 op0 = stabilize (op0);
10378#endif
41dfd40c 10379
10380 icode = (int) this_optab->handlers[(int) mode].insn_code;
10381 if (icode != (int) CODE_FOR_nothing
10382 /* Make sure that OP0 is valid for operands 0 and 1
10383 of the insn we want to queue. */
10384 && (*insn_operand_predicate[icode][0]) (op0, mode)
10385 && (*insn_operand_predicate[icode][1]) (op0, mode))
10386 {
10387 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10388 op1 = force_reg (mode, op1);
bbf6f052 10389
10390 return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
10391 }
10392 if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
10393 {
10394 rtx addr = (general_operand (XEXP (op0, 0), mode)
10395 ? force_reg (Pmode, XEXP (op0, 0))
10396 : copy_to_reg (XEXP (op0, 0)));
10397 rtx temp, result;
ca695ac9 10398
10399 op0 = change_address (op0, VOIDmode, addr);
10400 temp = force_reg (GET_MODE (op0), op0);
10401 if (! (*insn_operand_predicate[icode][2]) (op1, mode))
10402 op1 = force_reg (mode, op1);
ca695ac9 10403
10404 /* The increment queue is LIFO, thus we have to `queue'
10405 the instructions in reverse order. */
10406 enqueue_insn (op0, gen_move_insn (op0, temp));
10407 result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
10408 return result;
10409 }
10410 }
ca695ac9 10411
10412 /* Preincrement, or we can't increment with one simple insn. */
10413 if (post)
10414 /* Save a copy of the value before inc or dec, to return it later. */
10415 temp = value = copy_to_reg (op0);
10416 else
10417 /* Arrange to return the incremented value. */
10418 /* Copy the rtx because expand_binop will protect from the queue,
10419 and the results of that would be invalid for us to return
10420 if our caller does emit_queue before using our result. */
10421 temp = copy_rtx (value = op0);
bbf6f052 10422
10423 /* Increment however we can. */
10424 op1 = expand_binop (mode, this_optab, value, op1,
7d384cc0 10425 current_function_check_memory_usage ? NULL_RTX : op0,
10426 TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
10427 /* Make sure the value is stored into OP0. */
10428 if (op1 != op0)
10429 emit_move_insn (op0, op1);
5718612f 10430
10431 return temp;
10432}
10433\f
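What the queueing above preserves is the ordinary source-level contract: a postincrement yields the old value even though the add itself may be emitted later, while a preincrement performs the add before the value is used. For instance:

    int i = 5, a, b;
    a = i++;   /* post: a == 5; the add can be queued until after the use */
    b = ++i;   /* pre:  the add happens first, so b == 7 */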
10434/* Expand all function calls contained within EXP, innermost ones first.
10435 But don't look within expressions that have sequence points.
10436 For each CALL_EXPR, record the rtx for its value
10437 in the CALL_EXPR_RTL field. */
5718612f 10438
10439static void
10440preexpand_calls (exp)
10441 tree exp;
10442{
10443 register int nops, i;
10444 int type = TREE_CODE_CLASS (TREE_CODE (exp));
5718612f 10445
10446 if (! do_preexpand_calls)
10447 return;
5718612f 10448
b93a436e 10449 /* Only expressions and references can contain calls. */
bbf6f052 10450
10451 if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
10452 return;
bbf6f052 10453
10454 switch (TREE_CODE (exp))
10455 {
10456 case CALL_EXPR:
10457 /* Do nothing if already expanded. */
10458 if (CALL_EXPR_RTL (exp) != 0
10459 /* Do nothing if the call returns a variable-sized object. */
10460 || TREE_CODE (TYPE_SIZE (TREE_TYPE(exp))) != INTEGER_CST
10461 /* Do nothing to built-in functions. */
10462 || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
10463 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
10464 == FUNCTION_DECL)
10465 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
10466 return;
bbf6f052 10467
10468 CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
10469 return;
bbf6f052 10470
10471 case COMPOUND_EXPR:
10472 case COND_EXPR:
10473 case TRUTH_ANDIF_EXPR:
10474 case TRUTH_ORIF_EXPR:
10475 /* If we find one of these, then we can be sure
10476 the adjust will be done for it (since it makes jumps).
10477 Do it now, so that if this is inside an argument
10478 of a function, we don't get the stack adjustment
10479 after some other args have already been pushed. */
10480 do_pending_stack_adjust ();
10481 return;
bbf6f052 10482
10483 case BLOCK:
10484 case RTL_EXPR:
10485 case WITH_CLEANUP_EXPR:
10486 case CLEANUP_POINT_EXPR:
10487 case TRY_CATCH_EXPR:
10488 return;
bbf6f052 10489
10490 case SAVE_EXPR:
10491 if (SAVE_EXPR_RTL (exp) != 0)
10492 return;
10493
10494 default:
10495 break;
ca695ac9 10496 }
bbf6f052 10497
10498 nops = tree_code_length[(int) TREE_CODE (exp)];
10499 for (i = 0; i < nops; i++)
10500 if (TREE_OPERAND (exp, i) != 0)
10501 {
10502 type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
10503 if (type == 'e' || type == '<' || type == '1' || type == '2'
10504 || type == 'r')
10505 preexpand_calls (TREE_OPERAND (exp, i));
10506 }
10507}
10508\f
10509/* At the start of a function, record that we have no previously-pushed
10510 arguments waiting to be popped. */
bbf6f052 10511
10512void
10513init_pending_stack_adjust ()
10514{
10515 pending_stack_adjust = 0;
10516}
bbf6f052 10517
b93a436e 10518/* When exiting from a function, if safe, clear out any pending stack adjust
10519 so the adjustment won't get done.
10520
10521 Note, if the current function calls alloca, then it must have a
10522 frame pointer regardless of the value of flag_omit_frame_pointer. */
bbf6f052 10523
10524void
10525clear_pending_stack_adjust ()
10526{
10527#ifdef EXIT_IGNORE_STACK
10528 if (optimize > 0
10529 && (! flag_omit_frame_pointer || current_function_calls_alloca)
10530 && EXIT_IGNORE_STACK
10531 && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
10532 && ! flag_inline_functions)
10533 pending_stack_adjust = 0;
10534#endif
10535}
bbf6f052 10536
10537/* Pop any previously-pushed arguments that have not been popped yet. */
10538
10539void
10540do_pending_stack_adjust ()
10541{
10542 if (inhibit_defer_pop == 0)
ca695ac9 10543 {
10544 if (pending_stack_adjust != 0)
10545 adjust_stack (GEN_INT (pending_stack_adjust));
10546 pending_stack_adjust = 0;
bbf6f052 10547 }
10548}
10549\f
b93a436e 10550/* Expand conditional expressions. */
bbf6f052 10551
10552/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
10553 LABEL is an rtx of code CODE_LABEL, in this function and all the
10554 functions here. */
bbf6f052 10555
10556void
10557jumpifnot (exp, label)
ca695ac9 10558 tree exp;
b93a436e 10559 rtx label;
bbf6f052 10560{
10561 do_jump (exp, label, NULL_RTX);
10562}
bbf6f052 10563
b93a436e 10564/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero. */
ca695ac9 10565
10566void
10567jumpif (exp, label)
10568 tree exp;
10569 rtx label;
10570{
10571 do_jump (exp, NULL_RTX, label);
10572}
ca695ac9 10573
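A sketch of how these two wrappers are used, in this file's own idiom (illustrative; COND stands for any tree-level condition): to skip a statement unless the condition holds, a caller emits

    rtx label = gen_label_rtx ();
    jumpifnot (cond, label);   /* branch to LABEL if COND is zero */
    /* ... expand the code guarded by COND here ... */
    emit_label (label);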
10574/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
10575 the result is zero, or IF_TRUE_LABEL if the result is one.
10576 Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
10577 meaning fall through in that case.
ca695ac9 10578
10579 do_jump always does any pending stack adjust except when it does not
10580 actually perform a jump. An example where there is no jump
10581 is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.
ca695ac9 10582
10583 This function is responsible for optimizing cases such as
10584 &&, || and comparison operators in EXP. */
5718612f 10585
10586void
10587do_jump (exp, if_false_label, if_true_label)
10588 tree exp;
10589 rtx if_false_label, if_true_label;
10590{
10591 register enum tree_code code = TREE_CODE (exp);
10592 /* Some cases need to create a label to jump to
10593 in order to properly fall through.
10594 These cases set DROP_THROUGH_LABEL nonzero. */
10595 rtx drop_through_label = 0;
10596 rtx temp;
10597 rtx comparison = 0;
10598 int i;
10599 tree type;
10600 enum machine_mode mode;
ca695ac9 10601
10602#ifdef MAX_INTEGER_COMPUTATION_MODE
10603 check_max_integer_computation_mode (exp);
10604#endif
10605
b93a436e 10606 emit_queue ();
ca695ac9 10607
b93a436e 10608 switch (code)
ca695ac9 10609 {
b93a436e 10610 case ERROR_MARK:
ca695ac9 10611 break;
bbf6f052 10612
10613 case INTEGER_CST:
10614 temp = integer_zerop (exp) ? if_false_label : if_true_label;
10615 if (temp)
10616 emit_jump (temp);
10617 break;
bbf6f052 10618
10619#if 0
10620 /* This is not true with #pragma weak */
10621 case ADDR_EXPR:
10622 /* The address of something can never be zero. */
10623 if (if_true_label)
10624 emit_jump (if_true_label);
10625 break;
10626#endif
bbf6f052 10627
10628 case NOP_EXPR:
10629 if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
10630 || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
10631 || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
10632 goto normal;
10633 case CONVERT_EXPR:
10634 /* If we are narrowing the operand, we have to do the compare in the
10635 narrower mode. */
10636 if ((TYPE_PRECISION (TREE_TYPE (exp))
10637 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10638 goto normal;
10639 case NON_LVALUE_EXPR:
10640 case REFERENCE_EXPR:
10641 case ABS_EXPR:
10642 case NEGATE_EXPR:
10643 case LROTATE_EXPR:
10644 case RROTATE_EXPR:
10645 /* These cannot change zero->non-zero or vice versa. */
10646 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10647 break;
bbf6f052 10648
10649#if 0
10650 /* This never takes fewer insns than evaluating the PLUS_EXPR followed
10651 by a test, and can take more if the test is eliminated. */
10652 case PLUS_EXPR:
10653 /* Reduce to minus. */
10654 exp = build (MINUS_EXPR, TREE_TYPE (exp),
10655 TREE_OPERAND (exp, 0),
10656 fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
10657 TREE_OPERAND (exp, 1))));
10658 /* Process as MINUS. */
ca695ac9 10659#endif
bbf6f052 10660
10661 case MINUS_EXPR:
10662 /* Non-zero iff operands of minus differ. */
10663 comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
10664 TREE_OPERAND (exp, 0),
10665 TREE_OPERAND (exp, 1)),
10666 NE, NE);
10667 break;
bbf6f052 10668
10669 case BIT_AND_EXPR:
10670 /* If we are AND'ing with a small constant, do this comparison in the
10671 smallest type that fits. If the machine doesn't have comparisons
10672 that small, it will be converted back to the wider comparison.
10673 This helps if we are testing the sign bit of a narrower object.
10674 combine can't do this for us because it can't know whether a
10675 ZERO_EXTRACT or a compare in a smaller mode exists, but we do. */
bbf6f052 10676
10677 if (! SLOW_BYTE_ACCESS
10678 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
10679 && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
10680 && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
10681 && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
10682 && (type = type_for_mode (mode, 1)) != 0
10683 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10684 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10685 != CODE_FOR_nothing))
10686 {
10687 do_jump (convert (type, exp), if_false_label, if_true_label);
10688 break;
10689 }
10690 goto normal;
bbf6f052 10691
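The payoff of this narrowing is a test such as the one below, where the mask fits in a byte: the jump can then use a QImode compare on machines that have one, instead of a full-word test (illustrative):

    extern int flags;
    extern void handle (void);

    void check (void)
    {
      if (flags & 0x80)   /* the mask fits in 8 bits, so the comparison is
                             done in the smallest type that holds it when
                             the target has a compare that narrow */
        handle ();
    }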
10692 case TRUTH_NOT_EXPR:
10693 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10694 break;
bbf6f052 10695
10696 case TRUTH_ANDIF_EXPR:
10697 if (if_false_label == 0)
10698 if_false_label = drop_through_label = gen_label_rtx ();
10699 do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
10700 start_cleanup_deferral ();
10701 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10702 end_cleanup_deferral ();
10703 break;
bbf6f052 10704
10705 case TRUTH_ORIF_EXPR:
10706 if (if_true_label == 0)
10707 if_true_label = drop_through_label = gen_label_rtx ();
10708 do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
10709 start_cleanup_deferral ();
10710 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10711 end_cleanup_deferral ();
10712 break;
bbf6f052 10713
10714 case COMPOUND_EXPR:
10715 push_temp_slots ();
10716 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
10717 preserve_temp_slots (NULL_RTX);
10718 free_temp_slots ();
10719 pop_temp_slots ();
10720 emit_queue ();
10721 do_pending_stack_adjust ();
10722 do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
10723 break;
bbf6f052 10724
10725 case COMPONENT_REF:
10726 case BIT_FIELD_REF:
10727 case ARRAY_REF:
10728 {
10729 int bitsize, bitpos, unsignedp;
10730 enum machine_mode mode;
10731 tree type;
10732 tree offset;
10733 int volatilep = 0;
10734 int alignment;
bbf6f052 10735
10736 /* Get description of this reference. We don't actually care
10737 about the underlying object here. */
10738 get_inner_reference (exp, &bitsize, &bitpos, &offset,
10739 &mode, &unsignedp, &volatilep,
10740 &alignment);
bbf6f052 10741
10742 type = type_for_size (bitsize, unsignedp);
10743 if (! SLOW_BYTE_ACCESS
10744 && type != 0 && bitsize >= 0
10745 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
10746 && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
10747 != CODE_FOR_nothing))
10748 {
10749 do_jump (convert (type, exp), if_false_label, if_true_label);
10750 break;
10751 }
10752 goto normal;
10753 }
bbf6f052 10754
10755 case COND_EXPR:
10756 /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases. */
10757 if (integer_onep (TREE_OPERAND (exp, 1))
10758 && integer_zerop (TREE_OPERAND (exp, 2)))
10759 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
bbf6f052 10760
10761 else if (integer_zerop (TREE_OPERAND (exp, 1))
10762 && integer_onep (TREE_OPERAND (exp, 2)))
10763 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
bbf6f052 10764
10765 else
10766 {
10767 register rtx label1 = gen_label_rtx ();
10768 drop_through_label = gen_label_rtx ();
bbf6f052 10769
b93a436e 10770 do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);
bbf6f052 10771
10772 start_cleanup_deferral ();
10773 /* Now the THEN-expression. */
10774 do_jump (TREE_OPERAND (exp, 1),
10775 if_false_label ? if_false_label : drop_through_label,
10776 if_true_label ? if_true_label : drop_through_label);
10777 /* In case the do_jump just above never jumps. */
10778 do_pending_stack_adjust ();
10779 emit_label (label1);
bbf6f052 10780
10781 /* Now the ELSE-expression. */
10782 do_jump (TREE_OPERAND (exp, 2),
10783 if_false_label ? if_false_label : drop_through_label,
10784 if_true_label ? if_true_label : drop_through_label);
10785 end_cleanup_deferral ();
10786 }
10787 break;
bbf6f052 10788
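The two special cases above make a conditional expression that is used purely as a truth value cost nothing extra; only the general case needs label1 and a drop-through label. For example:

    extern void g (void);

    void f (int a)
    {
      if (a ? 1 : 0)   /* compiled exactly like `if (a)';
                          (a ? 0 : 1) likewise just inverts the labels */
        g ();
    }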
10789 case EQ_EXPR:
10790 {
10791 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 10792
10793 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10794 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10795 {
10796 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10797 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10798 do_jump
10799 (fold
10800 (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
10801 fold (build (EQ_EXPR, TREE_TYPE (exp),
10802 fold (build1 (REALPART_EXPR,
10803 TREE_TYPE (inner_type),
10804 exp0)),
10805 fold (build1 (REALPART_EXPR,
10806 TREE_TYPE (inner_type),
10807 exp1)))),
10808 fold (build (EQ_EXPR, TREE_TYPE (exp),
10809 fold (build1 (IMAGPART_EXPR,
10810 TREE_TYPE (inner_type),
10811 exp0)),
10812 fold (build1 (IMAGPART_EXPR,
10813 TREE_TYPE (inner_type),
10814 exp1)))))),
10815 if_false_label, if_true_label);
10816 }
10817
10818 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10819 do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
10820
10821 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10822 && !can_compare_p (TYPE_MODE (inner_type)))
10823 do_jump_by_parts_equality (exp, if_false_label, if_true_label);
10824 else
10825 comparison = compare (exp, EQ, EQ);
10826 break;
10827 }
bbf6f052 10828
10829 case NE_EXPR:
10830 {
10831 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
bbf6f052 10832
10833 if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
10834 || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
10835 {
10836 tree exp0 = save_expr (TREE_OPERAND (exp, 0));
10837 tree exp1 = save_expr (TREE_OPERAND (exp, 1));
10838 do_jump
10839 (fold
10840 (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
10841 fold (build (NE_EXPR, TREE_TYPE (exp),
10842 fold (build1 (REALPART_EXPR,
10843 TREE_TYPE (inner_type),
10844 exp0)),
10845 fold (build1 (REALPART_EXPR,
10846 TREE_TYPE (inner_type),
10847 exp1)))),
10848 fold (build (NE_EXPR, TREE_TYPE (exp),
10849 fold (build1 (IMAGPART_EXPR,
10850 TREE_TYPE (inner_type),
10851 exp0)),
10852 fold (build1 (IMAGPART_EXPR,
10853 TREE_TYPE (inner_type),
10854 exp1)))))),
10855 if_false_label, if_true_label);
10856 }
10857
10858 else if (integer_zerop (TREE_OPERAND (exp, 1)))
10859 do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
10860
10861 else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
10862 && !can_compare_p (TYPE_MODE (inner_type)))
10863 do_jump_by_parts_equality (exp, if_true_label, if_false_label);
10864 else
10865 comparison = compare (exp, NE, NE);
10866 break;
10867 }
bbf6f052 10868
10869 case LT_EXPR:
10870 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10871 == MODE_INT)
10872 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10873 do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
10874 else
10875 comparison = compare (exp, LT, LTU);
10876 break;
bbf6f052 10877
10878 case LE_EXPR:
10879 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10880 == MODE_INT)
10881 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10882 do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
10883 else
10884 comparison = compare (exp, LE, LEU);
10885 break;
bbf6f052 10886
10887 case GT_EXPR:
10888 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10889 == MODE_INT)
10890 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10891 do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
10892 else
10893 comparison = compare (exp, GT, GTU);
10894 break;
bbf6f052 10895
10896 case GE_EXPR:
10897 if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
10898 == MODE_INT)
10899 && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
10900 do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
10901 else
10902 comparison = compare (exp, GE, GEU);
10903 break;
bbf6f052 10904
10905 default:
10906 normal:
10907 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
10908#if 0
10909 /* This is not needed any more and causes poor code since it causes
10910 comparisons and tests from non-SI objects to have different code
10911 sequences. */
10912 /* Copy to register to avoid generating bad insns by cse
10913 from (set (mem ...) (arithop)) (set (cc0) (mem ...)). */
10914 if (!cse_not_expected && GET_CODE (temp) == MEM)
10915 temp = copy_to_reg (temp);
ca695ac9 10916#endif
10917 do_pending_stack_adjust ();
10918 if (GET_CODE (temp) == CONST_INT)
10919 comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
10920 else if (GET_CODE (temp) == LABEL_REF)
10921 comparison = const_true_rtx;
10922 else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
10923 && !can_compare_p (GET_MODE (temp)))
10924 /* Note swapping the labels gives us not-equal. */
10925 do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
10926 else if (GET_MODE (temp) != VOIDmode)
10927 comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
10928 NE, TREE_UNSIGNED (TREE_TYPE (exp)),
10929 GET_MODE (temp), NULL_RTX, 0);
10930 else
10931 abort ();
10932 }
bbf6f052 10933
10934 /* Do any postincrements in the expression that was tested. */
10935 emit_queue ();
bbf6f052 10936
10937 /* If COMPARISON is nonzero here, it is an rtx that can be substituted
10938 straight into a conditional jump instruction as the jump condition.
10939 Otherwise, all the work has been done already. */
bbf6f052 10940
10941 if (comparison == const_true_rtx)
10942 {
10943 if (if_true_label)
10944 emit_jump (if_true_label);
10945 }
10946 else if (comparison == const0_rtx)
10947 {
10948 if (if_false_label)
10949 emit_jump (if_false_label);
10950 }
10951 else if (comparison)
10952 do_jump_for_compare (comparison, if_false_label, if_true_label);
bbf6f052 10953
10954 if (drop_through_label)
10955 {
10956 /* If do_jump produces code that might be jumped around,
10957 do any stack adjusts from that code, before the place
10958 where control merges in. */
10959 do_pending_stack_adjust ();
10960 emit_label (drop_through_label);
10961 }
bbf6f052 10962}
10963\f
10964/* Given a comparison expression EXP for values too wide to be compared
10965 with one insn, test the comparison and jump to the appropriate label.
10966 The code of EXP is ignored; we always test GT if SWAP is 0,
10967 and LT if SWAP is 1. */
bbf6f052 10968
10969static void
10970do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
10971 tree exp;
10972 int swap;
10973 rtx if_false_label, if_true_label;
10974{
10975 rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
10976 rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
10977 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
10978 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
10979 rtx drop_through_label = 0;
10980 int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
10981 int i;
bbf6f052 10982
10983 if (! if_true_label || ! if_false_label)
10984 drop_through_label = gen_label_rtx ();
10985 if (! if_true_label)
10986 if_true_label = drop_through_label;
10987 if (! if_false_label)
10988 if_false_label = drop_through_label;
bbf6f052 10989
10990 /* Compare a word at a time, high order first. */
10991 for (i = 0; i < nwords; i++)
f81497d9 10992 {
10993 rtx comp;
10994 rtx op0_word, op1_word;
10995
10996 if (WORDS_BIG_ENDIAN)
10997 {
10998 op0_word = operand_subword_force (op0, i, mode);
10999 op1_word = operand_subword_force (op1, i, mode);
11000 }
f81497d9 11001 else
11002 {
11003 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
11004 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
11005 }
11006
11007 /* All but high-order word must be compared as unsigned. */
11008 comp = compare_from_rtx (op0_word, op1_word,
11009 (unsignedp || i > 0) ? GTU : GT,
11010 unsignedp, word_mode, NULL_RTX, 0);
11011 if (comp == const_true_rtx)
11012 emit_jump (if_true_label);
11013 else if (comp != const0_rtx)
11014 do_jump_for_compare (comp, NULL_RTX, if_true_label);
11015
11016 /* Consider lower words only if these are equal. */
11017 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
11018 NULL_RTX, 0);
11019 if (comp == const_true_rtx)
11020 emit_jump (if_false_label);
11021 else if (comp != const0_rtx)
11022 do_jump_for_compare (comp, NULL_RTX, if_false_label);
f81497d9 11023 }
ca695ac9 11024
11025 if (if_false_label)
11026 emit_jump (if_false_label);
11027 if (drop_through_label)
11028 emit_label (drop_through_label);
11029}
11030
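The loop above is the classic multiword comparison: examine words high-order first, branch as soon as a pair differs, and compare every word below the most significant one as unsigned. The same algorithm in plain C for a two-word quantity (an illustrative sketch, not code from this file):

    /* Nonzero if the two-word value {a_hi, a_lo} is greater than
       {b_hi, b_lo}; the high-order word decides unless equal.  */
    static int gt_2words (long a_hi, unsigned long a_lo,
                          long b_hi, unsigned long b_lo)
    {
      if (a_hi != b_hi)
        return a_hi > b_hi;   /* signed compare on the high-order word */
      return a_lo > b_lo;     /* lower word compared as unsigned */
    }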
11031/* Compare OP0 with OP1, word at a time, in mode MODE.
11032 UNSIGNEDP says to do unsigned comparison.
11033 Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise. */
f81497d9 11034
11035void
11036do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
11037 enum machine_mode mode;
11038 int unsignedp;
11039 rtx op0, op1;
11040 rtx if_false_label, if_true_label;
f81497d9 11041{
11042 int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
11043 rtx drop_through_label = 0;
11044 int i;
f81497d9 11045
b93a436e
JL
11046 if (! if_true_label || ! if_false_label)
11047 drop_through_label = gen_label_rtx ();
11048 if (! if_true_label)
11049 if_true_label = drop_through_label;
11050 if (! if_false_label)
11051 if_false_label = drop_through_label;
f81497d9 11052
b93a436e
JL
11053 /* Compare a word at a time, high order first. */
11054 for (i = 0; i < nwords; i++)
11055 {
11056 rtx comp;
11057 rtx op0_word, op1_word;
bbf6f052 11058
b93a436e
JL
11059 if (WORDS_BIG_ENDIAN)
11060 {
11061 op0_word = operand_subword_force (op0, i, mode);
11062 op1_word = operand_subword_force (op1, i, mode);
11063 }
11064 else
11065 {
11066 op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
11067 op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
11068 }
bbf6f052 11069
b93a436e
JL
11070 /* All but high-order word must be compared as unsigned. */
11071 comp = compare_from_rtx (op0_word, op1_word,
11072 (unsignedp || i > 0) ? GTU : GT,
11073 unsignedp, word_mode, NULL_RTX, 0);
11074 if (comp == const_true_rtx)
11075 emit_jump (if_true_label);
11076 else if (comp != const0_rtx)
11077 do_jump_for_compare (comp, NULL_RTX, if_true_label);
bbf6f052 11078
b93a436e
JL
11079 /* Consider lower words only if these are equal. */
11080 comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
11081 NULL_RTX, 0);
11082 if (comp == const_true_rtx)
11083 emit_jump (if_false_label);
11084 else if (comp != const0_rtx)
11085 do_jump_for_compare (comp, NULL_RTX, if_false_label);
11086 }
bbf6f052 11087
b93a436e
JL
11088 if (if_false_label)
11089 emit_jump (if_false_label);
11090 if (drop_through_label)
11091 emit_label (drop_through_label);
bbf6f052
RK
11092}
11093
b93a436e
JL
11094/* Given an EQ_EXPR expression EXP for values too wide to be compared
11095 with one insn, test the comparison and jump to the appropriate label. */
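
/* Illustrative sketch, not compiler code: the loop below amounts to

     for each word i:  if (a[i] != b[i]) goto if_false;
     goto if_true;

   Signedness never matters here, since equal bit patterns are equal
   under either interpretation.  */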

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
                                   operand_subword_force (op1, i, mode),
                                   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */
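
  /* Illustrative sketch, not compiler code: for a four-word OP0 the
     code below computes

       part = w0 | w1 | w2 | w3;

     and then tests PART against zero once, instead of testing each
     word separately.  */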

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
                                   NULL_RTX, 0);

      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp == const0_rtx)
        emit_jump (if_true_label);
      else
        do_jump_for_compare (comp, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
                                                          GET_MODE (op0)),
                                   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
                        (if_true_label));
      else
        abort ();

      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx first = get_last_insn (), insn, branch;
      int br_count;

      /* Output the branch with the opposite condition.  Then try to invert
         what is generated.  If more than one insn is a branch, or if the
         branch is not the last insn written, abort.  If we can't invert
         the branch, make a true label, redirect this jump to that,
         emit a jump to the false label and define the true label.  */
      /* ??? Note that we wouldn't have to do any of this nonsense if
         we passed both labels into a combined compare-and-branch.
         Ah well, jump threading does a good job of repairing the damage.  */
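
      /* Illustrative sketch, not compiler code: when the emitted branch

           beq if_false_label

         cannot be inverted in place, the fallback below rewrites it as

           beq if_true_label
           jmp if_false_label
         if_true_label:

         preserving the semantics at the cost of one extra jump; the
         mnemonics are hypothetical.  */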

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)])
                        (if_false_label));
      else
        abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
         case that, on some machines, emitting the branch would discard
         the previous compare insn and emit a replacement.  This isn't
         done anymore, but abort if we see that FIRST is deleted.  */

      if (first == 0)
        first = get_insns ();
      else if (INSN_DELETED_P (first))
        abort ();
      else
        first = NEXT_INSN (first);

      /* Look for multiple branches in this sequence, as might be generated
         for a multi-word integer comparison.  */

      br_count = 0;
      branch = NULL_RTX;
      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            branch = insn;
            br_count += 1;
          }

      /* If we've got one branch at the end of the sequence,
         we can try to reverse it.  */

      if (br_count == 1 && NEXT_INSN (branch) == NULL_RTX)
        {
          rtx insn_label;
          insn_label = XEXP (condjump_label (branch), 0);
          JUMP_LABEL (branch) = insn_label;

          if (insn_label != if_false_label)
            abort ();

          if (invert_jump (branch, if_false_label))
            return;
        }

      /* Multiple branches, or reversal failed.  Convert to branches
         around an unconditional jump.  */

      if_true_label = gen_label_rtx ();
      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            rtx insn_label;
            insn_label = XEXP (condjump_label (insn), 0);
            JUMP_LABEL (insn) = insn_label;

            if (insn_label == if_false_label)
              redirect_jump (insn, if_true_label);
          }
      emit_jump (if_false_label);
      emit_label (if_true_label);
    }
}
\f
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */
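
/* Hypothetical call, not from this file: to expand a tree test a < b,
   a caller would write

     compare (exp, LT, LTU);

   and compare selects LT or LTU according to TREE_UNSIGNED of the
   operand type.  */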

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0, op1;
  register tree type;
  register enum machine_mode mode;
  int unsignedp;
  enum rtx_code code;

  /* Don't crash if the comparison was erroneous.  */
  op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
    return op0;

  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  type = TREE_TYPE (TREE_OPERAND (exp, 0));
  mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);
  code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}

/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */
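
/* Hypothetical example, not from this file: a call with the constant
   on the left,

     compare_from_rtx (GEN_INT (4), reg, LT, 0, SImode, NULL_RTX, 0);

   behaves as if it had been written reg > 4; the constant is moved to
   the second position and the condition is adjusted with
   swap_condition.  */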

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */
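
/* Illustrative sketch, not compiler code: for x = (a < b), a working
   store-flag insn yields a single instruction, e.g.

     slt x, a, b        (hypothetical mnemonic)

   while the set/jump/set fallback at the end of this function emits

     x = 1;  if (a < b) goto L;  x = 0;  L:

   with the two constants exchanged when the result must be
   inverted.  */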

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }

  /* Put a constant second.  */
  if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
    {
      tem = arg0; arg0 = arg1; arg1 = tem;
      code = swap_condition (code);
    }

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
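
  /* Worked example, illustrative only: for (x & 8) != 0 the code
     below computes, in effect,

       t = x >> 3;
       t = t & 1;       (the AND is omitted when bit 3 is the sign bit)

     and for (x & 8) == 0 it additionally computes t = t ^ 1.  */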

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is safe
     because, if the emit_store_flag does anything it will succeed and
     OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
\f
/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */
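
/* Hypothetical lowering, not from this file: for

     switch (c) { case 3: ... case 7: ... }

   a caller would first compute index = c - 3 and then emit

     do_tablejump (index, SImode, GEN_INT (4), table_label,
                   default_label);

   where 4 is 7 - 3 and the table holds five entries.  */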

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
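
  /* In other words, illustratively: with LOW already subtracted,

       (unsigned) (c - low) > (unsigned) (high - low)

     holds exactly when c < low or c > high, so a single unsigned
     branch replaces two signed ones.  */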

  emit_cmp_and_jump_insns (index, range, GTU, NULL_RTX, mode, 1,
                           0, default_label);

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */