/* Convert tree expression to rtl instructions, for GNU compiler.
   Copyright (C) 1988, 92-97, 1998 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "machmode.h"
#include "rtl.h"
#include "tree.h"
#include "obstack.h"
#include "flags.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "except.h"
#include "function.h"
#include "insn-flags.h"
#include "insn-codes.h"
#include "insn-config.h"
/* Include expr.h after insn-config.h so we get HAVE_conditional_move.  */
#include "expr.h"
#include "recog.h"
#include "output.h"
#include "typeclass.h"
#include "defaults.h"
#include "toplev.h"

#define CEIL(x,y) (((x) + (y) - 1) / (y))

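/* Illustrative note, not part of the original source: CEIL rounds an
   integer division up rather than down, so CEIL (9, 4) == 3 while
   9 / 4 == 2.  convert_move below uses it as
   CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD) to count how many
   whole words are needed to hold a TO_MODE value.  */
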
/* Decide whether a function's arguments should be processed
   from first to last or from last to first.

   They should if the stack and args grow in opposite directions, but
   only if we have push insns.  */

#ifdef PUSH_ROUNDING

#if defined (STACK_GROWS_DOWNWARD) != defined (ARGS_GROW_DOWNWARD)
#define PUSH_ARGS_REVERSED	/* If it's last to first */
#endif

#endif

#ifndef STACK_PUSH_CODE
#ifdef STACK_GROWS_DOWNWARD
#define STACK_PUSH_CODE PRE_DEC
#else
#define STACK_PUSH_CODE PRE_INC
#endif
#endif

/* Like STACK_BOUNDARY but in units of bytes, not bits.  */
#define STACK_BYTES (STACK_BOUNDARY / BITS_PER_UNIT)

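/* Illustrative note, not part of the original source: on a target
   where STACK_BOUNDARY is 64 and BITS_PER_UNIT is 8, STACK_BYTES
   evaluates to 8, i.e. stack adjustments are made in 8-byte units.  */
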
/* Assume that case vectors are not pc-relative.  */
#ifndef CASE_VECTOR_PC_RELATIVE
#define CASE_VECTOR_PC_RELATIVE 0
#endif

/* If this is nonzero, we do not bother generating VOLATILE
   around volatile memory references, and we are willing to
   output indirect addresses.  If cse is to follow, we reject
   indirect addresses so a useful potential cse is generated;
   if it is used only once, instruction combination will produce
   the same indirect address eventually.  */
int cse_not_expected;

/* Nonzero to generate code for all the subroutines within an
   expression before generating the upper levels of the expression.
   Nowadays this is never zero.  */
int do_preexpand_calls = 1;

/* Number of units that we should eventually pop off the stack.
   These are the arguments to function calls that have already returned.  */
int pending_stack_adjust;

/* Nonzero means stack pops must not be deferred, and deferred stack
   pops must not be output.  It is nonzero inside a function call,
   inside a conditional expression, inside a statement expression,
   and in other cases as well.  */
int inhibit_defer_pop;

/* Nonzero means __builtin_saveregs has already been done in this function.
   The value is the pseudoreg containing the value __builtin_saveregs
   returned.  */
static rtx saveregs_value;

/* Similarly for __builtin_apply_args.  */
static rtx apply_args_value;

/* Nonzero if the machine description has been fixed to accept
   CONSTANT_P_RTX patterns.  We will emit a warning and continue
   if we find we must actually use such a beast.  */
static int can_handle_constant_p;

/* Nonzero means don't check memory usage right now, because the code
   being emitted is itself a memory-usage check.  Used when
   flag_check_memory_usage is true, to avoid infinite recursion.  */
static int in_check_memory_usage;

/* This structure is used by move_by_pieces to describe the move to
   be performed.  */

struct move_by_pieces
{
  rtx to;                  /* Destination block.  */
  rtx to_addr;             /* Address of the destination.  */
  int autinc_to;           /* Nonzero if TO_ADDR is an auto-increment.  */
  int explicit_inc_to;     /* -1/+1 if we emit explicit pre-dec/post-inc.  */
  int to_struct;           /* Nonzero if TO is part of an aggregate.  */
  rtx from;                /* Source block.  */
  rtx from_addr;           /* Address of the source.  */
  int autinc_from;         /* Nonzero if FROM_ADDR is an auto-increment.  */
  int explicit_inc_from;   /* -1/+1 if we emit explicit pre-dec/post-inc.  */
  int from_struct;         /* Nonzero if FROM is part of an aggregate.  */
  int len;                 /* Bytes remaining to be moved.  */
  int offset;              /* Current offset into the blocks.  */
  int reverse;             /* Nonzero to copy from high to low addresses.  */
};

/* This structure is used by clear_by_pieces to describe the clear to
   be performed.  */

struct clear_by_pieces
{
  rtx to;                  /* Block being cleared.  */
  rtx to_addr;             /* Address of the block.  */
  int autinc_to;           /* Nonzero if TO_ADDR is an auto-increment.  */
  int explicit_inc_to;     /* -1/+1 if we emit explicit pre-dec/post-inc.  */
  int to_struct;           /* Nonzero if TO is part of an aggregate.  */
  int len;                 /* Bytes remaining to be cleared.  */
  int offset;              /* Current offset into the block.  */
  int reverse;             /* Nonzero to clear from high to low addresses.  */
};

extern struct obstack permanent_obstack;
extern rtx arg_pointer_save_area;

static rtx get_push_address PROTO ((int));

static rtx enqueue_insn PROTO((rtx, rtx));
static int queued_subexp_p PROTO((rtx));
static void init_queue PROTO((void));
static int move_by_pieces_ninsns PROTO((unsigned int, int));
static void move_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
                                    struct move_by_pieces *));
static void clear_by_pieces PROTO((rtx, int, int));
static void clear_by_pieces_1 PROTO((rtx (*) (rtx, ...), enum machine_mode,
                                     struct clear_by_pieces *));
static int is_zeros_p PROTO((tree));
static int mostly_zeros_p PROTO((tree));
static void store_constructor_field PROTO((rtx, int, int, enum machine_mode,
                                           tree, tree, int));
static void store_constructor PROTO((tree, rtx, int));
static rtx store_field PROTO((rtx, int, int, enum machine_mode, tree,
                              enum machine_mode, int, int, int));
static enum memory_use_mode
  get_memory_usage_from_modifier PROTO((enum expand_modifier));
static tree save_noncopied_parts PROTO((tree, tree));
static tree init_noncopied_parts PROTO((tree, tree));
static int safe_from_p PROTO((rtx, tree, int));
static int fixed_type_p PROTO((tree));
static rtx var_rtx PROTO((tree));
static int get_pointer_alignment PROTO((tree, unsigned));
static tree string_constant PROTO((tree, tree *));
static tree c_strlen PROTO((tree));
static rtx get_memory_rtx PROTO((tree));
static rtx expand_builtin PROTO((tree, rtx, rtx,
                                 enum machine_mode, int));
static int apply_args_size PROTO((void));
static int apply_result_size PROTO((void));
static rtx result_vector PROTO((int, rtx));
static rtx expand_builtin_apply_args PROTO((void));
static rtx expand_builtin_apply PROTO((rtx, rtx, rtx));
static void expand_builtin_return PROTO((rtx));
static rtx expand_increment PROTO((tree, int, int));
static void preexpand_calls PROTO((tree));
static void do_jump_by_parts_greater PROTO((tree, int, rtx, rtx));
static void do_jump_by_parts_equality PROTO((tree, rtx, rtx));
static void do_jump_for_compare PROTO((rtx, rtx, rtx));
static rtx compare PROTO((tree, enum rtx_code, enum rtx_code));
static rtx do_store_flag PROTO((tree, rtx, enum machine_mode, int));

/* Record for each mode whether we can move a register directly to or
   from an object of that mode in memory.  If we can't, we won't try
   to use that mode directly when accessing a field of that mode.  */

static char direct_load[NUM_MACHINE_MODES];
static char direct_store[NUM_MACHINE_MODES];

/* MOVE_RATIO is the number of move instructions that is better than
   a block move.  */

#ifndef MOVE_RATIO
#if defined (HAVE_movstrqi) || defined (HAVE_movstrhi) || defined (HAVE_movstrsi) || defined (HAVE_movstrdi) || defined (HAVE_movstrti)
#define MOVE_RATIO 2
#else
/* If we are optimizing for space (-Os), cut down the default move ratio.  */
#define MOVE_RATIO (optimize_size ? 3 : 15)
#endif
#endif
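
/* Illustrative note, not part of the original source: emit_block_move
   below compares move_by_pieces_ninsns (INTVAL (size), align) against
   MOVE_RATIO.  With the default of 15, a 12-byte copy at 4-byte
   alignment on a 32-bit target (three SImode moves) is expanded
   inline, while a large or poorly aligned copy falls through to a
   movstr pattern or a library call.  */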

/* This array records the insn_code of insns to perform block moves.  */
enum insn_code movstr_optab[NUM_MACHINE_MODES];

/* This array records the insn_code of insns to perform block clears.  */
enum insn_code clrstr_optab[NUM_MACHINE_MODES];

/* SLOW_UNALIGNED_ACCESS is non-zero if unaligned accesses are very slow.  */

#ifndef SLOW_UNALIGNED_ACCESS
#define SLOW_UNALIGNED_ACCESS STRICT_ALIGNMENT
#endif

/* Register mappings for target machines without register windows.  */
#ifndef INCOMING_REGNO
#define INCOMING_REGNO(OUT) (OUT)
#endif
#ifndef OUTGOING_REGNO
#define OUTGOING_REGNO(IN) (IN)
#endif
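
/* Illustrative note, not part of the original source: on a machine
   with register windows, such as SPARC, the target overrides these so
   that e.g. OUTGOING_REGNO maps a callee's incoming argument register
   %i0 to the %o0 the caller sees; on everything else the identity
   mappings above apply.  */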
\f
/* This is run once per compilation to set up which modes can be used
   directly in memory and to initialize the block move optab.  */

void
init_expr_once ()
{
  rtx insn, pat;
  enum machine_mode mode;
  int num_clobbers;
  rtx mem, mem1;
  char *free_point;

  start_sequence ();

  /* Since we are on the permanent obstack, we must be sure we save this
     spot AFTER we call start_sequence, since it will reuse the rtl it
     makes.  */
  free_point = (char *) oballoc (0);

  /* Try indexing by frame ptr and try by stack ptr.
     It is known that on the Convex the stack ptr isn't a valid index.
     With luck, one or the other is valid on any machine.  */
  mem = gen_rtx_MEM (VOIDmode, stack_pointer_rtx);
  mem1 = gen_rtx_MEM (VOIDmode, frame_pointer_rtx);

  insn = emit_insn (gen_rtx_SET (0, NULL_RTX, NULL_RTX));
  pat = PATTERN (insn);

  for (mode = VOIDmode; (int) mode < NUM_MACHINE_MODES;
       mode = (enum machine_mode) ((int) mode + 1))
    {
      int regno;
      rtx reg;

      direct_load[(int) mode] = direct_store[(int) mode] = 0;
      PUT_MODE (mem, mode);
      PUT_MODE (mem1, mode);

      /* See if there is some register that can be used in this mode and
         directly loaded or stored from memory.  */

      if (mode != VOIDmode && mode != BLKmode)
        for (regno = 0; regno < FIRST_PSEUDO_REGISTER
             && (direct_load[(int) mode] == 0 || direct_store[(int) mode] == 0);
             regno++)
          {
            if (! HARD_REGNO_MODE_OK (regno, mode))
              continue;

            reg = gen_rtx_REG (mode, regno);

            SET_SRC (pat) = mem;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = mem1;
            SET_DEST (pat) = reg;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_load[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;

            SET_SRC (pat) = reg;
            SET_DEST (pat) = mem1;
            if (recog (pat, insn, &num_clobbers) >= 0)
              direct_store[(int) mode] = 1;
          }
    }

  /* Find out if CONSTANT_P_RTX is accepted.  */
  SET_DEST (pat) = gen_rtx_REG (TYPE_MODE (integer_type_node),
                                FIRST_PSEUDO_REGISTER);
  SET_SRC (pat) = gen_rtx_CONSTANT_P_RTX (TYPE_MODE (integer_type_node),
                                          SET_DEST (pat));
  if (recog (pat, insn, &num_clobbers) >= 0)
    can_handle_constant_p = 1;

  end_sequence ();
  obfree (free_point);
}
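
/* Illustrative note, not part of the original source: after
   init_expr_once runs, direct_load[(int) mode] is 1 exactly when some
   hard register can be loaded from memory in MODE by an insn the
   target recognizes, e.g. (set (reg:QI n) (mem:QI ...)).  convert_move
   and convert_modes consult these arrays before referring to a MEM in
   a narrower mode.  */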

/* This is run at the start of compiling a function.  */

void
init_expr ()
{
  init_queue ();

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Save all variables describing the current status into the structure *P.
   This is used before starting a nested function.  */

void
save_expr_status (p)
     struct function *p;
{
  /* Instead of saving the postincrement queue, empty it.  */
  emit_queue ();

  p->pending_stack_adjust = pending_stack_adjust;
  p->inhibit_defer_pop = inhibit_defer_pop;
  p->saveregs_value = saveregs_value;
  p->apply_args_value = apply_args_value;
  p->forced_labels = forced_labels;

  pending_stack_adjust = 0;
  inhibit_defer_pop = 0;
  saveregs_value = 0;
  apply_args_value = 0;
  forced_labels = 0;
}

/* Restore all variables describing the current status from the structure *P.
   This is used after a nested function.  */

void
restore_expr_status (p)
     struct function *p;
{
  pending_stack_adjust = p->pending_stack_adjust;
  inhibit_defer_pop = p->inhibit_defer_pop;
  saveregs_value = p->saveregs_value;
  apply_args_value = p->apply_args_value;
  forced_labels = p->forced_labels;
}
\f
/* Manage the queue of increment instructions to be output
   for POSTINCREMENT_EXPR expressions, etc.  */

static rtx pending_chain;

/* Queue up to increment (or change) VAR later.  BODY says how:
   BODY should be the same thing you would pass to emit_insn
   to increment right away.  It will go to emit_insn later on.

   The value is a QUEUED expression to be used in place of VAR
   where you want to guarantee the pre-incrementation value of VAR.  */

static rtx
enqueue_insn (var, body)
     rtx var, body;
{
  pending_chain = gen_rtx_QUEUED (GET_MODE (var),
                                  var, NULL_RTX, NULL_RTX, body,
                                  pending_chain);
  return pending_chain;
}

/* Use protect_from_queue to convert a QUEUED expression
   into something that you can put immediately into an instruction.
   If the queued incrementation has not happened yet,
   protect_from_queue returns the variable itself.
   If the incrementation has happened, protect_from_queue returns a temp
   that contains a copy of the old value of the variable.

   Any time an rtx which might possibly be a QUEUED is to be put
   into an instruction, it must be passed through protect_from_queue first.
   QUEUED expressions are not meaningful in instructions.

   Do not pass a value through protect_from_queue and then hold
   on to it for a while before putting it in an instruction!
   If the queue is flushed in between, incorrect code will result.  */

rtx
protect_from_queue (x, modify)
     register rtx x;
     int modify;
{
  register RTX_CODE code = GET_CODE (x);

#if 0 /* A QUEUED can hang around after the queue is forced out.  */
  /* Shortcut for most common case.  */
  if (pending_chain == 0)
    return x;
#endif

  if (code != QUEUED)
    {
      /* A special hack for read access to (MEM (QUEUED ...)) to facilitate
         use of autoincrement.  Make a copy of the contents of the memory
         location rather than a copy of the address, but not if the value is
         of mode BLKmode.  Don't modify X in place since it might be
         shared.  */
      if (code == MEM && GET_MODE (x) != BLKmode
          && GET_CODE (XEXP (x, 0)) == QUEUED && !modify)
        {
          register rtx y = XEXP (x, 0);
          register rtx new = gen_rtx_MEM (GET_MODE (x), QUEUED_VAR (y));

          MEM_IN_STRUCT_P (new) = MEM_IN_STRUCT_P (x);
          RTX_UNCHANGING_P (new) = RTX_UNCHANGING_P (x);
          MEM_VOLATILE_P (new) = MEM_VOLATILE_P (x);
          MEM_ALIAS_SET (new) = MEM_ALIAS_SET (x);

          if (QUEUED_INSN (y))
            {
              register rtx temp = gen_reg_rtx (GET_MODE (new));
              emit_insn_before (gen_move_insn (temp, new),
                                QUEUED_INSN (y));
              return temp;
            }
          return new;
        }
      /* Otherwise, recursively protect the subexpressions of all
         the kinds of rtx's that can contain a QUEUED.  */
      if (code == MEM)
        {
          rtx tem = protect_from_queue (XEXP (x, 0), 0);
          if (tem != XEXP (x, 0))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = tem;
            }
        }
      else if (code == PLUS || code == MULT)
        {
          rtx new0 = protect_from_queue (XEXP (x, 0), 0);
          rtx new1 = protect_from_queue (XEXP (x, 1), 0);
          if (new0 != XEXP (x, 0) || new1 != XEXP (x, 1))
            {
              x = copy_rtx (x);
              XEXP (x, 0) = new0;
              XEXP (x, 1) = new1;
            }
        }
      return x;
    }
  /* If the increment has not happened, use the variable itself.  */
  if (QUEUED_INSN (x) == 0)
    return QUEUED_VAR (x);
  /* If the increment has happened and a pre-increment copy exists,
     use that copy.  */
  if (QUEUED_COPY (x) != 0)
    return QUEUED_COPY (x);
  /* The increment has happened but we haven't set up a pre-increment copy.
     Set one up now, and use it.  */
  QUEUED_COPY (x) = gen_reg_rtx (GET_MODE (QUEUED_VAR (x)));
  emit_insn_before (gen_move_insn (QUEUED_COPY (x), QUEUED_VAR (x)),
                    QUEUED_INSN (x));
  return QUEUED_COPY (x);
}
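
/* Illustrative sketch, not part of the original source: when the
   expander queues the increment for a C expression such as `a[i++]',
   enqueue_insn wraps I in a QUEUED rtx.  A later use such as

       rtx safe = protect_from_queue (queued_i, 0);

   (QUEUED_I being whatever rtx the expander holds for the queued
   variable) yields I itself if the increment has not been emitted yet,
   or a temporary holding the pre-increment value if it has, so the
   surrounding insn always sees the old value of I.  */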

/* Return nonzero if X contains a QUEUED expression:
   if it contains anything that will be altered by a queued increment.
   We handle only combinations of MEM, PLUS, MINUS and MULT operators
   since memory addresses generally contain only those.  */

static int
queued_subexp_p (x)
     rtx x;
{
  register enum rtx_code code = GET_CODE (x);
  switch (code)
    {
    case QUEUED:
      return 1;
    case MEM:
      return queued_subexp_p (XEXP (x, 0));
    case MULT:
    case PLUS:
    case MINUS:
      return (queued_subexp_p (XEXP (x, 0))
              || queued_subexp_p (XEXP (x, 1)));
    default:
      return 0;
    }
}

/* Perform all the pending incrementations.  */

void
emit_queue ()
{
  register rtx p;
  while ((p = pending_chain))
    {
      rtx body = QUEUED_BODY (p);

      if (GET_CODE (body) == SEQUENCE)
        {
          QUEUED_INSN (p) = XVECEXP (QUEUED_BODY (p), 0, 0);
          emit_insn (QUEUED_BODY (p));
        }
      else
        QUEUED_INSN (p) = emit_insn (QUEUED_BODY (p));
      pending_chain = QUEUED_NEXT (p);
    }
}

/* Sanity check run at the start of a function: the increment queue
   must be empty when compilation of a new function begins.  */

static void
init_queue ()
{
  if (pending_chain)
    abort ();
}
\f
/* Copy data from FROM to TO, where the machine modes are not the same.
   Both modes may be integer, or both may be floating.
   UNSIGNEDP should be nonzero if FROM is an unsigned type.
   This causes zero-extension instead of sign-extension.  */

void
convert_move (to, from, unsignedp)
     register rtx to, from;
     int unsignedp;
{
  enum machine_mode to_mode = GET_MODE (to);
  enum machine_mode from_mode = GET_MODE (from);
  int to_real = GET_MODE_CLASS (to_mode) == MODE_FLOAT;
  int from_real = GET_MODE_CLASS (from_mode) == MODE_FLOAT;
  enum insn_code code;
  rtx libcall;

  /* rtx code for making an equivalent value.  */
  enum rtx_code equiv_code = (unsignedp ? ZERO_EXTEND : SIGN_EXTEND);

  to = protect_from_queue (to, 1);
  from = protect_from_queue (from, 0);

  if (to_real != from_real)
    abort ();

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  We don't handle such SUBREGs as
     TO here.  */

  if (GET_CODE (from) == SUBREG && SUBREG_PROMOTED_VAR_P (from)
      && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (from)))
          >= GET_MODE_SIZE (to_mode))
      && SUBREG_PROMOTED_UNSIGNED_P (from) == unsignedp)
    from = gen_lowpart (to_mode, from), from_mode = to_mode;

  if (GET_CODE (to) == SUBREG && SUBREG_PROMOTED_VAR_P (to))
    abort ();

  if (to_mode == from_mode
      || (from_mode == VOIDmode && CONSTANT_P (from)))
    {
      emit_move_insn (to, from);
      return;
    }

  if (to_real)
    {
      rtx value;

      if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode))
        {
          /* Try converting directly if the insn is supported.  */
          if ((code = can_extend_p (to_mode, from_mode, 0))
              != CODE_FOR_nothing)
            {
              emit_unop_insn (code, to, from, UNKNOWN);
              return;
            }
        }

#ifdef HAVE_trunchfqf2
      if (HAVE_trunchfqf2 && from_mode == HFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunchfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctqfqf2
      if (HAVE_trunctqfqf2 && from_mode == TQFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfqf2
      if (HAVE_truncsfqf2 && from_mode == SFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfqf2
      if (HAVE_truncdfqf2 && from_mode == DFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfqf2
      if (HAVE_truncxfqf2 && from_mode == XFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfqf2
      if (HAVE_trunctfqf2 && from_mode == TFmode && to_mode == QFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_trunctqfhf2
      if (HAVE_trunctqfhf2 && from_mode == TQFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctqfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncsfhf2
      if (HAVE_truncsfhf2 && from_mode == SFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncsfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdfhf2
      if (HAVE_truncdfhf2 && from_mode == DFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfhf2
      if (HAVE_truncxfhf2 && from_mode == XFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfhf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfhf2
      if (HAVE_trunctfhf2 && from_mode == TFmode && to_mode == HFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfhf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncsftqf2
      if (HAVE_truncsftqf2 && from_mode == SFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncsftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncdftqf2
      if (HAVE_truncdftqf2 && from_mode == DFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncdftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxftqf2
      if (HAVE_truncxftqf2 && from_mode == XFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_truncxftqf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctftqf2
      if (HAVE_trunctftqf2 && from_mode == TFmode && to_mode == TQFmode)
        {
          emit_unop_insn (CODE_FOR_trunctftqf2, to, from, UNKNOWN);
          return;
        }
#endif

#ifdef HAVE_truncdfsf2
      if (HAVE_truncdfsf2 && from_mode == DFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncdfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfsf2
      if (HAVE_truncxfsf2 && from_mode == XFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfsf2
      if (HAVE_trunctfsf2 && from_mode == TFmode && to_mode == SFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfsf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_truncxfdf2
      if (HAVE_truncxfdf2 && from_mode == XFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_truncxfdf2, to, from, UNKNOWN);
          return;
        }
#endif
#ifdef HAVE_trunctfdf2
      if (HAVE_trunctfdf2 && from_mode == TFmode && to_mode == DFmode)
        {
          emit_unop_insn (CODE_FOR_trunctfdf2, to, from, UNKNOWN);
          return;
        }
#endif

      libcall = (rtx) 0;
      switch (from_mode)
        {
        case SFmode:
          switch (to_mode)
            {
            case DFmode:
              libcall = extendsfdf2_libfunc;
              break;

            case XFmode:
              libcall = extendsfxf2_libfunc;
              break;

            case TFmode:
              libcall = extendsftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case DFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncdfsf2_libfunc;
              break;

            case XFmode:
              libcall = extenddfxf2_libfunc;
              break;

            case TFmode:
              libcall = extenddftf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case XFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = truncxfsf2_libfunc;
              break;

            case DFmode:
              libcall = truncxfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        case TFmode:
          switch (to_mode)
            {
            case SFmode:
              libcall = trunctfsf2_libfunc;
              break;

            case DFmode:
              libcall = trunctfdf2_libfunc;
              break;

            default:
              break;
            }
          break;

        default:
          break;
        }

      if (libcall == (rtx) 0)
        /* This conversion is not implemented yet.  */
        abort ();

      value = emit_library_call_value (libcall, NULL_RTX, 1, to_mode,
                                       1, from, from_mode);
      emit_move_insn (to, value);
      return;
    }

  /* Now both modes are integers.  */

  /* Handle expanding beyond a word.  */
  if (GET_MODE_BITSIZE (from_mode) < GET_MODE_BITSIZE (to_mode)
      && GET_MODE_BITSIZE (to_mode) > BITS_PER_WORD)
    {
      rtx insns;
      rtx lowpart;
      rtx fill_value;
      rtx lowfrom;
      int i;
      enum machine_mode lowpart_mode;
      int nwords = CEIL (GET_MODE_SIZE (to_mode), UNITS_PER_WORD);

      /* Try converting directly if the insn is supported.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          /* If FROM is a SUBREG, put it into a register.  Do this
             so that we always generate the same set of insns for
             better cse'ing; if an intermediate assignment occurred,
             we won't be doing the operation directly on the SUBREG.  */
          if (optimize > 0 && GET_CODE (from) == SUBREG)
            from = force_reg (from_mode, from);
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      /* Next, try converting via full word.  */
      else if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD
               && ((code = can_extend_p (to_mode, word_mode, unsignedp))
                   != CODE_FOR_nothing))
        {
          if (GET_CODE (to) == REG)
            emit_insn (gen_rtx_CLOBBER (VOIDmode, to));
          convert_move (gen_lowpart (word_mode, to), from, unsignedp);
          emit_unop_insn (code, to,
                          gen_lowpart (word_mode, to), equiv_code);
          return;
        }

      /* No special multiword conversion insn; do it by hand.  */
      start_sequence ();

      /* Since we will turn this into a no conflict block, we must ensure
         that the source does not overlap the target.  */

      if (reg_overlap_mentioned_p (to, from))
        from = force_reg (from_mode, from);

      /* Get a copy of FROM widened to a word, if necessary.  */
      if (GET_MODE_BITSIZE (from_mode) < BITS_PER_WORD)
        lowpart_mode = word_mode;
      else
        lowpart_mode = from_mode;

      lowfrom = convert_to_mode (lowpart_mode, from, unsignedp);

      lowpart = gen_lowpart (lowpart_mode, to);
      emit_move_insn (lowpart, lowfrom);

      /* Compute the value to put in each remaining word.  */
      if (unsignedp)
        fill_value = const0_rtx;
      else
        {
#ifdef HAVE_slt
          if (HAVE_slt
              && insn_operand_mode[(int) CODE_FOR_slt][0] == word_mode
              && STORE_FLAG_VALUE == -1)
            {
              emit_cmp_insn (lowfrom, const0_rtx, NE, NULL_RTX,
                             lowpart_mode, 0, 0);
              fill_value = gen_reg_rtx (word_mode);
              emit_insn (gen_slt (fill_value));
            }
          else
#endif
            {
              fill_value
                = expand_shift (RSHIFT_EXPR, lowpart_mode, lowfrom,
                                size_int (GET_MODE_BITSIZE (lowpart_mode) - 1),
                                NULL_RTX, 0);
              fill_value = convert_to_mode (word_mode, fill_value, 1);
            }
        }

      /* Fill the remaining words.  */
      for (i = GET_MODE_SIZE (lowpart_mode) / UNITS_PER_WORD; i < nwords; i++)
        {
          int index = (WORDS_BIG_ENDIAN ? nwords - i - 1 : i);
          rtx subword = operand_subword (to, index, 1, to_mode);

          if (subword == 0)
            abort ();

          if (fill_value != subword)
            emit_move_insn (subword, fill_value);
        }

      insns = get_insns ();
      end_sequence ();

      emit_no_conflict_block (insns, to, from, NULL_RTX,
                              gen_rtx_fmt_e (equiv_code, to_mode, copy_rtx (from)));
      return;
    }

  /* Truncating multi-word to a word or less.  */
  if (GET_MODE_BITSIZE (from_mode) > BITS_PER_WORD
      && GET_MODE_BITSIZE (to_mode) <= BITS_PER_WORD)
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      convert_move (to, gen_lowpart (word_mode, from), 0);
      return;
    }

  /* Handle pointer conversion.  */		/* SPEE 900220 */
  if (to_mode == PQImode)
    {
      if (from_mode != QImode)
        from = convert_to_mode (QImode, from, unsignedp);

#ifdef HAVE_truncqipqi2
      if (HAVE_truncqipqi2)
        {
          emit_unop_insn (CODE_FOR_truncqipqi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncqipqi2 */
      abort ();
    }

  if (from_mode == PQImode)
    {
      if (to_mode != QImode)
        {
          from = convert_to_mode (QImode, from, unsignedp);
          from_mode = QImode;
        }
      else
        {
#ifdef HAVE_extendpqiqi2
          if (HAVE_extendpqiqi2)
            {
              emit_unop_insn (CODE_FOR_extendpqiqi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpqiqi2 */
          abort ();
        }
    }

  if (to_mode == PSImode)
    {
      if (from_mode != SImode)
        from = convert_to_mode (SImode, from, unsignedp);

#ifdef HAVE_truncsipsi2
      if (HAVE_truncsipsi2)
        {
          emit_unop_insn (CODE_FOR_truncsipsi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncsipsi2 */
      abort ();
    }

  if (from_mode == PSImode)
    {
      if (to_mode != SImode)
        {
          from = convert_to_mode (SImode, from, unsignedp);
          from_mode = SImode;
        }
      else
        {
#ifdef HAVE_extendpsisi2
          if (HAVE_extendpsisi2)
            {
              emit_unop_insn (CODE_FOR_extendpsisi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpsisi2 */
          abort ();
        }
    }

  if (to_mode == PDImode)
    {
      if (from_mode != DImode)
        from = convert_to_mode (DImode, from, unsignedp);

#ifdef HAVE_truncdipdi2
      if (HAVE_truncdipdi2)
        {
          emit_unop_insn (CODE_FOR_truncdipdi2, to, from, UNKNOWN);
          return;
        }
#endif /* HAVE_truncdipdi2 */
      abort ();
    }

  if (from_mode == PDImode)
    {
      if (to_mode != DImode)
        {
          from = convert_to_mode (DImode, from, unsignedp);
          from_mode = DImode;
        }
      else
        {
#ifdef HAVE_extendpdidi2
          if (HAVE_extendpdidi2)
            {
              emit_unop_insn (CODE_FOR_extendpdidi2, to, from, UNKNOWN);
              return;
            }
#endif /* HAVE_extendpdidi2 */
          abort ();
        }
    }

  /* Now follow all the conversions between integers
     no more than a word long.  */

  /* For truncation, usually we can just refer to FROM in a narrower mode.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode)
      && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (to_mode),
                                GET_MODE_BITSIZE (from_mode)))
    {
      if (!((GET_CODE (from) == MEM
             && ! MEM_VOLATILE_P (from)
             && direct_load[(int) to_mode]
             && ! mode_dependent_address_p (XEXP (from, 0)))
            || GET_CODE (from) == REG
            || GET_CODE (from) == SUBREG))
        from = force_reg (from_mode, from);
      if (GET_CODE (from) == REG && REGNO (from) < FIRST_PSEUDO_REGISTER
          && ! HARD_REGNO_MODE_OK (REGNO (from), to_mode))
        from = copy_to_reg (from);
      emit_move_insn (to, gen_lowpart (to_mode, from));
      return;
    }

  /* Handle extension.  */
  if (GET_MODE_BITSIZE (to_mode) > GET_MODE_BITSIZE (from_mode))
    {
      /* Convert directly if that works.  */
      if ((code = can_extend_p (to_mode, from_mode, unsignedp))
          != CODE_FOR_nothing)
        {
          emit_unop_insn (code, to, from, equiv_code);
          return;
        }
      else
        {
          enum machine_mode intermediate;

          /* Search for a mode to convert via.  */
          for (intermediate = from_mode; intermediate != VOIDmode;
               intermediate = GET_MODE_WIDER_MODE (intermediate))
            if (((can_extend_p (to_mode, intermediate, unsignedp)
                  != CODE_FOR_nothing)
                 || (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (intermediate)
                     && TRULY_NOOP_TRUNCATION (to_mode, intermediate)))
                && (can_extend_p (intermediate, from_mode, unsignedp)
                    != CODE_FOR_nothing))
              {
                convert_move (to, convert_to_mode (intermediate, from,
                                                   unsignedp), unsignedp);
                return;
              }

          /* No suitable intermediate mode.  */
          abort ();
        }
    }

  /* Support special truncate insns for certain modes.  */

  if (from_mode == DImode && to_mode == SImode)
    {
#ifdef HAVE_truncdisi2
      if (HAVE_truncdisi2)
        {
          emit_unop_insn (CODE_FOR_truncdisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == HImode)
    {
#ifdef HAVE_truncdihi2
      if (HAVE_truncdihi2)
        {
          emit_unop_insn (CODE_FOR_truncdihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == DImode && to_mode == QImode)
    {
#ifdef HAVE_truncdiqi2
      if (HAVE_truncdiqi2)
        {
          emit_unop_insn (CODE_FOR_truncdiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == HImode)
    {
#ifdef HAVE_truncsihi2
      if (HAVE_truncsihi2)
        {
          emit_unop_insn (CODE_FOR_truncsihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == SImode && to_mode == QImode)
    {
#ifdef HAVE_truncsiqi2
      if (HAVE_truncsiqi2)
        {
          emit_unop_insn (CODE_FOR_truncsiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == HImode && to_mode == QImode)
    {
#ifdef HAVE_trunchiqi2
      if (HAVE_trunchiqi2)
        {
          emit_unop_insn (CODE_FOR_trunchiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == DImode)
    {
#ifdef HAVE_trunctidi2
      if (HAVE_trunctidi2)
        {
          emit_unop_insn (CODE_FOR_trunctidi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == SImode)
    {
#ifdef HAVE_trunctisi2
      if (HAVE_trunctisi2)
        {
          emit_unop_insn (CODE_FOR_trunctisi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == HImode)
    {
#ifdef HAVE_trunctihi2
      if (HAVE_trunctihi2)
        {
          emit_unop_insn (CODE_FOR_trunctihi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  if (from_mode == TImode && to_mode == QImode)
    {
#ifdef HAVE_trunctiqi2
      if (HAVE_trunctiqi2)
        {
          emit_unop_insn (CODE_FOR_trunctiqi2, to, from, UNKNOWN);
          return;
        }
#endif
      convert_move (to, force_reg (from_mode, from), unsignedp);
      return;
    }

  /* Handle truncation of volatile memrefs, and so on;
     the things that couldn't be truncated directly,
     and for which there was no special instruction.  */
  if (GET_MODE_BITSIZE (to_mode) < GET_MODE_BITSIZE (from_mode))
    {
      rtx temp = force_reg (to_mode, gen_lowpart (to_mode, from));
      emit_move_insn (to, temp);
      return;
    }

  /* Mode combination is not recognized.  */
  abort ();
}
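
/* Illustrative sketch, not part of the original source: a typical use
   of convert_move is widening a QImode value NARROW into a fresh
   SImode pseudo with sign extension (UNSIGNEDP == 0):

       rtx wide = gen_reg_rtx (SImode);
       convert_move (wide, narrow, 0);

   This emits an extendqisi2 insn when the target provides one, and
   otherwise falls back to the strategies above.  */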

/* Return an rtx for a value that would result
   from converting X to mode MODE.
   Both X and MODE may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.
   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_to_mode (mode, x, unsignedp)
     enum machine_mode mode;
     rtx x;
     int unsignedp;
{
  return convert_modes (mode, VOIDmode, x, unsignedp);
}

/* Return an rtx for a value that would result
   from converting X from mode OLDMODE to mode MODE.
   Both modes may be floating, or both integer.
   UNSIGNEDP is nonzero if X is an unsigned value.

   This can be done by referring to a part of X in place
   or by copying to a new temporary with conversion.

   You can give VOIDmode for OLDMODE, if you are sure X has a nonvoid mode.

   This function *must not* call protect_from_queue
   except when putting X into an insn (in which case convert_move does it).  */

rtx
convert_modes (mode, oldmode, x, unsignedp)
     enum machine_mode mode, oldmode;
     rtx x;
     int unsignedp;
{
  register rtx temp;

  /* If FROM is a SUBREG that indicates that we have already done at least
     the required extension, strip it.  */

  if (GET_CODE (x) == SUBREG && SUBREG_PROMOTED_VAR_P (x)
      && GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) >= GET_MODE_SIZE (mode)
      && SUBREG_PROMOTED_UNSIGNED_P (x) == unsignedp)
    x = gen_lowpart (mode, x);

  if (GET_MODE (x) != VOIDmode)
    oldmode = GET_MODE (x);

  if (mode == oldmode)
    return x;

  /* There is one case that we must handle specially: If we are converting
     a CONST_INT into a mode whose size is twice HOST_BITS_PER_WIDE_INT and
     we are to interpret the constant as unsigned, gen_lowpart will do
     the wrong thing if the constant appears negative.  What we want to do is
     make the high-order word of the constant zero, not all ones.  */

  if (unsignedp && GET_MODE_CLASS (mode) == MODE_INT
      && GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT
      && GET_CODE (x) == CONST_INT && INTVAL (x) < 0)
    {
      HOST_WIDE_INT val = INTVAL (x);

      if (oldmode != VOIDmode
          && HOST_BITS_PER_WIDE_INT > GET_MODE_BITSIZE (oldmode))
        {
          int width = GET_MODE_BITSIZE (oldmode);

          /* We need to zero extend VAL.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
        }

      return immed_double_const (val, (HOST_WIDE_INT) 0, mode);
    }

  /* We can do this with a gen_lowpart if both desired and current modes
     are integer, and this is either a constant integer, a register, or a
     non-volatile MEM.  Except for the constant case where MODE is no
     wider than HOST_BITS_PER_WIDE_INT, we must be narrowing the operand.  */

  if ((GET_CODE (x) == CONST_INT
       && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
      || (GET_MODE_CLASS (mode) == MODE_INT
          && GET_MODE_CLASS (oldmode) == MODE_INT
          && (GET_CODE (x) == CONST_DOUBLE
              || (GET_MODE_SIZE (mode) <= GET_MODE_SIZE (oldmode)
                  && ((GET_CODE (x) == MEM && ! MEM_VOLATILE_P (x)
                       && direct_load[(int) mode])
                      || (GET_CODE (x) == REG
                          && TRULY_NOOP_TRUNCATION (GET_MODE_BITSIZE (mode),
                                                    GET_MODE_BITSIZE (GET_MODE (x)))))))))
    {
      /* ?? If we don't know OLDMODE, we have to assume here that
         X does not need sign- or zero-extension.  This may not be
         the case, but it's the best we can do.  */
      if (GET_CODE (x) == CONST_INT && oldmode != VOIDmode
          && GET_MODE_SIZE (mode) > GET_MODE_SIZE (oldmode))
        {
          HOST_WIDE_INT val = INTVAL (x);
          int width = GET_MODE_BITSIZE (oldmode);

          /* We must sign or zero-extend in this case.  Start by
             zero-extending, then sign extend if we need to.  */
          val &= ((HOST_WIDE_INT) 1 << width) - 1;
          if (! unsignedp
              && (val & ((HOST_WIDE_INT) 1 << (width - 1))))
            val |= (HOST_WIDE_INT) (-1) << width;

          return GEN_INT (val);
        }

      return gen_lowpart (mode, x);
    }

  temp = gen_reg_rtx (mode);
  convert_move (temp, x, unsignedp);
  return temp;
}
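
/* Illustrative note, not part of the original source: the CONST_INT
   extension case above can be traced by hand.  For

       convert_modes (SImode, QImode, GEN_INT (-1), 1);

   VAL starts as -1, WIDTH is 8, the mask leaves VAL == 255, and since
   UNSIGNEDP is set no sign extension follows, so GEN_INT (255) is
   returned: the unsigned QImode value 0xff widened to SImode.  */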
\f
/* Generate several move instructions to copy LEN bytes
   from block FROM to block TO.  (These are MEM rtx's with BLKmode).
   The caller must pass FROM and TO
   through protect_from_queue before calling.
   ALIGN (in bytes) is maximum alignment we can assume.  */

void
move_by_pieces (to, from, len, align)
     rtx to, from;
     int len, align;
{
  struct move_by_pieces data;
  rtx to_addr = XEXP (to, 0), from_addr = XEXP (from, 0);
  int max_size = MOVE_MAX + 1;

  data.offset = 0;
  data.to_addr = to_addr;
  data.from_addr = from_addr;
  data.to = to;
  data.from = from;
  data.autinc_to
    = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
       || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
  data.autinc_from
    = (GET_CODE (from_addr) == PRE_INC || GET_CODE (from_addr) == PRE_DEC
       || GET_CODE (from_addr) == POST_INC
       || GET_CODE (from_addr) == POST_DEC);

  data.explicit_inc_from = 0;
  data.explicit_inc_to = 0;
  data.reverse
    = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
  if (data.reverse) data.offset = len;
  data.len = len;

  data.to_struct = MEM_IN_STRUCT_P (to);
  data.from_struct = MEM_IN_STRUCT_P (from);

  /* If copying requires more than two move insns,
     copy addresses to registers (to make displacements shorter)
     and use post-increment if available.  */
  if (!(data.autinc_from && data.autinc_to)
      && move_by_pieces_ninsns (len, align) > 2)
    {
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (plus_constant (from_addr, len));
          data.autinc_from = 1;
          data.explicit_inc_from = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.autinc_from)
        {
          data.from_addr = copy_addr_to_reg (from_addr);
          data.autinc_from = 1;
          data.explicit_inc_from = 1;
        }
#endif
      if (!data.autinc_from && CONSTANT_P (from_addr))
        data.from_addr = copy_addr_to_reg (from_addr);
#ifdef HAVE_PRE_DECREMENT
      if (data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
          data.autinc_to = 1;
          data.explicit_inc_to = -1;
        }
#endif
#ifdef HAVE_POST_INCREMENT
      if (! data.reverse && ! data.autinc_to)
        {
          data.to_addr = copy_addr_to_reg (to_addr);
          data.autinc_to = 1;
          data.explicit_inc_to = 1;
        }
#endif
      if (!data.autinc_to && CONSTANT_P (to_addr))
        data.to_addr = copy_addr_to_reg (to_addr);
    }

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  /* First move what we can in the largest integer mode, then go to
     successively smaller modes.  */

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        move_by_pieces_1 (GEN_FCN (icode), mode, &data);

      max_size = GET_MODE_SIZE (mode);
    }

  /* The code above should have handled everything.  */
  if (data.len > 0)
    abort ();
}
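
/* Illustrative note, not part of the original source: on a target
   where MOVE_MAX is 4, a well-aligned 11-byte copy runs the loop
   above as two SImode moves (8 bytes), then one HImode move (2
   bytes), then one QImode move, leaving data.len == 0.
   move_by_pieces_ninsns below performs the same walk to predict that
   count (4) without emitting anything.  */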

/* Return number of insns required to move L bytes by pieces.
   ALIGN (in bytes) is maximum alignment we can assume.  */

static int
move_by_pieces_ninsns (l, align)
     unsigned int l;
     int align;
{
  register int n_insns = 0;
  int max_size = MOVE_MAX + 1;

  if (! SLOW_UNALIGNED_ACCESS
      || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
    align = MOVE_MAX;

  while (max_size > 1)
    {
      enum machine_mode mode = VOIDmode, tmode;
      enum insn_code icode;

      for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
        if (GET_MODE_SIZE (tmode) < max_size)
          mode = tmode;

      if (mode == VOIDmode)
        break;

      icode = mov_optab->handlers[(int) mode].insn_code;
      if (icode != CODE_FOR_nothing
          && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
                           GET_MODE_SIZE (mode)))
        n_insns += l / GET_MODE_SIZE (mode), l %= GET_MODE_SIZE (mode);

      max_size = GET_MODE_SIZE (mode);
    }

  return n_insns;
}

/* Subroutine of move_by_pieces.  Move as many bytes as appropriate
   with move instructions for mode MODE.  GENFUN is the gen_... function
   to make a move insn for that mode.  DATA has all the other info.  */

static void
move_by_pieces_1 (genfun, mode, data)
     rtx (*genfun) PROTO ((rtx, ...));
     enum machine_mode mode;
     struct move_by_pieces *data;
{
  register int size = GET_MODE_SIZE (mode);
  register rtx to1, from1;

  while (data->len >= size)
    {
      if (data->reverse) data->offset -= size;

      to1 = (data->autinc_to
             ? gen_rtx_MEM (mode, data->to_addr)
             : copy_rtx (change_address (data->to, mode,
                                         plus_constant (data->to_addr,
                                                        data->offset))));
      MEM_IN_STRUCT_P (to1) = data->to_struct;

      from1
        = (data->autinc_from
           ? gen_rtx_MEM (mode, data->from_addr)
           : copy_rtx (change_address (data->from, mode,
                                       plus_constant (data->from_addr,
                                                      data->offset))));
      MEM_IN_STRUCT_P (from1) = data->from_struct;

#ifdef HAVE_PRE_DECREMENT
      if (data->explicit_inc_to < 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
      if (data->explicit_inc_from < 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (-size)));
#endif

      emit_insn ((*genfun) (to1, from1));
#ifdef HAVE_POST_INCREMENT
      if (data->explicit_inc_to > 0)
        emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
      if (data->explicit_inc_from > 0)
        emit_insn (gen_add2_insn (data->from_addr, GEN_INT (size)));
#endif

      if (! data->reverse) data->offset += size;

      data->len -= size;
    }
}
\f
/* Emit code to move a block Y to a block X.
   This may be done with string-move instructions,
   with multiple scalar move instructions, or with a library call.

   Both X and Y must be MEM rtx's (perhaps inside VOLATILE)
   with mode BLKmode.
   SIZE is an rtx that says how long they are.
   ALIGN is the maximum alignment we can assume they have,
   measured in bytes.

   Return the address of the new block, if memcpy is called and returns it,
   0 otherwise.  */

rtx
emit_block_move (x, y, size, align)
     rtx x, y;
     rtx size;
     int align;
{
  rtx retval = 0;
#ifdef TARGET_MEM_FUNCTIONS
  static tree fn;
  tree call_expr, arg_list;
#endif

  if (GET_MODE (x) != BLKmode)
    abort ();

  if (GET_MODE (y) != BLKmode)
    abort ();

  x = protect_from_queue (x, 1);
  y = protect_from_queue (y, 0);
  size = protect_from_queue (size, 0);

  if (GET_CODE (x) != MEM)
    abort ();
  if (GET_CODE (y) != MEM)
    abort ();
  if (size == 0)
    abort ();

  if (GET_CODE (size) == CONST_INT
      && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
    move_by_pieces (x, y, INTVAL (size), align);
  else
    {
      /* Try the most limited insn first, because there's no point
         including more than one in the machine description unless
         the more limited one has some advantage.  */

      rtx opalign = GEN_INT (align);
      enum machine_mode mode;

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        {
          enum insn_code code = movstr_optab[(int) mode];

          if (code != CODE_FOR_nothing
              /* We don't need MODE to be narrower than BITS_PER_HOST_WIDE_INT
                 here because if SIZE is less than the mode mask, as it is
                 returned by the macro, it will definitely be less than the
                 actual mode mask.  */
              && ((GET_CODE (size) == CONST_INT
                   && ((unsigned HOST_WIDE_INT) INTVAL (size)
                       <= (GET_MODE_MASK (mode) >> 1)))
                  || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
              && (insn_operand_predicate[(int) code][0] == 0
                  || (*insn_operand_predicate[(int) code][0]) (x, BLKmode))
              && (insn_operand_predicate[(int) code][1] == 0
                  || (*insn_operand_predicate[(int) code][1]) (y, BLKmode))
              && (insn_operand_predicate[(int) code][3] == 0
                  || (*insn_operand_predicate[(int) code][3]) (opalign,
                                                               VOIDmode)))
            {
              rtx op2;
              rtx last = get_last_insn ();
              rtx pat;

              op2 = convert_to_mode (mode, size, 1);
              if (insn_operand_predicate[(int) code][2] != 0
                  && ! (*insn_operand_predicate[(int) code][2]) (op2, mode))
                op2 = copy_to_mode_reg (mode, op2);

              pat = GEN_FCN ((int) code) (x, y, op2, opalign);
              if (pat)
                {
                  emit_insn (pat);
                  return 0;
                }
              else
                delete_insns_since (last);
            }
        }

#ifdef TARGET_MEM_FUNCTIONS
      /* It is incorrect to use the libcall calling conventions to call
         memcpy in this context.

         This could be a user call to memcpy and the user may wish to
         examine the return value from memcpy.

         For targets where libcalls and normal calls have different conventions
         for returning pointers, we could end up generating incorrect code.

         So instead of using a libcall sequence we build up a suitable
         CALL_EXPR and expand the call in the normal fashion.  */
      if (fn == NULL_TREE)
        {
          tree fntype;

          /* This was copied from except.c, I don't know if all this is
             necessary in this context or not.  */
          fn = get_identifier ("memcpy");
          push_obstacks_nochange ();
          end_temporary_allocation ();
          fntype = build_pointer_type (void_type_node);
          fntype = build_function_type (fntype, NULL_TREE);
          fn = build_decl (FUNCTION_DECL, fn, fntype);
          DECL_EXTERNAL (fn) = 1;
          TREE_PUBLIC (fn) = 1;
          DECL_ARTIFICIAL (fn) = 1;
          make_decl_rtl (fn, NULL_PTR, 1);
          assemble_external (fn);
          pop_obstacks ();
        }

      /* We need to make an argument list for the function call.

         memcpy has three arguments, the first two are void * addresses and
         the last is a size_t byte count for the copy.  */
      arg_list
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node),
                                      XEXP (x, 0)));
      TREE_CHAIN (arg_list)
        = build_tree_list (NULL_TREE,
                           make_tree (build_pointer_type (void_type_node),
                                      XEXP (y, 0)));
      TREE_CHAIN (TREE_CHAIN (arg_list))
        = build_tree_list (NULL_TREE, make_tree (sizetype, size));
      TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;

      /* Now we have to build up the CALL_EXPR itself.  */
      call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
      call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
                         call_expr, arg_list, NULL_TREE);
      TREE_SIDE_EFFECTS (call_expr) = 1;

      retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
#else
      emit_library_call (bcopy_libfunc, 0,
                         VOIDmode, 3, XEXP (y, 0), Pmode,
                         XEXP (x, 0), Pmode,
                         convert_to_mode (TYPE_MODE (integer_type_node), size,
                                          TREE_UNSIGNED (integer_type_node)),
                         TYPE_MODE (integer_type_node));
#endif
    }

  return retval;
}
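
/* Illustrative sketch, not part of the original source: a caller that
   wants to copy an 8-byte, word-aligned aggregate already exposed as
   BLKmode MEMs X and Y would write

       emit_block_move (x, y, GEN_INT (8), 4);

   and, per the logic above, get either inline moves (two SImode moves
   here, fewer than MOVE_RATIO), a movstrM pattern, or a call to
   memcpy/bcopy.  */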
\f
/* Copy all or part of a value X into registers starting at REGNO.
   The number of registers to be filled is NREGS.  */

void
move_block_to_reg (regno, x, nregs, mode)
     int regno;
     rtx x;
     int nregs;
     enum machine_mode mode;
{
  int i;
#ifdef HAVE_load_multiple
  rtx pat;
  rtx last;
#endif

  if (nregs == 0)
    return;

  if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
    x = validize_mem (force_const_mem (mode, x));

  /* See if the machine can do this with a load multiple insn.  */
#ifdef HAVE_load_multiple
  if (HAVE_load_multiple)
    {
      last = get_last_insn ();
      pat = gen_load_multiple (gen_rtx_REG (word_mode, regno), x,
                               GEN_INT (nregs));
      if (pat)
        {
          emit_insn (pat);
          return;
        }
      else
        delete_insns_since (last);
    }
#endif

  for (i = 0; i < nregs; i++)
    emit_move_insn (gen_rtx_REG (word_mode, regno + i),
                    operand_subword_force (x, i, mode));
}

1807/* Copy all or part of a BLKmode value X out of registers starting at REGNO.
0040593d
JW
1808 The number of registers to be filled is NREGS. SIZE indicates the number
1809 of bytes in the object X. */
1810
bbf6f052
RK
1811
1812void
0040593d 1813move_block_from_reg (regno, x, nregs, size)
bbf6f052
RK
1814 int regno;
1815 rtx x;
1816 int nregs;
0040593d 1817 int size;
bbf6f052
RK
1818{
1819 int i;
381127e8
RL
1820#ifdef HAVE_store_multiple
1821 rtx pat;
1822 rtx last;
1823#endif
58a32c5c 1824 enum machine_mode mode;
bbf6f052 1825
58a32c5c
DE
1826 /* If SIZE is that of a mode no bigger than a word, just use that
1827 mode's store operation. */
1828 if (size <= UNITS_PER_WORD
1829 && (mode = mode_for_size (size * BITS_PER_UNIT, MODE_INT, 0)) != BLKmode)
1830 {
1831 emit_move_insn (change_address (x, mode, NULL),
38a448ca 1832 gen_rtx_REG (mode, regno));
58a32c5c
DE
1833 return;
1834 }
1835
0040593d 1836 /* Blocks smaller than a word on a BYTES_BIG_ENDIAN machine must be aligned
58a32c5c
DE
1837 to the left before storing to memory. Note that the previous test
1838 doesn't handle all cases (e.g. SIZE == 3). */
0040593d
JW
1839 if (size < UNITS_PER_WORD && BYTES_BIG_ENDIAN)
1840 {
1841 rtx tem = operand_subword (x, 0, 1, BLKmode);
1842 rtx shift;
1843
1844 if (tem == 0)
1845 abort ();
1846
1847 shift = expand_shift (LSHIFT_EXPR, word_mode,
38a448ca 1848 gen_rtx_REG (word_mode, regno),
0040593d
JW
1849 build_int_2 ((UNITS_PER_WORD - size)
1850 * BITS_PER_UNIT, 0), NULL_RTX, 0);
1851 emit_move_insn (tem, shift);
1852 return;
1853 }
1854
bbf6f052
RK
1855 /* See if the machine can do this with a store multiple insn. */
1856#ifdef HAVE_store_multiple
c3a02afe 1857 if (HAVE_store_multiple)
bbf6f052 1858 {
c3a02afe 1859 last = get_last_insn ();
38a448ca 1860 pat = gen_store_multiple (x, gen_rtx_REG (word_mode, regno),
c3a02afe
RK
1861 GEN_INT (nregs));
1862 if (pat)
1863 {
1864 emit_insn (pat);
1865 return;
1866 }
1867 else
1868 delete_insns_since (last);
bbf6f052 1869 }
bbf6f052
RK
1870#endif
1871
1872 for (i = 0; i < nregs; i++)
1873 {
1874 rtx tem = operand_subword (x, i, 1, BLKmode);
1875
1876 if (tem == 0)
1877 abort ();
1878
38a448ca 1879 emit_move_insn (tem, gen_rtx_REG (word_mode, regno + i));
bbf6f052
RK
1880 }
1881}
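/* Worked example, not in the original source: with UNITS_PER_WORD == 4 and
   SIZE == 3 on a BYTES_BIG_ENDIAN target, the shift computed above is
   (4 - 3) * 8 == 8 bits, so the register is shifted left one byte and the
   three meaningful bytes land at the low-order addresses of the stored
   word.  */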
1882
aac5cc16
RH
1883/* Emit code to move a block SRC to a block DST, where DST is non-consecutive
1884 registers represented by a PARALLEL. SSIZE represents the total size of
1885 block SRC in bytes, or -1 if not known. ALIGN is the known alignment of
 1886   SRC in bytes.  */
1887/* ??? If SSIZE % UNITS_PER_WORD != 0, we make the blatant assumption that
1888 the balance will be in what would be the low-order memory addresses, i.e.
1889 left justified for big endian, right justified for little endian. This
1890 happens to be true for the targets currently using this support. If this
1891 ever changes, a new target macro along the lines of FUNCTION_ARG_PADDING
1892 would be needed. */
fffa9c1d
JW
1893
1894void
aac5cc16
RH
1895emit_group_load (dst, orig_src, ssize, align)
1896 rtx dst, orig_src;
1897 int align, ssize;
fffa9c1d 1898{
aac5cc16
RH
1899 rtx *tmps, src;
1900 int start, i;
fffa9c1d 1901
aac5cc16 1902 if (GET_CODE (dst) != PARALLEL)
fffa9c1d
JW
1903 abort ();
1904
1905 /* Check for a NULL entry, used to indicate that the parameter goes
1906 both on the stack and in registers. */
aac5cc16
RH
1907 if (XEXP (XVECEXP (dst, 0, 0), 0))
1908 start = 0;
fffa9c1d 1909 else
aac5cc16
RH
1910 start = 1;
1911
1912 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (dst, 0));
1913
1914 /* If we won't be loading directly from memory, protect the real source
1915 from strange tricks we might play. */
1916 src = orig_src;
1917 if (GET_CODE (src) != MEM)
1918 {
1919 src = gen_reg_rtx (GET_MODE (orig_src));
1920 emit_move_insn (src, orig_src);
1921 }
1922
1923 /* Process the pieces. */
1924 for (i = start; i < XVECLEN (dst, 0); i++)
1925 {
1926 enum machine_mode mode = GET_MODE (XEXP (XVECEXP (dst, 0, i), 0));
1927 int bytepos = INTVAL (XEXP (XVECEXP (dst, 0, i), 1));
1928 int bytelen = GET_MODE_SIZE (mode);
1929 int shift = 0;
1930
1931 /* Handle trailing fragments that run over the size of the struct. */
1932 if (ssize >= 0 && bytepos + bytelen > ssize)
1933 {
1934 shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
1935 bytelen = ssize - bytepos;
1936 if (bytelen <= 0)
1937 abort();
1938 }
1939
1940 /* Optimize the access just a bit. */
1941 if (GET_CODE (src) == MEM
1942 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
1943 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
1944 && bytelen == GET_MODE_SIZE (mode))
1945 {
1946 tmps[i] = gen_reg_rtx (mode);
1947 emit_move_insn (tmps[i],
1948 change_address (src, mode,
1949 plus_constant (XEXP (src, 0),
1950 bytepos)));
fffa9c1d
JW
1951 }
1952 else
aac5cc16
RH
1953 {
1954 tmps[i] = extract_bit_field (src, bytelen*BITS_PER_UNIT,
1955 bytepos*BITS_PER_UNIT, 1, NULL_RTX,
1956 mode, mode, align, ssize);
1957 }
fffa9c1d 1958
aac5cc16
RH
1959 if (BYTES_BIG_ENDIAN && shift)
1960 {
1961 expand_binop (mode, ashl_optab, tmps[i], GEN_INT (shift),
1962 tmps[i], 0, OPTAB_WIDEN);
1963 }
fffa9c1d 1964 }
aac5cc16
RH
1965 emit_queue();
1966
1967 /* Copy the extracted pieces into the proper (probable) hard regs. */
1968 for (i = start; i < XVECLEN (dst, 0); i++)
1969 emit_move_insn (XEXP (XVECEXP (dst, 0, i), 0), tmps[i]);
fffa9c1d
JW
1970}
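/* Illustrative sketch, not from the original source: the kind of PARALLEL
   emit_group_load expects as DST, describing a 16-byte block whose first
   8 bytes go in (hypothetical) register 4 and whose next 8 bytes go in
   register 5:

     (parallel [(expr_list (reg:DI 4) (const_int 0))
		(expr_list (reg:DI 5) (const_int 8))])

   Each element pairs a destination register with its byte offset into the
   block; a null first register marks a parameter that also lives on the
   stack, as tested above.  */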
1971
aac5cc16
RH
1972/* Emit code to move a block SRC to a block DST, where SRC is non-consecutive
1973 registers represented by a PARALLEL. SSIZE represents the total size of
 1974   block DST, or -1 if not known.  ALIGN is the known alignment of DST,
 1975   in bytes.  */
fffa9c1d
JW
1975
1976void
aac5cc16
RH
1977emit_group_store (orig_dst, src, ssize, align)
1978 rtx orig_dst, src;
1979 int ssize, align;
fffa9c1d 1980{
aac5cc16
RH
1981 rtx *tmps, dst;
1982 int start, i;
fffa9c1d 1983
aac5cc16 1984 if (GET_CODE (src) != PARALLEL)
fffa9c1d
JW
1985 abort ();
1986
1987 /* Check for a NULL entry, used to indicate that the parameter goes
1988 both on the stack and in registers. */
aac5cc16
RH
1989 if (XEXP (XVECEXP (src, 0, 0), 0))
1990 start = 0;
fffa9c1d 1991 else
aac5cc16
RH
1992 start = 1;
1993
1994 tmps = (rtx *) alloca (sizeof(rtx) * XVECLEN (src, 0));
fffa9c1d 1995
aac5cc16
RH
1996 /* Copy the (probable) hard regs into pseudos. */
1997 for (i = start; i < XVECLEN (src, 0); i++)
fffa9c1d 1998 {
aac5cc16
RH
1999 rtx reg = XEXP (XVECEXP (src, 0, i), 0);
2000 tmps[i] = gen_reg_rtx (GET_MODE (reg));
2001 emit_move_insn (tmps[i], reg);
2002 }
2003 emit_queue();
fffa9c1d 2004
aac5cc16
RH
2005 /* If we won't be storing directly into memory, protect the real destination
2006 from strange tricks we might play. */
2007 dst = orig_dst;
2008 if (GET_CODE (dst) != MEM)
2009 {
2010 dst = gen_reg_rtx (GET_MODE (orig_dst));
2011 /* Make life a bit easier for combine. */
2012 emit_move_insn (dst, const0_rtx);
2013 }
2014 else if (! MEM_IN_STRUCT_P (dst))
2015 {
2016 /* store_bit_field requires that memory operations have
2017 mem_in_struct_p set; we might not. */
fffa9c1d 2018
aac5cc16
RH
2019 dst = copy_rtx (orig_dst);
2020 MEM_IN_STRUCT_P (dst) = 1;
2021 }
2022
2023 /* Process the pieces. */
2024 for (i = start; i < XVECLEN (src, 0); i++)
2025 {
2026 int bytepos = INTVAL (XEXP (XVECEXP (src, 0, i), 1));
2027 enum machine_mode mode = GET_MODE (tmps[i]);
2028 int bytelen = GET_MODE_SIZE (mode);
2029
2030 /* Handle trailing fragments that run over the size of the struct. */
2031 if (ssize >= 0 && bytepos + bytelen > ssize)
71bc0330 2032 {
aac5cc16
RH
2033 if (BYTES_BIG_ENDIAN)
2034 {
2035 int shift = (bytelen - (ssize - bytepos)) * BITS_PER_UNIT;
2036 expand_binop (mode, ashr_optab, tmps[i], GEN_INT (shift),
2037 tmps[i], 0, OPTAB_WIDEN);
2038 }
2039 bytelen = ssize - bytepos;
71bc0330 2040 }
fffa9c1d 2041
aac5cc16
RH
2042 /* Optimize the access just a bit. */
2043 if (GET_CODE (dst) == MEM
2044 && align*BITS_PER_UNIT >= GET_MODE_ALIGNMENT (mode)
2045 && bytepos*BITS_PER_UNIT % GET_MODE_ALIGNMENT (mode) == 0
2046 && bytelen == GET_MODE_SIZE (mode))
2047 {
2048 emit_move_insn (change_address (dst, mode,
2049 plus_constant (XEXP (dst, 0),
2050 bytepos)),
2051 tmps[i]);
2052 }
2053 else
2054 {
2055 store_bit_field (dst, bytelen*BITS_PER_UNIT, bytepos*BITS_PER_UNIT,
2056 mode, tmps[i], align, ssize);
2057 }
fffa9c1d 2058 }
aac5cc16
RH
2059 emit_queue();
2060
2061 /* Copy from the pseudo into the (probable) hard reg. */
2062 if (GET_CODE (dst) == REG)
2063 emit_move_insn (orig_dst, dst);
fffa9c1d
JW
2064}
2065
94b25f81
RK
2066/* Add a USE expression for REG to the (possibly empty) list pointed
2067 to by CALL_FUSAGE. REG must denote a hard register. */
bbf6f052
RK
2068
2069void
b3f8cf4a
RK
2070use_reg (call_fusage, reg)
2071 rtx *call_fusage, reg;
2072{
0304dfbb
DE
2073 if (GET_CODE (reg) != REG
2074 || REGNO (reg) >= FIRST_PSEUDO_REGISTER)
b3f8cf4a
RK
2075 abort();
2076
2077 *call_fusage
38a448ca
RH
2078 = gen_rtx_EXPR_LIST (VOIDmode,
2079 gen_rtx_USE (VOIDmode, reg), *call_fusage);
b3f8cf4a
RK
2080}
2081
94b25f81
RK
2082/* Add USE expressions to *CALL_FUSAGE for each of NREGS consecutive regs,
2083 starting at REGNO. All of these registers must be hard registers. */
b3f8cf4a
RK
2084
2085void
0304dfbb
DE
2086use_regs (call_fusage, regno, nregs)
2087 rtx *call_fusage;
bbf6f052
RK
2088 int regno;
2089 int nregs;
2090{
0304dfbb 2091 int i;
bbf6f052 2092
0304dfbb
DE
2093 if (regno + nregs > FIRST_PSEUDO_REGISTER)
2094 abort ();
2095
2096 for (i = 0; i < nregs; i++)
38a448ca 2097 use_reg (call_fusage, gen_rtx_REG (reg_raw_mode[regno + i], regno + i));
bbf6f052 2098}
fffa9c1d
JW
2099
2100/* Add USE expressions to *CALL_FUSAGE for each REG contained in the
2101 PARALLEL REGS. This is for calls that pass values in multiple
2102 non-contiguous locations. The Irix 6 ABI has examples of this. */
2103
2104void
2105use_group_regs (call_fusage, regs)
2106 rtx *call_fusage;
2107 rtx regs;
2108{
2109 int i;
2110
6bd35f86
DE
2111 for (i = 0; i < XVECLEN (regs, 0); i++)
2112 {
2113 rtx reg = XEXP (XVECEXP (regs, 0, i), 0);
fffa9c1d 2114
6bd35f86
DE
2115 /* A NULL entry means the parameter goes both on the stack and in
2116 registers. This can also be a MEM for targets that pass values
2117 partially on the stack and partially in registers. */
e9a25f70 2118 if (reg != 0 && GET_CODE (reg) == REG)
6bd35f86
DE
2119 use_reg (call_fusage, reg);
2120 }
fffa9c1d 2121}
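/* Illustrative sketch (hypothetical register numbers): recording in a
   CALL_FUSAGE list that a call uses hard registers 4 and 5.  */
#if 0
  rtx fusage = NULL_RTX;	/* start with an empty list */
  use_regs (&fusage, 4, 2);	/* adds (use (reg 4)) and (use (reg 5)) */
#endif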
bbf6f052 2122\f
9de08200
RK
2123/* Generate several move instructions to clear LEN bytes of block TO.
2124 (A MEM rtx with BLKmode). The caller must pass TO through
 2125   protect_from_queue before calling.  ALIGN (in bytes) is the maximum alignment
2126 we can assume. */
2127
2128static void
2129clear_by_pieces (to, len, align)
2130 rtx to;
2131 int len, align;
2132{
2133 struct clear_by_pieces data;
2134 rtx to_addr = XEXP (to, 0);
2135 int max_size = MOVE_MAX + 1;
2136
2137 data.offset = 0;
2138 data.to_addr = to_addr;
2139 data.to = to;
2140 data.autinc_to
2141 = (GET_CODE (to_addr) == PRE_INC || GET_CODE (to_addr) == PRE_DEC
2142 || GET_CODE (to_addr) == POST_INC || GET_CODE (to_addr) == POST_DEC);
2143
2144 data.explicit_inc_to = 0;
2145 data.reverse
2146 = (GET_CODE (to_addr) == PRE_DEC || GET_CODE (to_addr) == POST_DEC);
2147 if (data.reverse) data.offset = len;
2148 data.len = len;
2149
2150 data.to_struct = MEM_IN_STRUCT_P (to);
2151
2152 /* If copying requires more than two move insns,
2153 copy addresses to registers (to make displacements shorter)
2154 and use post-increment if available. */
2155 if (!data.autinc_to
2156 && move_by_pieces_ninsns (len, align) > 2)
2157 {
2158#ifdef HAVE_PRE_DECREMENT
2159 if (data.reverse && ! data.autinc_to)
2160 {
2161 data.to_addr = copy_addr_to_reg (plus_constant (to_addr, len));
2162 data.autinc_to = 1;
2163 data.explicit_inc_to = -1;
2164 }
2165#endif
2166#ifdef HAVE_POST_INCREMENT
2167 if (! data.reverse && ! data.autinc_to)
2168 {
2169 data.to_addr = copy_addr_to_reg (to_addr);
2170 data.autinc_to = 1;
2171 data.explicit_inc_to = 1;
2172 }
2173#endif
2174 if (!data.autinc_to && CONSTANT_P (to_addr))
2175 data.to_addr = copy_addr_to_reg (to_addr);
2176 }
2177
2178 if (! SLOW_UNALIGNED_ACCESS
2179 || align > MOVE_MAX || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT)
2180 align = MOVE_MAX;
2181
2182 /* First move what we can in the largest integer mode, then go to
2183 successively smaller modes. */
2184
2185 while (max_size > 1)
2186 {
2187 enum machine_mode mode = VOIDmode, tmode;
2188 enum insn_code icode;
2189
2190 for (tmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2191 tmode != VOIDmode; tmode = GET_MODE_WIDER_MODE (tmode))
2192 if (GET_MODE_SIZE (tmode) < max_size)
2193 mode = tmode;
2194
2195 if (mode == VOIDmode)
2196 break;
2197
2198 icode = mov_optab->handlers[(int) mode].insn_code;
2199 if (icode != CODE_FOR_nothing
2200 && align >= MIN (BIGGEST_ALIGNMENT / BITS_PER_UNIT,
2201 GET_MODE_SIZE (mode)))
2202 clear_by_pieces_1 (GEN_FCN (icode), mode, &data);
2203
2204 max_size = GET_MODE_SIZE (mode);
2205 }
2206
2207 /* The code above should have handled everything. */
2208 if (data.len != 0)
2209 abort ();
2210}
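/* Worked example, not in the original source, assuming MOVE_MAX == 4 and
   sufficient alignment: for LEN == 7 the loop above first emits one SImode
   zero store (4 bytes, len drops to 3), then max_size falls to 4 and an
   HImode store covers 2 more bytes, then a QImode store clears the final
   byte, leaving data.len == 0 as the sanity check requires.  */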
2211
2212/* Subroutine of clear_by_pieces. Clear as many bytes as appropriate
2213 with move instructions for mode MODE. GENFUN is the gen_... function
2214 to make a move insn for that mode. DATA has all the other info. */
2215
2216static void
2217clear_by_pieces_1 (genfun, mode, data)
eae4b970 2218 rtx (*genfun) PROTO ((rtx, ...));
9de08200
RK
2219 enum machine_mode mode;
2220 struct clear_by_pieces *data;
2221{
2222 register int size = GET_MODE_SIZE (mode);
2223 register rtx to1;
2224
2225 while (data->len >= size)
2226 {
2227 if (data->reverse) data->offset -= size;
2228
2229 to1 = (data->autinc_to
38a448ca 2230 ? gen_rtx_MEM (mode, data->to_addr)
effbcc6a
RK
2231 : copy_rtx (change_address (data->to, mode,
2232 plus_constant (data->to_addr,
2233 data->offset))));
9de08200
RK
2234 MEM_IN_STRUCT_P (to1) = data->to_struct;
2235
2236#ifdef HAVE_PRE_DECREMENT
2237 if (data->explicit_inc_to < 0)
2238 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (-size)));
2239#endif
2240
2241 emit_insn ((*genfun) (to1, const0_rtx));
2242#ifdef HAVE_POST_INCREMENT
2243 if (data->explicit_inc_to > 0)
2244 emit_insn (gen_add2_insn (data->to_addr, GEN_INT (size)));
2245#endif
2246
2247 if (! data->reverse) data->offset += size;
2248
2249 data->len -= size;
2250 }
2251}
2252\f
bbf6f052 2253/* Write zeros through the storage of OBJECT.
9de08200 2254 If OBJECT has BLKmode, SIZE is its length in bytes and ALIGN is
e9a25f70 2255   the maximum alignment we can assume, measured in bytes.
bbf6f052 2256
e9a25f70
JL
2257 If we call a function that returns the length of the block, return it. */
2258
2259rtx
9de08200 2260clear_storage (object, size, align)
bbf6f052 2261 rtx object;
4c08eef0 2262 rtx size;
9de08200 2263 int align;
bbf6f052 2264{
52cf7115
JL
2265#ifdef TARGET_MEM_FUNCTIONS
2266 static tree fn;
2267 tree call_expr, arg_list;
2268#endif
e9a25f70
JL
2269 rtx retval = 0;
2270
bbf6f052
RK
2271 if (GET_MODE (object) == BLKmode)
2272 {
9de08200
RK
2273 object = protect_from_queue (object, 1);
2274 size = protect_from_queue (size, 0);
2275
2276 if (GET_CODE (size) == CONST_INT
2277 && (move_by_pieces_ninsns (INTVAL (size), align) < MOVE_RATIO))
2278 clear_by_pieces (object, INTVAL (size), align);
2279
2280 else
2281 {
2282 /* Try the most limited insn first, because there's no point
2283 including more than one in the machine description unless
2284 the more limited one has some advantage. */
2285
2286 rtx opalign = GEN_INT (align);
2287 enum machine_mode mode;
2288
2289 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != VOIDmode;
2290 mode = GET_MODE_WIDER_MODE (mode))
2291 {
2292 enum insn_code code = clrstr_optab[(int) mode];
2293
2294 if (code != CODE_FOR_nothing
2295 /* We don't need MODE to be narrower than
2296 BITS_PER_HOST_WIDE_INT here because if SIZE is less than
2297 the mode mask, as it is returned by the macro, it will
2298 definitely be less than the actual mode mask. */
2299 && ((GET_CODE (size) == CONST_INT
2300 && ((unsigned HOST_WIDE_INT) INTVAL (size)
e5e809f4 2301 <= (GET_MODE_MASK (mode) >> 1)))
9de08200
RK
2302 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2303 && (insn_operand_predicate[(int) code][0] == 0
2304 || (*insn_operand_predicate[(int) code][0]) (object,
2305 BLKmode))
2306 && (insn_operand_predicate[(int) code][2] == 0
2307 || (*insn_operand_predicate[(int) code][2]) (opalign,
2308 VOIDmode)))
2309 {
2310 rtx op1;
2311 rtx last = get_last_insn ();
2312 rtx pat;
2313
2314 op1 = convert_to_mode (mode, size, 1);
2315 if (insn_operand_predicate[(int) code][1] != 0
2316 && ! (*insn_operand_predicate[(int) code][1]) (op1,
2317 mode))
2318 op1 = copy_to_mode_reg (mode, op1);
2319
2320 pat = GEN_FCN ((int) code) (object, op1, opalign);
2321 if (pat)
2322 {
2323 emit_insn (pat);
e9a25f70 2324 return 0;
9de08200
RK
2325 }
2326 else
2327 delete_insns_since (last);
2328 }
2329 }
2330
2331
bbf6f052 2332#ifdef TARGET_MEM_FUNCTIONS
52cf7115
JL
2333 /* It is incorrect to use the libcall calling conventions to call
2334 memset in this context.
2335
2336 This could be a user call to memset and the user may wish to
2337 examine the return value from memset.
2338
2339 For targets where libcalls and normal calls have different conventions
2340 for returning pointers, we could end up generating incorrect code.
2341
2342 So instead of using a libcall sequence we build up a suitable
2343 CALL_EXPR and expand the call in the normal fashion. */
2344 if (fn == NULL_TREE)
2345 {
2346 tree fntype;
2347
 2348	  /* This was copied from except.c; I don't know whether all of this
 2349	     is necessary in this context or not.  */
2350 fn = get_identifier ("memset");
2351 push_obstacks_nochange ();
2352 end_temporary_allocation ();
2353 fntype = build_pointer_type (void_type_node);
2354 fntype = build_function_type (fntype, NULL_TREE);
2355 fn = build_decl (FUNCTION_DECL, fn, fntype);
2356 DECL_EXTERNAL (fn) = 1;
2357 TREE_PUBLIC (fn) = 1;
2358 DECL_ARTIFICIAL (fn) = 1;
2359 make_decl_rtl (fn, NULL_PTR, 1);
2360 assemble_external (fn);
2361 pop_obstacks ();
2362 }
2363
2364 /* We need to make an argument list for the function call.
2365
 2366	 memset has three arguments: the first is a void * address, the
 2367	 second an integer with the initialization value, and the last a
 2368	 size_t byte count for the bytes to set.  */
2369 arg_list
2370 = build_tree_list (NULL_TREE,
2371 make_tree (build_pointer_type (void_type_node),
2372 XEXP (object, 0)));
2373 TREE_CHAIN (arg_list)
2374 = build_tree_list (NULL_TREE,
2375 make_tree (integer_type_node, const0_rtx));
2376 TREE_CHAIN (TREE_CHAIN (arg_list))
2377 = build_tree_list (NULL_TREE, make_tree (sizetype, size));
2378 TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (arg_list))) = NULL_TREE;
2379
2380 /* Now we have to build up the CALL_EXPR itself. */
2381 call_expr = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
2382 call_expr = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fn)),
2383 call_expr, arg_list, NULL_TREE);
2384 TREE_SIDE_EFFECTS (call_expr) = 1;
2385
2386 retval = expand_expr (call_expr, NULL_RTX, VOIDmode, 0);
bbf6f052 2387#else
9de08200
RK
2388 emit_library_call (bzero_libfunc, 0,
2389 VOIDmode, 2,
2390 XEXP (object, 0), Pmode,
e9a25f70
JL
2391 convert_to_mode
2392 (TYPE_MODE (integer_type_node), size,
2393 TREE_UNSIGNED (integer_type_node)),
9de08200 2394 TYPE_MODE (integer_type_node));
bbf6f052 2395#endif
9de08200 2396 }
bbf6f052
RK
2397 }
2398 else
66ed0683 2399 emit_move_insn (object, CONST0_RTX (GET_MODE (object)));
e9a25f70
JL
2400
2401 return retval;
bbf6f052
RK
2402}
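/* Illustrative sketch, hypothetical operands: zeroing a 32-byte BLKmode
   object known to be 4-byte aligned.  */
#if 0
  clear_storage (object, GEN_INT (32), 4);
#endif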
2403
2404/* Generate code to copy Y into X.
2405 Both Y and X must have the same mode, except that
2406 Y can be a constant with VOIDmode.
2407 This mode cannot be BLKmode; use emit_block_move for that.
2408
2409 Return the last instruction emitted. */
2410
2411rtx
2412emit_move_insn (x, y)
2413 rtx x, y;
2414{
2415 enum machine_mode mode = GET_MODE (x);
bbf6f052
RK
2416
2417 x = protect_from_queue (x, 1);
2418 y = protect_from_queue (y, 0);
2419
2420 if (mode == BLKmode || (GET_MODE (y) != mode && GET_MODE (y) != VOIDmode))
2421 abort ();
2422
2423 if (CONSTANT_P (y) && ! LEGITIMATE_CONSTANT_P (y))
2424 y = force_const_mem (mode, y);
2425
2426 /* If X or Y are memory references, verify that their addresses are valid
2427 for the machine. */
2428 if (GET_CODE (x) == MEM
2429 && ((! memory_address_p (GET_MODE (x), XEXP (x, 0))
2430 && ! push_operand (x, GET_MODE (x)))
2431 || (flag_force_addr
2432 && CONSTANT_ADDRESS_P (XEXP (x, 0)))))
2433 x = change_address (x, VOIDmode, XEXP (x, 0));
2434
2435 if (GET_CODE (y) == MEM
2436 && (! memory_address_p (GET_MODE (y), XEXP (y, 0))
2437 || (flag_force_addr
2438 && CONSTANT_ADDRESS_P (XEXP (y, 0)))))
2439 y = change_address (y, VOIDmode, XEXP (y, 0));
2440
2441 if (mode == BLKmode)
2442 abort ();
2443
261c4230
RS
2444 return emit_move_insn_1 (x, y);
2445}
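/* Illustrative sketch, not in the original source: copying one SImode
   pseudo register into another; as checked above, both operands must
   share a mode (or Y may be a VOIDmode constant).  */
#if 0
  rtx a = gen_reg_rtx (SImode), b = gen_reg_rtx (SImode);
  emit_move_insn (a, b);	/* copy B into A; returns the insn emitted */
#endif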
2446
2447/* Low level part of emit_move_insn.
2448 Called just like emit_move_insn, but assumes X and Y
2449 are basically valid. */
2450
2451rtx
2452emit_move_insn_1 (x, y)
2453 rtx x, y;
2454{
2455 enum machine_mode mode = GET_MODE (x);
2456 enum machine_mode submode;
2457 enum mode_class class = GET_MODE_CLASS (mode);
2458 int i;
2459
bbf6f052
RK
2460 if (mov_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
2461 return
2462 emit_insn (GEN_FCN (mov_optab->handlers[(int) mode].insn_code) (x, y));
2463
89742723 2464 /* Expand complex moves by moving real part and imag part, if possible. */
7308a047 2465 else if ((class == MODE_COMPLEX_FLOAT || class == MODE_COMPLEX_INT)
d0c76654
RK
2466 && BLKmode != (submode = mode_for_size ((GET_MODE_UNIT_SIZE (mode)
2467 * BITS_PER_UNIT),
2468 (class == MODE_COMPLEX_INT
2469 ? MODE_INT : MODE_FLOAT),
2470 0))
7308a047
RS
2471 && (mov_optab->handlers[(int) submode].insn_code
2472 != CODE_FOR_nothing))
2473 {
2474 /* Don't split destination if it is a stack push. */
2475 int stack = push_operand (x, GET_MODE (x));
7308a047 2476
7308a047
RS
2477 /* If this is a stack, push the highpart first, so it
2478 will be in the argument order.
2479
2480 In that case, change_address is used only to convert
2481 the mode, not to change the address. */
c937357e
RS
2482 if (stack)
2483 {
e33c0d66
RS
2484 /* Note that the real part always precedes the imag part in memory
 2485	     regardless of the machine's endianness.  */
c937357e
RS
2486#ifdef STACK_GROWS_DOWNWARD
2487 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2488 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2489 gen_imagpart (submode, y)));
c937357e 2490 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2491 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2492 gen_realpart (submode, y)));
c937357e
RS
2493#else
2494 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2495 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2496 gen_realpart (submode, y)));
c937357e 2497 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
38a448ca 2498 (gen_rtx_MEM (submode, (XEXP (x, 0))),
e33c0d66 2499 gen_imagpart (submode, y)));
c937357e
RS
2500#endif
2501 }
2502 else
2503 {
2638126a
BS
2504 /* Show the output dies here. */
2505 if (x != y)
9e6a5703 2506 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
2638126a 2507
c937357e 2508 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 2509 (gen_realpart (submode, x), gen_realpart (submode, y)));
c937357e 2510 emit_insn (GEN_FCN (mov_optab->handlers[(int) submode].insn_code)
976ff203 2511 (gen_imagpart (submode, x), gen_imagpart (submode, y)));
c937357e 2512 }
7308a047 2513
7a1ab50a 2514 return get_last_insn ();
7308a047
RS
2515 }
2516
bbf6f052
RK
2517 /* This will handle any multi-word mode that lacks a move_insn pattern.
2518 However, you will get better code if you define such patterns,
2519 even if they must turn into multiple assembler instructions. */
a4320483 2520 else if (GET_MODE_SIZE (mode) > UNITS_PER_WORD)
bbf6f052
RK
2521 {
2522 rtx last_insn = 0;
6551fa4d 2523
a98c9f1a
RK
2524#ifdef PUSH_ROUNDING
2525
2526 /* If X is a push on the stack, do the push now and replace
2527 X with a reference to the stack pointer. */
2528 if (push_operand (x, GET_MODE (x)))
2529 {
2530 anti_adjust_stack (GEN_INT (GET_MODE_SIZE (GET_MODE (x))));
2531 x = change_address (x, VOIDmode, stack_pointer_rtx);
2532 }
2533#endif
2534
15a7a8ec 2535 /* Show the output dies here. */
43e046cb 2536 if (x != y)
38a448ca 2537 emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
15a7a8ec 2538
bbf6f052
RK
2539 for (i = 0;
2540 i < (GET_MODE_SIZE (mode) + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD;
2541 i++)
2542 {
2543 rtx xpart = operand_subword (x, i, 1, mode);
2544 rtx ypart = operand_subword (y, i, 1, mode);
2545
2546 /* If we can't get a part of Y, put Y into memory if it is a
2547 constant. Otherwise, force it into a register. If we still
2548 can't get a part of Y, abort. */
2549 if (ypart == 0 && CONSTANT_P (y))
2550 {
2551 y = force_const_mem (mode, y);
2552 ypart = operand_subword (y, i, 1, mode);
2553 }
2554 else if (ypart == 0)
2555 ypart = operand_subword_force (y, i, mode);
2556
2557 if (xpart == 0 || ypart == 0)
2558 abort ();
2559
2560 last_insn = emit_move_insn (xpart, ypart);
2561 }
6551fa4d 2562
bbf6f052
RK
2563 return last_insn;
2564 }
2565 else
2566 abort ();
2567}
2568\f
2569/* Pushing data onto the stack. */
2570
2571/* Push a block of length SIZE (perhaps variable)
2572 and return an rtx to address the beginning of the block.
2573 Note that it is not possible for the value returned to be a QUEUED.
2574 The value may be virtual_outgoing_args_rtx.
2575
2576 EXTRA is the number of bytes of padding to push in addition to SIZE.
2577 BELOW nonzero means this padding comes at low addresses;
2578 otherwise, the padding comes at high addresses. */
2579
2580rtx
2581push_block (size, extra, below)
2582 rtx size;
2583 int extra, below;
2584{
2585 register rtx temp;
88f63c77
RK
2586
2587 size = convert_modes (Pmode, ptr_mode, size, 1);
bbf6f052
RK
2588 if (CONSTANT_P (size))
2589 anti_adjust_stack (plus_constant (size, extra));
2590 else if (GET_CODE (size) == REG && extra == 0)
2591 anti_adjust_stack (size);
2592 else
2593 {
2594 rtx temp = copy_to_mode_reg (Pmode, size);
2595 if (extra != 0)
906c4e36 2596 temp = expand_binop (Pmode, add_optab, temp, GEN_INT (extra),
bbf6f052
RK
2597 temp, 0, OPTAB_LIB_WIDEN);
2598 anti_adjust_stack (temp);
2599 }
2600
2601#ifdef STACK_GROWS_DOWNWARD
2602 temp = virtual_outgoing_args_rtx;
2603 if (extra != 0 && below)
2604 temp = plus_constant (temp, extra);
2605#else
2606 if (GET_CODE (size) == CONST_INT)
2607 temp = plus_constant (virtual_outgoing_args_rtx,
2608 - INTVAL (size) - (below ? 0 : extra));
2609 else if (extra != 0 && !below)
38a448ca 2610 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
bbf6f052
RK
2611 negate_rtx (Pmode, plus_constant (size, extra)));
2612 else
38a448ca 2613 temp = gen_rtx_PLUS (Pmode, virtual_outgoing_args_rtx,
bbf6f052
RK
2614 negate_rtx (Pmode, size));
2615#endif
2616
2617 return memory_address (GET_CLASS_NARROWEST_MODE (MODE_INT), temp);
2618}
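/* Illustrative sketch, hypothetical sizes: reserve 16 bytes of argument
   space plus 4 bytes of padding at low addresses, and get the address of
   the beginning of the block.  */
#if 0
  rtx block = push_block (GEN_INT (16), 4, 1);
#endif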
2619
87e38d84 2620rtx
bbf6f052
RK
2621gen_push_operand ()
2622{
38a448ca 2623 return gen_rtx_fmt_e (STACK_PUSH_CODE, Pmode, stack_pointer_rtx);
bbf6f052
RK
2624}
2625
921b3427
RK
2626/* Return an rtx for the address of the beginning of an as-if-it-was-pushed
2627 block of SIZE bytes. */
2628
2629static rtx
2630get_push_address (size)
2631 int size;
2632{
2633 register rtx temp;
2634
2635 if (STACK_PUSH_CODE == POST_DEC)
38a448ca 2636 temp = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427 2637 else if (STACK_PUSH_CODE == POST_INC)
38a448ca 2638 temp = gen_rtx_MINUS (Pmode, stack_pointer_rtx, GEN_INT (size));
921b3427
RK
2639 else
2640 temp = stack_pointer_rtx;
2641
c85f7c16 2642 return copy_to_reg (temp);
921b3427
RK
2643}
2644
bbf6f052
RK
2645/* Generate code to push X onto the stack, assuming it has mode MODE and
2646 type TYPE.
2647 MODE is redundant except when X is a CONST_INT (since they don't
2648 carry mode info).
2649 SIZE is an rtx for the size of data to be copied (in bytes),
2650 needed only if X is BLKmode.
2651
2652 ALIGN (in bytes) is maximum alignment we can assume.
2653
cd048831
RK
2654 If PARTIAL and REG are both nonzero, then copy that many of the first
2655 words of X into registers starting with REG, and push the rest of X.
bbf6f052
RK
2656 The amount of space pushed is decreased by PARTIAL words,
2657 rounded *down* to a multiple of PARM_BOUNDARY.
2658 REG must be a hard register in this case.
cd048831
RK
 2659   If REG is zero but PARTIAL is not, take all other actions for an
2660 argument partially in registers, but do not actually load any
2661 registers.
bbf6f052
RK
2662
2663 EXTRA is the amount in bytes of extra space to leave next to this arg.
6dc42e49 2664 This is ignored if an argument block has already been allocated.
bbf6f052
RK
2665
2666 On a machine that lacks real push insns, ARGS_ADDR is the address of
2667 the bottom of the argument block for this call. We use indexing off there
 2668   to store the arg.  On machines with push insns, ARGS_ADDR is 0 when an
2669 argument block has not been preallocated.
2670
e5e809f4
JL
2671 ARGS_SO_FAR is the size of args previously pushed for this call.
2672
2673 REG_PARM_STACK_SPACE is nonzero if functions require stack space
2674 for arguments passed in registers. If nonzero, it will be the number
2675 of bytes required. */
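/* Illustrative sketch, hypothetical operands: pushing a single SImode
   value with no partial-register words and no preallocated argument
   block, loosely mirroring the recursive call made for scalars below.  */
#if 0
  emit_push_insn (x, SImode, NULL_TREE, NULL_RTX, 4 /* align */,
		  0 /* partial */, NULL_RTX /* reg */, 0 /* extra */,
		  NULL_RTX /* args_addr */, const0_rtx /* args_so_far */,
		  0 /* reg_parm_stack_space */);
#endif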
bbf6f052
RK
2676
2677void
2678emit_push_insn (x, mode, type, size, align, partial, reg, extra,
e5e809f4 2679 args_addr, args_so_far, reg_parm_stack_space)
bbf6f052
RK
2680 register rtx x;
2681 enum machine_mode mode;
2682 tree type;
2683 rtx size;
2684 int align;
2685 int partial;
2686 rtx reg;
2687 int extra;
2688 rtx args_addr;
2689 rtx args_so_far;
e5e809f4 2690 int reg_parm_stack_space;
bbf6f052
RK
2691{
2692 rtx xinner;
2693 enum direction stack_direction
2694#ifdef STACK_GROWS_DOWNWARD
2695 = downward;
2696#else
2697 = upward;
2698#endif
2699
2700 /* Decide where to pad the argument: `downward' for below,
2701 `upward' for above, or `none' for don't pad it.
2702 Default is below for small data on big-endian machines; else above. */
2703 enum direction where_pad = FUNCTION_ARG_PADDING (mode, type);
2704
2705 /* Invert direction if stack is post-update. */
2706 if (STACK_PUSH_CODE == POST_INC || STACK_PUSH_CODE == POST_DEC)
2707 if (where_pad != none)
2708 where_pad = (where_pad == downward ? upward : downward);
2709
2710 xinner = x = protect_from_queue (x, 0);
2711
2712 if (mode == BLKmode)
2713 {
2714 /* Copy a block into the stack, entirely or partially. */
2715
2716 register rtx temp;
2717 int used = partial * UNITS_PER_WORD;
2718 int offset = used % (PARM_BOUNDARY / BITS_PER_UNIT);
2719 int skip;
2720
2721 if (size == 0)
2722 abort ();
2723
2724 used -= offset;
2725
2726 /* USED is now the # of bytes we need not copy to the stack
2727 because registers will take care of them. */
2728
2729 if (partial != 0)
2730 xinner = change_address (xinner, BLKmode,
2731 plus_constant (XEXP (xinner, 0), used));
2732
2733 /* If the partial register-part of the arg counts in its stack size,
2734 skip the part of stack space corresponding to the registers.
2735 Otherwise, start copying to the beginning of the stack space,
2736 by setting SKIP to 0. */
e5e809f4 2737 skip = (reg_parm_stack_space == 0) ? 0 : used;
bbf6f052
RK
2738
2739#ifdef PUSH_ROUNDING
2740 /* Do it with several push insns if that doesn't take lots of insns
2741 and if there is no difficulty with push insns that skip bytes
2742 on the stack for alignment purposes. */
2743 if (args_addr == 0
2744 && GET_CODE (size) == CONST_INT
2745 && skip == 0
2746 && (move_by_pieces_ninsns ((unsigned) INTVAL (size) - used, align)
2747 < MOVE_RATIO)
bbf6f052
RK
2748 /* Here we avoid the case of a structure whose weak alignment
2749 forces many pushes of a small amount of data,
2750 and such small pushes do rounding that causes trouble. */
c7a7ac46 2751 && ((! SLOW_UNALIGNED_ACCESS)
e87b4f3f 2752 || align >= BIGGEST_ALIGNMENT / BITS_PER_UNIT
bbf6f052 2753 || PUSH_ROUNDING (align) == align)
bbf6f052
RK
2754 && PUSH_ROUNDING (INTVAL (size)) == INTVAL (size))
2755 {
2756 /* Push padding now if padding above and stack grows down,
2757 or if padding below and stack grows up.
2758 But if space already allocated, this has already been done. */
2759 if (extra && args_addr == 0
2760 && where_pad != none && where_pad != stack_direction)
906c4e36 2761 anti_adjust_stack (GEN_INT (extra));
bbf6f052 2762
38a448ca 2763 move_by_pieces (gen_rtx_MEM (BLKmode, gen_push_operand ()), xinner,
bbf6f052 2764 INTVAL (size) - used, align);
921b3427 2765
956d6950 2766 if (flag_check_memory_usage && ! in_check_memory_usage)
921b3427
RK
2767 {
2768 rtx temp;
2769
956d6950 2770 in_check_memory_usage = 1;
921b3427 2771 temp = get_push_address (INTVAL(size) - used);
c85f7c16 2772 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427
RK
2773 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2774 temp, ptr_mode,
2775 XEXP (xinner, 0), ptr_mode,
2776 GEN_INT (INTVAL(size) - used),
2777 TYPE_MODE (sizetype));
2778 else
2779 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2780 temp, ptr_mode,
2781 GEN_INT (INTVAL(size) - used),
2782 TYPE_MODE (sizetype),
956d6950
JL
2783 GEN_INT (MEMORY_USE_RW),
2784 TYPE_MODE (integer_type_node));
2785 in_check_memory_usage = 0;
921b3427 2786 }
bbf6f052
RK
2787 }
2788 else
2789#endif /* PUSH_ROUNDING */
2790 {
2791 /* Otherwise make space on the stack and copy the data
2792 to the address of that space. */
2793
2794 /* Deduct words put into registers from the size we must copy. */
2795 if (partial != 0)
2796 {
2797 if (GET_CODE (size) == CONST_INT)
906c4e36 2798 size = GEN_INT (INTVAL (size) - used);
bbf6f052
RK
2799 else
2800 size = expand_binop (GET_MODE (size), sub_optab, size,
906c4e36
RK
2801 GEN_INT (used), NULL_RTX, 0,
2802 OPTAB_LIB_WIDEN);
bbf6f052
RK
2803 }
2804
2805 /* Get the address of the stack space.
2806 In this case, we do not deal with EXTRA separately.
2807 A single stack adjust will do. */
2808 if (! args_addr)
2809 {
2810 temp = push_block (size, extra, where_pad == downward);
2811 extra = 0;
2812 }
2813 else if (GET_CODE (args_so_far) == CONST_INT)
2814 temp = memory_address (BLKmode,
2815 plus_constant (args_addr,
2816 skip + INTVAL (args_so_far)));
2817 else
2818 temp = memory_address (BLKmode,
38a448ca
RH
2819 plus_constant (gen_rtx_PLUS (Pmode,
2820 args_addr,
2821 args_so_far),
bbf6f052 2822 skip));
956d6950 2823 if (flag_check_memory_usage && ! in_check_memory_usage)
921b3427
RK
2824 {
2825 rtx target;
2826
956d6950 2827 in_check_memory_usage = 1;
921b3427 2828 target = copy_to_reg (temp);
c85f7c16 2829 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427
RK
2830 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
2831 target, ptr_mode,
2832 XEXP (xinner, 0), ptr_mode,
2833 size, TYPE_MODE (sizetype));
2834 else
2835 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
2836 target, ptr_mode,
2837 size, TYPE_MODE (sizetype),
956d6950
JL
2838 GEN_INT (MEMORY_USE_RW),
2839 TYPE_MODE (integer_type_node));
2840 in_check_memory_usage = 0;
921b3427 2841 }
bbf6f052
RK
2842
2843 /* TEMP is the address of the block. Copy the data there. */
2844 if (GET_CODE (size) == CONST_INT
2845 && (move_by_pieces_ninsns ((unsigned) INTVAL (size), align)
2846 < MOVE_RATIO))
2847 {
38a448ca 2848 move_by_pieces (gen_rtx_MEM (BLKmode, temp), xinner,
bbf6f052
RK
2849 INTVAL (size), align);
2850 goto ret;
2851 }
e5e809f4 2852 else
bbf6f052 2853 {
e5e809f4
JL
2854 rtx opalign = GEN_INT (align);
2855 enum machine_mode mode;
9e6a5703 2856 rtx target = gen_rtx_MEM (BLKmode, temp);
e5e809f4
JL
2857
2858 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
2859 mode != VOIDmode;
2860 mode = GET_MODE_WIDER_MODE (mode))
c841050e 2861 {
e5e809f4
JL
2862 enum insn_code code = movstr_optab[(int) mode];
2863
2864 if (code != CODE_FOR_nothing
2865 && ((GET_CODE (size) == CONST_INT
2866 && ((unsigned HOST_WIDE_INT) INTVAL (size)
2867 <= (GET_MODE_MASK (mode) >> 1)))
2868 || GET_MODE_BITSIZE (mode) >= BITS_PER_WORD)
2869 && (insn_operand_predicate[(int) code][0] == 0
2870 || ((*insn_operand_predicate[(int) code][0])
2871 (target, BLKmode)))
2872 && (insn_operand_predicate[(int) code][1] == 0
2873 || ((*insn_operand_predicate[(int) code][1])
2874 (xinner, BLKmode)))
2875 && (insn_operand_predicate[(int) code][3] == 0
2876 || ((*insn_operand_predicate[(int) code][3])
2877 (opalign, VOIDmode))))
2878 {
2879 rtx op2 = convert_to_mode (mode, size, 1);
2880 rtx last = get_last_insn ();
2881 rtx pat;
2882
2883 if (insn_operand_predicate[(int) code][2] != 0
2884 && ! ((*insn_operand_predicate[(int) code][2])
2885 (op2, mode)))
2886 op2 = copy_to_mode_reg (mode, op2);
2887
2888 pat = GEN_FCN ((int) code) (target, xinner,
2889 op2, opalign);
2890 if (pat)
2891 {
2892 emit_insn (pat);
2893 goto ret;
2894 }
2895 else
2896 delete_insns_since (last);
2897 }
c841050e 2898 }
bbf6f052 2899 }
bbf6f052
RK
2900
2901#ifndef ACCUMULATE_OUTGOING_ARGS
2902 /* If the source is referenced relative to the stack pointer,
2903 copy it to another register to stabilize it. We do not need
2904 to do this if we know that we won't be changing sp. */
2905
2906 if (reg_mentioned_p (virtual_stack_dynamic_rtx, temp)
2907 || reg_mentioned_p (virtual_outgoing_args_rtx, temp))
2908 temp = copy_to_reg (temp);
2909#endif
2910
2911 /* Make inhibit_defer_pop nonzero around the library call
2912 to force it to pop the bcopy-arguments right away. */
2913 NO_DEFER_POP;
2914#ifdef TARGET_MEM_FUNCTIONS
d562e42e 2915 emit_library_call (memcpy_libfunc, 0,
bbf6f052 2916 VOIDmode, 3, temp, Pmode, XEXP (xinner, 0), Pmode,
0fa83258
RK
2917 convert_to_mode (TYPE_MODE (sizetype),
2918 size, TREE_UNSIGNED (sizetype)),
26ba80fc 2919 TYPE_MODE (sizetype));
bbf6f052 2920#else
d562e42e 2921 emit_library_call (bcopy_libfunc, 0,
bbf6f052 2922 VOIDmode, 3, XEXP (xinner, 0), Pmode, temp, Pmode,
3b6f75e2
JW
2923 convert_to_mode (TYPE_MODE (integer_type_node),
2924 size,
2925 TREE_UNSIGNED (integer_type_node)),
2926 TYPE_MODE (integer_type_node));
bbf6f052
RK
2927#endif
2928 OK_DEFER_POP;
2929 }
2930 }
2931 else if (partial > 0)
2932 {
2933 /* Scalar partly in registers. */
2934
2935 int size = GET_MODE_SIZE (mode) / UNITS_PER_WORD;
2936 int i;
2937 int not_stack;
2938 /* # words of start of argument
2939 that we must make space for but need not store. */
2940 int offset = partial % (PARM_BOUNDARY / BITS_PER_WORD);
2941 int args_offset = INTVAL (args_so_far);
2942 int skip;
2943
2944 /* Push padding now if padding above and stack grows down,
2945 or if padding below and stack grows up.
2946 But if space already allocated, this has already been done. */
2947 if (extra && args_addr == 0
2948 && where_pad != none && where_pad != stack_direction)
906c4e36 2949 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
2950
2951 /* If we make space by pushing it, we might as well push
2952 the real data. Otherwise, we can leave OFFSET nonzero
2953 and leave the space uninitialized. */
2954 if (args_addr == 0)
2955 offset = 0;
2956
2957 /* Now NOT_STACK gets the number of words that we don't need to
2958 allocate on the stack. */
2959 not_stack = partial - offset;
2960
2961 /* If the partial register-part of the arg counts in its stack size,
2962 skip the part of stack space corresponding to the registers.
2963 Otherwise, start copying to the beginning of the stack space,
2964 by setting SKIP to 0. */
e5e809f4 2965 skip = (reg_parm_stack_space == 0) ? 0 : not_stack;
bbf6f052
RK
2966
2967 if (CONSTANT_P (x) && ! LEGITIMATE_CONSTANT_P (x))
2968 x = validize_mem (force_const_mem (mode, x));
2969
2970 /* If X is a hard register in a non-integer mode, copy it into a pseudo;
2971 SUBREGs of such registers are not allowed. */
2972 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER
2973 && GET_MODE_CLASS (GET_MODE (x)) != MODE_INT))
2974 x = copy_to_reg (x);
2975
2976 /* Loop over all the words allocated on the stack for this arg. */
2977 /* We can do it by words, because any scalar bigger than a word
2978 has a size a multiple of a word. */
2979#ifndef PUSH_ARGS_REVERSED
2980 for (i = not_stack; i < size; i++)
2981#else
2982 for (i = size - 1; i >= not_stack; i--)
2983#endif
2984 if (i >= not_stack + offset)
2985 emit_push_insn (operand_subword_force (x, i, mode),
906c4e36
RK
2986 word_mode, NULL_TREE, NULL_RTX, align, 0, NULL_RTX,
2987 0, args_addr,
2988 GEN_INT (args_offset + ((i - not_stack + skip)
e5e809f4
JL
2989 * UNITS_PER_WORD)),
2990 reg_parm_stack_space);
bbf6f052
RK
2991 }
2992 else
2993 {
2994 rtx addr;
921b3427 2995 rtx target = NULL_RTX;
bbf6f052
RK
2996
2997 /* Push padding now if padding above and stack grows down,
2998 or if padding below and stack grows up.
2999 But if space already allocated, this has already been done. */
3000 if (extra && args_addr == 0
3001 && where_pad != none && where_pad != stack_direction)
906c4e36 3002 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3003
3004#ifdef PUSH_ROUNDING
3005 if (args_addr == 0)
3006 addr = gen_push_operand ();
3007 else
3008#endif
921b3427
RK
3009 {
3010 if (GET_CODE (args_so_far) == CONST_INT)
3011 addr
3012 = memory_address (mode,
3013 plus_constant (args_addr,
3014 INTVAL (args_so_far)));
3015 else
38a448ca
RH
3016 addr = memory_address (mode, gen_rtx_PLUS (Pmode, args_addr,
3017 args_so_far));
921b3427
RK
3018 target = addr;
3019 }
bbf6f052 3020
38a448ca 3021 emit_move_insn (gen_rtx_MEM (mode, addr), x);
921b3427 3022
956d6950 3023 if (flag_check_memory_usage && ! in_check_memory_usage)
921b3427 3024 {
956d6950 3025 in_check_memory_usage = 1;
921b3427
RK
3026 if (target == 0)
3027 target = get_push_address (GET_MODE_SIZE (mode));
3028
c85f7c16 3029 if (GET_CODE (x) == MEM && type && AGGREGATE_TYPE_P (type))
921b3427
RK
3030 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3031 target, ptr_mode,
3032 XEXP (x, 0), ptr_mode,
3033 GEN_INT (GET_MODE_SIZE (mode)),
3034 TYPE_MODE (sizetype));
3035 else
3036 emit_library_call (chkr_set_right_libfunc, 1, VOIDmode, 3,
3037 target, ptr_mode,
3038 GEN_INT (GET_MODE_SIZE (mode)),
3039 TYPE_MODE (sizetype),
956d6950
JL
3040 GEN_INT (MEMORY_USE_RW),
3041 TYPE_MODE (integer_type_node));
3042 in_check_memory_usage = 0;
921b3427 3043 }
bbf6f052
RK
3044 }
3045
3046 ret:
3047 /* If part should go in registers, copy that part
3048 into the appropriate registers. Do this now, at the end,
3049 since mem-to-mem copies above may do function calls. */
cd048831 3050 if (partial > 0 && reg != 0)
fffa9c1d
JW
3051 {
3052 /* Handle calls that pass values in multiple non-contiguous locations.
3053 The Irix 6 ABI has examples of this. */
3054 if (GET_CODE (reg) == PARALLEL)
aac5cc16 3055 emit_group_load (reg, x, -1, align); /* ??? size? */
fffa9c1d
JW
3056 else
3057 move_block_to_reg (REGNO (reg), x, partial, mode);
3058 }
bbf6f052
RK
3059
3060 if (extra && args_addr == 0 && where_pad == stack_direction)
906c4e36 3061 anti_adjust_stack (GEN_INT (extra));
bbf6f052
RK
3062}
3063\f
bbf6f052
RK
3064/* Expand an assignment that stores the value of FROM into TO.
3065 If WANT_VALUE is nonzero, return an rtx for the value of TO.
709f5be1
RS
3066 (This may contain a QUEUED rtx;
3067 if the value is constant, this rtx is a constant.)
3068 Otherwise, the returned value is NULL_RTX.
bbf6f052
RK
3069
3070 SUGGEST_REG is no longer actually used.
 3071   It used to mean: copy the value through a register
3072 and return that register, if that is possible.
709f5be1 3073 We now use WANT_VALUE to decide whether to do this. */
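/* Illustrative sketch: expanding `to = from' for its side effects only;
   passing WANT_VALUE == 1 instead would return an rtx for TO's value.  */
#if 0
  expand_assignment (to, from, 0, 0);
#endif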
bbf6f052
RK
3074
3075rtx
3076expand_assignment (to, from, want_value, suggest_reg)
3077 tree to, from;
3078 int want_value;
3079 int suggest_reg;
3080{
3081 register rtx to_rtx = 0;
3082 rtx result;
3083
3084 /* Don't crash if the lhs of the assignment was erroneous. */
3085
3086 if (TREE_CODE (to) == ERROR_MARK)
709f5be1
RS
3087 {
3088 result = expand_expr (from, NULL_RTX, VOIDmode, 0);
3089 return want_value ? result : NULL_RTX;
3090 }
bbf6f052
RK
3091
3092 /* Assignment of a structure component needs special treatment
3093 if the structure component's rtx is not simply a MEM.
6be58303
JW
3094 Assignment of an array element at a constant index, and assignment of
 3095     an array element in an unaligned packed structure field, have the same
3096 problem. */
bbf6f052 3097
08293add
RK
3098 if (TREE_CODE (to) == COMPONENT_REF || TREE_CODE (to) == BIT_FIELD_REF
3099 || TREE_CODE (to) == ARRAY_REF)
bbf6f052
RK
3100 {
3101 enum machine_mode mode1;
3102 int bitsize;
3103 int bitpos;
7bb0943f 3104 tree offset;
bbf6f052
RK
3105 int unsignedp;
3106 int volatilep = 0;
0088fcb1 3107 tree tem;
d78d243c 3108 int alignment;
0088fcb1
RK
3109
3110 push_temp_slots ();
839c4796
RK
3111 tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
3112 &unsignedp, &volatilep, &alignment);
bbf6f052
RK
3113
3114 /* If we are going to use store_bit_field and extract_bit_field,
3115 make sure to_rtx will be safe for multiple use. */
3116
3117 if (mode1 == VOIDmode && want_value)
3118 tem = stabilize_reference (tem);
3119
921b3427 3120 to_rtx = expand_expr (tem, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_DONT);
7bb0943f
RS
3121 if (offset != 0)
3122 {
906c4e36 3123 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
7bb0943f
RS
3124
3125 if (GET_CODE (to_rtx) != MEM)
3126 abort ();
bd070e1a
RH
3127
3128 if (GET_MODE (offset_rtx) != ptr_mode)
3129 {
3130#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 3131 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
3132#else
3133 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3134#endif
3135 }
3136
89752202
HB
3137 if (GET_CODE (to_rtx) == MEM
3138 && GET_MODE (to_rtx) == BLKmode
3139 && bitsize
3140 && (bitpos % bitsize) == 0
3141 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
3142 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
3143 {
3144 rtx temp = change_address (to_rtx, mode1,
3145 plus_constant (XEXP (to_rtx, 0),
3146 (bitpos /
3147 BITS_PER_UNIT)));
3148 if (GET_CODE (XEXP (temp, 0)) == REG)
3149 to_rtx = temp;
3150 else
3151 to_rtx = change_address (to_rtx, mode1,
3152 force_reg (GET_MODE (XEXP (temp, 0)),
3153 XEXP (temp, 0)));
3154 bitpos = 0;
3155 }
3156
7bb0943f 3157 to_rtx = change_address (to_rtx, VOIDmode,
38a448ca
RH
3158 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
3159 force_reg (ptr_mode, offset_rtx)));
7bb0943f 3160 }
bbf6f052
RK
3161 if (volatilep)
3162 {
3163 if (GET_CODE (to_rtx) == MEM)
01188446
JW
3164 {
3165 /* When the offset is zero, to_rtx is the address of the
3166 structure we are storing into, and hence may be shared.
3167 We must make a new MEM before setting the volatile bit. */
3168 if (offset == 0)
effbcc6a
RK
3169 to_rtx = copy_rtx (to_rtx);
3170
01188446
JW
3171 MEM_VOLATILE_P (to_rtx) = 1;
3172 }
bbf6f052
RK
3173#if 0 /* This was turned off because, when a field is volatile
3174 in an object which is not volatile, the object may be in a register,
3175 and then we would abort over here. */
3176 else
3177 abort ();
3178#endif
3179 }
3180
956d6950
JL
3181 if (TREE_CODE (to) == COMPONENT_REF
3182 && TREE_READONLY (TREE_OPERAND (to, 1)))
3183 {
8bd6ecc2 3184 if (offset == 0)
956d6950
JL
3185 to_rtx = copy_rtx (to_rtx);
3186
3187 RTX_UNCHANGING_P (to_rtx) = 1;
3188 }
3189
921b3427
RK
3190 /* Check the access. */
3191 if (flag_check_memory_usage && GET_CODE (to_rtx) == MEM)
3192 {
3193 rtx to_addr;
3194 int size;
3195 int best_mode_size;
3196 enum machine_mode best_mode;
3197
3198 best_mode = get_best_mode (bitsize, bitpos,
3199 TYPE_ALIGN (TREE_TYPE (tem)),
3200 mode1, volatilep);
3201 if (best_mode == VOIDmode)
3202 best_mode = QImode;
3203
3204 best_mode_size = GET_MODE_BITSIZE (best_mode);
3205 to_addr = plus_constant (XEXP (to_rtx, 0), (bitpos / BITS_PER_UNIT));
3206 size = CEIL ((bitpos % best_mode_size) + bitsize, best_mode_size);
3207 size *= GET_MODE_SIZE (best_mode);
3208
3209 /* Check the access right of the pointer. */
e9a25f70
JL
3210 if (size)
3211 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3212 to_addr, ptr_mode,
3213 GEN_INT (size), TYPE_MODE (sizetype),
956d6950
JL
3214 GEN_INT (MEMORY_USE_WO),
3215 TYPE_MODE (integer_type_node));
921b3427
RK
3216 }
3217
bbf6f052
RK
3218 result = store_field (to_rtx, bitsize, bitpos, mode1, from,
3219 (want_value
3220 /* Spurious cast makes HPUX compiler happy. */
3221 ? (enum machine_mode) TYPE_MODE (TREE_TYPE (to))
3222 : VOIDmode),
3223 unsignedp,
3224 /* Required alignment of containing datum. */
d78d243c 3225 alignment,
bbf6f052
RK
3226 int_size_in_bytes (TREE_TYPE (tem)));
3227 preserve_temp_slots (result);
3228 free_temp_slots ();
0088fcb1 3229 pop_temp_slots ();
bbf6f052 3230
709f5be1
RS
3231 /* If the value is meaningful, convert RESULT to the proper mode.
3232 Otherwise, return nothing. */
5ffe63ed
RS
3233 return (want_value ? convert_modes (TYPE_MODE (TREE_TYPE (to)),
3234 TYPE_MODE (TREE_TYPE (from)),
3235 result,
3236 TREE_UNSIGNED (TREE_TYPE (to)))
709f5be1 3237 : NULL_RTX);
bbf6f052
RK
3238 }
3239
cd1db108
RS
3240 /* If the rhs is a function call and its value is not an aggregate,
3241 call the function before we start to compute the lhs.
3242 This is needed for correct code for cases such as
3243 val = setjmp (buf) on machines where reference to val
1ad87b63
RK
3244 requires loading up part of an address in a separate insn.
3245
3246 Don't do this if TO is a VAR_DECL whose DECL_RTL is REG since it might be
3247 a promoted variable where the zero- or sign- extension needs to be done.
3248 Handling this in the normal way is safe because no computation is done
3249 before the call. */
3250 if (TREE_CODE (from) == CALL_EXPR && ! aggregate_value_p (from)
b35cd3c1 3251 && TREE_CODE (TYPE_SIZE (TREE_TYPE (from))) == INTEGER_CST
1ad87b63 3252 && ! (TREE_CODE (to) == VAR_DECL && GET_CODE (DECL_RTL (to)) == REG))
cd1db108 3253 {
0088fcb1
RK
3254 rtx value;
3255
3256 push_temp_slots ();
3257 value = expand_expr (from, NULL_RTX, VOIDmode, 0);
cd1db108 3258 if (to_rtx == 0)
921b3427 3259 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
aaf87c45 3260
fffa9c1d
JW
3261 /* Handle calls that return values in multiple non-contiguous locations.
3262 The Irix 6 ABI has examples of this. */
3263 if (GET_CODE (to_rtx) == PARALLEL)
aac5cc16
RH
3264 emit_group_load (to_rtx, value, int_size_in_bytes (TREE_TYPE (from)),
3265 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
fffa9c1d 3266 else if (GET_MODE (to_rtx) == BLKmode)
db3ec607 3267 emit_block_move (to_rtx, value, expr_size (from),
ff9b5bd8 3268 TYPE_ALIGN (TREE_TYPE (from)) / BITS_PER_UNIT);
aaf87c45
JL
3269 else
3270 emit_move_insn (to_rtx, value);
cd1db108
RS
3271 preserve_temp_slots (to_rtx);
3272 free_temp_slots ();
0088fcb1 3273 pop_temp_slots ();
709f5be1 3274 return want_value ? to_rtx : NULL_RTX;
cd1db108
RS
3275 }
3276
bbf6f052
RK
3277 /* Ordinary treatment. Expand TO to get a REG or MEM rtx.
3278 Don't re-expand if it was expanded already (in COMPONENT_REF case). */
3279
3280 if (to_rtx == 0)
41472af8
MM
3281 {
3282 to_rtx = expand_expr (to, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_WO);
3283 if (GET_CODE (to_rtx) == MEM)
3284 MEM_ALIAS_SET (to_rtx) = get_alias_set (to);
3285 }
bbf6f052 3286
86d38d25
RS
3287 /* Don't move directly into a return register. */
3288 if (TREE_CODE (to) == RESULT_DECL && GET_CODE (to_rtx) == REG)
3289 {
0088fcb1
RK
3290 rtx temp;
3291
3292 push_temp_slots ();
3293 temp = expand_expr (from, 0, GET_MODE (to_rtx), 0);
86d38d25
RS
3294 emit_move_insn (to_rtx, temp);
3295 preserve_temp_slots (to_rtx);
3296 free_temp_slots ();
0088fcb1 3297 pop_temp_slots ();
709f5be1 3298 return want_value ? to_rtx : NULL_RTX;
86d38d25
RS
3299 }
3300
bbf6f052
RK
3301 /* In case we are returning the contents of an object which overlaps
3302 the place the value is being stored, use a safe function when copying
3303 a value through a pointer into a structure value return block. */
3304 if (TREE_CODE (to) == RESULT_DECL && TREE_CODE (from) == INDIRECT_REF
3305 && current_function_returns_struct
3306 && !current_function_returns_pcc_struct)
3307 {
0088fcb1
RK
3308 rtx from_rtx, size;
3309
3310 push_temp_slots ();
33a20d10 3311 size = expr_size (from);
921b3427
RK
3312 from_rtx = expand_expr (from, NULL_RTX, VOIDmode,
3313 EXPAND_MEMORY_USE_DONT);
3314
3315 /* Copy the rights of the bitmap. */
3316 if (flag_check_memory_usage)
3317 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3318 XEXP (to_rtx, 0), ptr_mode,
3319 XEXP (from_rtx, 0), ptr_mode,
3320 convert_to_mode (TYPE_MODE (sizetype),
3321 size, TREE_UNSIGNED (sizetype)),
3322 TYPE_MODE (sizetype));
bbf6f052
RK
3323
3324#ifdef TARGET_MEM_FUNCTIONS
d562e42e 3325 emit_library_call (memcpy_libfunc, 0,
bbf6f052
RK
3326 VOIDmode, 3, XEXP (to_rtx, 0), Pmode,
3327 XEXP (from_rtx, 0), Pmode,
0fa83258
RK
3328 convert_to_mode (TYPE_MODE (sizetype),
3329 size, TREE_UNSIGNED (sizetype)),
26ba80fc 3330 TYPE_MODE (sizetype));
bbf6f052 3331#else
d562e42e 3332 emit_library_call (bcopy_libfunc, 0,
bbf6f052
RK
3333 VOIDmode, 3, XEXP (from_rtx, 0), Pmode,
3334 XEXP (to_rtx, 0), Pmode,
3b6f75e2
JW
3335 convert_to_mode (TYPE_MODE (integer_type_node),
3336 size, TREE_UNSIGNED (integer_type_node)),
3337 TYPE_MODE (integer_type_node));
bbf6f052
RK
3338#endif
3339
3340 preserve_temp_slots (to_rtx);
3341 free_temp_slots ();
0088fcb1 3342 pop_temp_slots ();
709f5be1 3343 return want_value ? to_rtx : NULL_RTX;
bbf6f052
RK
3344 }
3345
3346 /* Compute FROM and store the value in the rtx we got. */
3347
0088fcb1 3348 push_temp_slots ();
bbf6f052
RK
3349 result = store_expr (from, to_rtx, want_value);
3350 preserve_temp_slots (result);
3351 free_temp_slots ();
0088fcb1 3352 pop_temp_slots ();
709f5be1 3353 return want_value ? result : NULL_RTX;
bbf6f052
RK
3354}
3355
3356/* Generate code for computing expression EXP,
3357 and storing the value into TARGET.
bbf6f052
RK
3358 TARGET may contain a QUEUED rtx.
3359
709f5be1
RS
3360 If WANT_VALUE is nonzero, return a copy of the value
3361 not in TARGET, so that we can be sure to use the proper
3362 value in a containing expression even if TARGET has something
3363 else stored in it. If possible, we copy the value through a pseudo
3364 and return that pseudo. Or, if the value is constant, we try to
3365 return the constant. In some cases, we return a pseudo
3366 copied *from* TARGET.
3367
3368 If the mode is BLKmode then we may return TARGET itself.
 3369   It turns out that in BLKmode it doesn't cause a problem,
3370 because C has no operators that could combine two different
3371 assignments into the same BLKmode object with different values
3372 with no sequence point. Will other languages need this to
3373 be more thorough?
3374
3375 If WANT_VALUE is 0, we return NULL, to make sure
3376 to catch quickly any cases where the caller uses the value
3377 and fails to set WANT_VALUE. */
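/* Illustrative sketch: store EXP into TARGET and also obtain a safe copy
   of the value for use in a containing expression.  */
#if 0
  rtx val = store_expr (exp, target, 1);
#endif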
bbf6f052
RK
3378
3379rtx
709f5be1 3380store_expr (exp, target, want_value)
bbf6f052
RK
3381 register tree exp;
3382 register rtx target;
709f5be1 3383 int want_value;
bbf6f052
RK
3384{
3385 register rtx temp;
3386 int dont_return_target = 0;
3387
3388 if (TREE_CODE (exp) == COMPOUND_EXPR)
3389 {
3390 /* Perform first part of compound expression, then assign from second
3391 part. */
3392 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
3393 emit_queue ();
709f5be1 3394 return store_expr (TREE_OPERAND (exp, 1), target, want_value);
bbf6f052
RK
3395 }
3396 else if (TREE_CODE (exp) == COND_EXPR && GET_MODE (target) == BLKmode)
3397 {
3398 /* For conditional expression, get safe form of the target. Then
3399 test the condition, doing the appropriate assignment on either
3400 side. This avoids the creation of unnecessary temporaries.
3401 For non-BLKmode, it is more efficient not to do this. */
3402
3403 rtx lab1 = gen_label_rtx (), lab2 = gen_label_rtx ();
3404
3405 emit_queue ();
3406 target = protect_from_queue (target, 1);
3407
dabf8373 3408 do_pending_stack_adjust ();
bbf6f052
RK
3409 NO_DEFER_POP;
3410 jumpifnot (TREE_OPERAND (exp, 0), lab1);
956d6950 3411 start_cleanup_deferral ();
709f5be1 3412 store_expr (TREE_OPERAND (exp, 1), target, 0);
956d6950 3413 end_cleanup_deferral ();
bbf6f052
RK
3414 emit_queue ();
3415 emit_jump_insn (gen_jump (lab2));
3416 emit_barrier ();
3417 emit_label (lab1);
956d6950 3418 start_cleanup_deferral ();
709f5be1 3419 store_expr (TREE_OPERAND (exp, 2), target, 0);
956d6950 3420 end_cleanup_deferral ();
bbf6f052
RK
3421 emit_queue ();
3422 emit_label (lab2);
3423 OK_DEFER_POP;
a3a58acc 3424
709f5be1 3425 return want_value ? target : NULL_RTX;
bbf6f052 3426 }
709f5be1 3427 else if (want_value && GET_CODE (target) == MEM && ! MEM_VOLATILE_P (target)
bbf6f052
RK
3428 && GET_MODE (target) != BLKmode)
3429 /* If target is in memory and caller wants value in a register instead,
3430 arrange that. Pass TARGET as target for expand_expr so that,
709f5be1 3431 if EXP is another assignment, WANT_VALUE will be nonzero for it.
c2e6aff6
RS
3432 We know expand_expr will not use the target in that case.
3433 Don't do this if TARGET is volatile because we are supposed
3434 to write it and then read it. */
bbf6f052 3435 {
906c4e36 3436 temp = expand_expr (exp, cse_not_expected ? NULL_RTX : target,
bbf6f052
RK
3437 GET_MODE (target), 0);
3438 if (GET_MODE (temp) != BLKmode && GET_MODE (temp) != VOIDmode)
3439 temp = copy_to_reg (temp);
3440 dont_return_target = 1;
3441 }
3442 else if (queued_subexp_p (target))
709f5be1
RS
3443 /* If target contains a postincrement, let's not risk
3444 using it as the place to generate the rhs. */
bbf6f052
RK
3445 {
3446 if (GET_MODE (target) != BLKmode && GET_MODE (target) != VOIDmode)
3447 {
3448 /* Expand EXP into a new pseudo. */
3449 temp = gen_reg_rtx (GET_MODE (target));
3450 temp = expand_expr (exp, temp, GET_MODE (target), 0);
3451 }
3452 else
906c4e36 3453 temp = expand_expr (exp, NULL_RTX, GET_MODE (target), 0);
709f5be1
RS
3454
3455 /* If target is volatile, ANSI requires accessing the value
3456 *from* the target, if it is accessed. So make that happen.
3457 In no case return the target itself. */
3458 if (! MEM_VOLATILE_P (target) && want_value)
3459 dont_return_target = 1;
bbf6f052 3460 }
1499e0a8
RK
3461 else if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
 3462 /* If this is a scalar in a register that is stored in a wider mode
3463 than the declared mode, compute the result into its declared mode
3464 and then convert to the wider mode. Our value is the computed
3465 expression. */
3466 {
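 /* Illustrative example, not part of the original source: on a
    64-bit target that promotes SImode variables into DImode
    registers, TARGET might be (subreg:SI (reg:DI n) 0) with
    SUBREG_PROMOTED_VAR_P set.  The code below then computes the
    value in the declared SImode and lets convert_move do the
    widening store into the DImode register.  */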
5a32d038 3467 /* If we don't want a value, we can do the conversion inside EXP,
f635a84d
RK
3468 which will often result in some optimizations. Do the conversion
3469 in two steps: first change the signedness, if needed, then
ab6c58f1
RK
 3470 the extension. But don't do this if the type of EXP is a subtype
3471 of something else since then the conversion might involve
3472 more than just converting modes. */
3473 if (! want_value && INTEGRAL_TYPE_P (TREE_TYPE (exp))
3474 && TREE_TYPE (TREE_TYPE (exp)) == 0)
f635a84d
RK
3475 {
3476 if (TREE_UNSIGNED (TREE_TYPE (exp))
3477 != SUBREG_PROMOTED_UNSIGNED_P (target))
3478 exp
3479 = convert
3480 (signed_or_unsigned_type (SUBREG_PROMOTED_UNSIGNED_P (target),
3481 TREE_TYPE (exp)),
3482 exp);
3483
3484 exp = convert (type_for_mode (GET_MODE (SUBREG_REG (target)),
3485 SUBREG_PROMOTED_UNSIGNED_P (target)),
3486 exp);
3487 }
5a32d038 3488
1499e0a8 3489 temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
b258707c 3490
766f36c7 3491 /* If TEMP is a volatile MEM and we want a result value, make
f29369b9
RK
3492 the access now so it gets done only once. Likewise if
3493 it contains TARGET. */
3494 if (GET_CODE (temp) == MEM && want_value
3495 && (MEM_VOLATILE_P (temp)
3496 || reg_mentioned_p (SUBREG_REG (target), XEXP (temp, 0))))
766f36c7
RK
3497 temp = copy_to_reg (temp);
3498
b258707c
RS
3499 /* If TEMP is a VOIDmode constant, use convert_modes to make
3500 sure that we properly convert it. */
3501 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
3502 temp = convert_modes (GET_MODE (SUBREG_REG (target)),
3503 TYPE_MODE (TREE_TYPE (exp)), temp,
3504 SUBREG_PROMOTED_UNSIGNED_P (target));
3505
1499e0a8
RK
3506 convert_move (SUBREG_REG (target), temp,
3507 SUBREG_PROMOTED_UNSIGNED_P (target));
709f5be1 3508 return want_value ? temp : NULL_RTX;
1499e0a8 3509 }
bbf6f052
RK
3510 else
3511 {
3512 temp = expand_expr (exp, target, GET_MODE (target), 0);
766f36c7 3513 /* Return TARGET if it's a specified hardware register.
709f5be1
RS
3514 If TARGET is a volatile mem ref, either return TARGET
3515 or return a reg copied *from* TARGET; ANSI requires this.
3516
3517 Otherwise, if TEMP is not TARGET, return TEMP
3518 if it is constant (for efficiency),
3519 or if we really want the correct value. */
bbf6f052
RK
3520 if (!(target && GET_CODE (target) == REG
3521 && REGNO (target) < FIRST_PSEUDO_REGISTER)
709f5be1 3522 && !(GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
effbcc6a 3523 && ! rtx_equal_p (temp, target)
709f5be1 3524 && (CONSTANT_P (temp) || want_value))
bbf6f052
RK
3525 dont_return_target = 1;
3526 }
3527
b258707c
RS
3528 /* If TEMP is a VOIDmode constant and the mode of the type of EXP is not
3529 the same as that of TARGET, adjust the constant. This is needed, for
3530 example, in case it is a CONST_DOUBLE and we want only a word-sized
3531 value. */
3532 if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode
c1da1f33 3533 && TREE_CODE (exp) != ERROR_MARK
b258707c
RS
3534 && GET_MODE (target) != TYPE_MODE (TREE_TYPE (exp)))
3535 temp = convert_modes (GET_MODE (target), TYPE_MODE (TREE_TYPE (exp)),
3536 temp, TREE_UNSIGNED (TREE_TYPE (exp)));
3537
921b3427
RK
3538 if (flag_check_memory_usage
3539 && GET_CODE (target) == MEM
3540 && AGGREGATE_TYPE_P (TREE_TYPE (exp)))
3541 {
3542 if (GET_CODE (temp) == MEM)
3543 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
3544 XEXP (target, 0), ptr_mode,
3545 XEXP (temp, 0), ptr_mode,
3546 expr_size (exp), TYPE_MODE (sizetype));
3547 else
3548 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3549 XEXP (target, 0), ptr_mode,
3550 expr_size (exp), TYPE_MODE (sizetype),
956d6950
JL
3551 GEN_INT (MEMORY_USE_WO),
3552 TYPE_MODE (integer_type_node));
921b3427
RK
3553 }
3554
bbf6f052
RK
3555 /* If value was not generated in the target, store it there.
 3556 Convert the value to TARGET's type first if necessary. */
3557
6036acbb
R
3558 if ((! rtx_equal_p (temp, target)
3559 || side_effects_p (temp)
3560 || side_effects_p (target))
3561 && TREE_CODE (exp) != ERROR_MARK)
bbf6f052
RK
3562 {
3563 target = protect_from_queue (target, 1);
3564 if (GET_MODE (temp) != GET_MODE (target)
f0348c25 3565 && GET_MODE (temp) != VOIDmode)
bbf6f052
RK
3566 {
3567 int unsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
3568 if (dont_return_target)
3569 {
3570 /* In this case, we will return TEMP,
3571 so make sure it has the proper mode.
3572 But don't forget to store the value into TARGET. */
3573 temp = convert_to_mode (GET_MODE (target), temp, unsignedp);
3574 emit_move_insn (target, temp);
3575 }
3576 else
3577 convert_move (target, temp, unsignedp);
3578 }
3579
3580 else if (GET_MODE (temp) == BLKmode && TREE_CODE (exp) == STRING_CST)
3581 {
3582 /* Handle copying a string constant into an array.
3583 The string constant may be shorter than the array.
3584 So copy just the string's actual length, and clear the rest. */
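 /* Hedged example, not in the original source: for the C declaration
    `char buf[10] = "abc";', EXP is the STRING_CST "abc" -- assumed
    here to have TREE_STRING_LENGTH == 4, counting the terminating
    null -- so the code below block-copies 4 bytes and clears the
    remaining 6 bytes of the array.  */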
3585 rtx size;
22619c3f 3586 rtx addr;
bbf6f052 3587
e87b4f3f
RS
3588 /* Get the size of the data type of the string,
3589 which is actually the size of the target. */
3590 size = expr_size (exp);
3591 if (GET_CODE (size) == CONST_INT
3592 && INTVAL (size) < TREE_STRING_LENGTH (exp))
3593 emit_block_move (target, temp, size,
3594 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3595 else
bbf6f052 3596 {
e87b4f3f
RS
3597 /* Compute the size of the data to copy from the string. */
3598 tree copy_size
c03b7665 3599 = size_binop (MIN_EXPR,
b50d17a1 3600 make_tree (sizetype, size),
c03b7665
RK
3601 convert (sizetype,
3602 build_int_2 (TREE_STRING_LENGTH (exp), 0)));
906c4e36
RK
3603 rtx copy_size_rtx = expand_expr (copy_size, NULL_RTX,
3604 VOIDmode, 0);
e87b4f3f
RS
3605 rtx label = 0;
3606
3607 /* Copy that much. */
3608 emit_block_move (target, temp, copy_size_rtx,
3609 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3610
88f63c77
RK
3611 /* Figure out how much is left in TARGET that we have to clear.
3612 Do all calculations in ptr_mode. */
3613
3614 addr = XEXP (target, 0);
3615 addr = convert_modes (ptr_mode, Pmode, addr, 1);
3616
e87b4f3f
RS
3617 if (GET_CODE (copy_size_rtx) == CONST_INT)
3618 {
88f63c77 3619 addr = plus_constant (addr, TREE_STRING_LENGTH (exp));
22619c3f 3620 size = plus_constant (size, - TREE_STRING_LENGTH (exp));
e87b4f3f
RS
3621 }
3622 else
3623 {
88f63c77
RK
3624 addr = force_reg (ptr_mode, addr);
3625 addr = expand_binop (ptr_mode, add_optab, addr,
906c4e36
RK
3626 copy_size_rtx, NULL_RTX, 0,
3627 OPTAB_LIB_WIDEN);
e87b4f3f 3628
88f63c77 3629 size = expand_binop (ptr_mode, sub_optab, size,
906c4e36
RK
3630 copy_size_rtx, NULL_RTX, 0,
3631 OPTAB_LIB_WIDEN);
e87b4f3f 3632
906c4e36 3633 emit_cmp_insn (size, const0_rtx, LT, NULL_RTX,
e87b4f3f
RS
3634 GET_MODE (size), 0, 0);
3635 label = gen_label_rtx ();
3636 emit_jump_insn (gen_blt (label));
3637 }
3638
3639 if (size != const0_rtx)
3640 {
921b3427
RK
3641 /* Be sure we can write on ADDR. */
3642 if (flag_check_memory_usage)
3643 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
3644 addr, ptr_mode,
3645 size, TYPE_MODE (sizetype),
956d6950
JL
3646 GEN_INT (MEMORY_USE_WO),
3647 TYPE_MODE (integer_type_node));
bbf6f052 3648#ifdef TARGET_MEM_FUNCTIONS
3b6f75e2 3649 emit_library_call (memset_libfunc, 0, VOIDmode, 3,
86242483 3650 addr, ptr_mode,
3b6f75e2
JW
3651 const0_rtx, TYPE_MODE (integer_type_node),
3652 convert_to_mode (TYPE_MODE (sizetype),
3653 size,
3654 TREE_UNSIGNED (sizetype)),
3655 TYPE_MODE (sizetype));
bbf6f052 3656#else
d562e42e 3657 emit_library_call (bzero_libfunc, 0, VOIDmode, 2,
86242483 3658 addr, ptr_mode,
3b6f75e2
JW
3659 convert_to_mode (TYPE_MODE (integer_type_node),
3660 size,
3661 TREE_UNSIGNED (integer_type_node)),
3662 TYPE_MODE (integer_type_node));
bbf6f052 3663#endif
e87b4f3f 3664 }
22619c3f 3665
e87b4f3f
RS
3666 if (label)
3667 emit_label (label);
bbf6f052
RK
3668 }
3669 }
fffa9c1d
JW
3670 /* Handle calls that return values in multiple non-contiguous locations.
3671 The Irix 6 ABI has examples of this. */
3672 else if (GET_CODE (target) == PARALLEL)
aac5cc16
RH
3673 emit_group_load (target, temp, int_size_in_bytes (TREE_TYPE (exp)),
3674 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
bbf6f052
RK
3675 else if (GET_MODE (temp) == BLKmode)
3676 emit_block_move (target, temp, expr_size (exp),
3677 TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
3678 else
3679 emit_move_insn (target, temp);
3680 }
709f5be1 3681
766f36c7
RK
3682 /* If we don't want a value, return NULL_RTX. */
3683 if (! want_value)
3684 return NULL_RTX;
3685
3686 /* If we are supposed to return TEMP, do so as long as it isn't a MEM.
3687 ??? The latter test doesn't seem to make sense. */
3688 else if (dont_return_target && GET_CODE (temp) != MEM)
bbf6f052 3689 return temp;
766f36c7
RK
3690
3691 /* Return TARGET itself if it is a hard register. */
3692 else if (want_value && GET_MODE (target) != BLKmode
3693 && ! (GET_CODE (target) == REG
3694 && REGNO (target) < FIRST_PSEUDO_REGISTER))
709f5be1 3695 return copy_to_reg (target);
766f36c7
RK
3696
3697 else
709f5be1 3698 return target;
bbf6f052
RK
3699}
3700\f
9de08200
RK
3701/* Return 1 if EXP just contains zeros. */
3702
3703static int
3704is_zeros_p (exp)
3705 tree exp;
3706{
3707 tree elt;
3708
3709 switch (TREE_CODE (exp))
3710 {
3711 case CONVERT_EXPR:
3712 case NOP_EXPR:
3713 case NON_LVALUE_EXPR:
3714 return is_zeros_p (TREE_OPERAND (exp, 0));
3715
3716 case INTEGER_CST:
3717 return TREE_INT_CST_LOW (exp) == 0 && TREE_INT_CST_HIGH (exp) == 0;
3718
3719 case COMPLEX_CST:
3720 return
3721 is_zeros_p (TREE_REALPART (exp)) && is_zeros_p (TREE_IMAGPART (exp));
3722
3723 case REAL_CST:
41c9120b 3724 return REAL_VALUES_IDENTICAL (TREE_REAL_CST (exp), dconst0);
9de08200
RK
3725
3726 case CONSTRUCTOR:
e1a43f73
PB
3727 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3728 return CONSTRUCTOR_ELTS (exp) == NULL_TREE;
9de08200
RK
3729 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3730 if (! is_zeros_p (TREE_VALUE (elt)))
3731 return 0;
3732
3733 return 1;
e9a25f70
JL
3734
3735 default:
3736 return 0;
9de08200 3737 }
9de08200
RK
3738}
3739
3740/* Return 1 if EXP contains mostly (3/4) zeros. */
3741
3742static int
3743mostly_zeros_p (exp)
3744 tree exp;
3745{
9de08200
RK
3746 if (TREE_CODE (exp) == CONSTRUCTOR)
3747 {
e1a43f73
PB
3748 int elts = 0, zeros = 0;
3749 tree elt = CONSTRUCTOR_ELTS (exp);
3750 if (TREE_TYPE (exp) && TREE_CODE (TREE_TYPE (exp)) == SET_TYPE)
3751 {
3752 /* If there are no ranges of true bits, it is all zero. */
3753 return elt == NULL_TREE;
3754 }
3755 for (; elt; elt = TREE_CHAIN (elt))
3756 {
3757 /* We do not handle the case where the index is a RANGE_EXPR,
3758 so the statistic will be somewhat inaccurate.
3759 We do make a more accurate count in store_constructor itself,
3760 so since this function is only used for nested array elements,
0f41302f 3761 this should be close enough. */
e1a43f73
PB
3762 if (mostly_zeros_p (TREE_VALUE (elt)))
3763 zeros++;
3764 elts++;
3765 }
9de08200
RK
3766
3767 return 4 * zeros >= 3 * elts;
3768 }
3769
3770 return is_zeros_p (exp);
3771}
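/* Worked example of the 3/4 threshold above (illustrative only):
   a CONSTRUCTOR with 16 elements of which 12 are zero satisfies
   4 * 12 = 48 >= 3 * 16 = 48 and so counts as mostly zeros;
   with only 11 zeros, 44 < 48, and it does not.  */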
3772\f
e1a43f73
PB
3773/* Helper function for store_constructor.
3774 TARGET, BITSIZE, BITPOS, MODE, EXP are as for store_field.
3775 TYPE is the type of the CONSTRUCTOR, not the element type.
23ccec44
JW
3776 CLEARED is as for store_constructor.
3777
3778 This provides a recursive shortcut back to store_constructor when it isn't
3779 necessary to go through store_field. This is so that we can pass through
3780 the cleared field to let store_constructor know that we may not have to
3781 clear a substructure if the outer structure has already been cleared. */
e1a43f73
PB
3782
3783static void
3784store_constructor_field (target, bitsize, bitpos,
3785 mode, exp, type, cleared)
3786 rtx target;
3787 int bitsize, bitpos;
3788 enum machine_mode mode;
3789 tree exp, type;
3790 int cleared;
3791{
3792 if (TREE_CODE (exp) == CONSTRUCTOR
23ccec44
JW
3793 && bitpos % BITS_PER_UNIT == 0
3794 /* If we have a non-zero bitpos for a register target, then we just
3795 let store_field do the bitfield handling. This is unlikely to
 3796 generate unnecessary clear instructions anyway. */
3797 && (bitpos == 0 || GET_CODE (target) == MEM))
e1a43f73 3798 {
126e5b0d
JW
3799 if (bitpos != 0)
3800 target = change_address (target, VOIDmode,
3801 plus_constant (XEXP (target, 0),
3802 bitpos / BITS_PER_UNIT));
3803 store_constructor (exp, target, cleared);
e1a43f73
PB
3804 }
3805 else
3806 store_field (target, bitsize, bitpos, mode, exp,
3807 VOIDmode, 0, TYPE_ALIGN (type) / BITS_PER_UNIT,
3808 int_size_in_bytes (type));
3809}
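/* Illustrative note, not from the original source: for a nested
   aggregate initializer such as `struct s { struct t u; } v = {{0}};',
   the inner CONSTRUCTOR lands on a byte boundary, so the code above
   adjusts the address and recurses into store_constructor with
   CLEARED passed through, letting the callee skip re-clearing a
   substructure of an already-cleared object.  */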
3810
bbf6f052 3811/* Store the value of constructor EXP into the rtx TARGET.
e1a43f73 3812 TARGET is either a REG or a MEM.
0f41302f 3813 CLEARED is true if TARGET is known to have been zeroed. */
bbf6f052
RK
3814
3815static void
e1a43f73 3816store_constructor (exp, target, cleared)
bbf6f052
RK
3817 tree exp;
3818 rtx target;
e1a43f73 3819 int cleared;
bbf6f052 3820{
4af3895e
JVA
3821 tree type = TREE_TYPE (exp);
3822
bbf6f052
RK
3823 /* We know our target cannot conflict, since safe_from_p has been called. */
3824#if 0
3825 /* Don't try copying piece by piece into a hard register
3826 since that is vulnerable to being clobbered by EXP.
3827 Instead, construct in a pseudo register and then copy it all. */
3828 if (GET_CODE (target) == REG && REGNO (target) < FIRST_PSEUDO_REGISTER)
3829 {
3830 rtx temp = gen_reg_rtx (GET_MODE (target));
e1a43f73 3831 store_constructor (exp, temp, 0);
bbf6f052
RK
3832 emit_move_insn (target, temp);
3833 return;
3834 }
3835#endif
3836
e44842fe
RK
3837 if (TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
3838 || TREE_CODE (type) == QUAL_UNION_TYPE)
bbf6f052
RK
3839 {
3840 register tree elt;
3841
4af3895e 3842 /* Inform later passes that the whole union value is dead. */
e44842fe
RK
3843 if (TREE_CODE (type) == UNION_TYPE
3844 || TREE_CODE (type) == QUAL_UNION_TYPE)
38a448ca 3845 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
4af3895e
JVA
3846
3847 /* If we are building a static constructor into a register,
3848 set the initial value as zero so we can fold the value into
67225c15
RK
3849 a constant. But if more than one register is involved,
3850 this probably loses. */
3851 else if (GET_CODE (target) == REG && TREE_STATIC (exp)
3852 && GET_MODE_SIZE (GET_MODE (target)) <= UNITS_PER_WORD)
9de08200
RK
3853 {
3854 if (! cleared)
e9a25f70 3855 emit_move_insn (target, CONST0_RTX (GET_MODE (target)));
4af3895e 3856
9de08200
RK
3857 cleared = 1;
3858 }
3859
3860 /* If the constructor has fewer fields than the structure
3861 or if we are initializing the structure to mostly zeros,
bbf6f052 3862 clear the whole structure first. */
9de08200
RK
3863 else if ((list_length (CONSTRUCTOR_ELTS (exp))
3864 != list_length (TYPE_FIELDS (type)))
3865 || mostly_zeros_p (exp))
3866 {
3867 if (! cleared)
3868 clear_storage (target, expr_size (exp),
3869 TYPE_ALIGN (type) / BITS_PER_UNIT);
3870
3871 cleared = 1;
3872 }
bbf6f052
RK
3873 else
3874 /* Inform later passes that the old value is dead. */
38a448ca 3875 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
3876
3877 /* Store each element of the constructor into
3878 the corresponding field of TARGET. */
3879
3880 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
3881 {
3882 register tree field = TREE_PURPOSE (elt);
3883 register enum machine_mode mode;
3884 int bitsize;
b50d17a1 3885 int bitpos = 0;
bbf6f052 3886 int unsignedp;
b50d17a1
RK
3887 tree pos, constant = 0, offset = 0;
3888 rtx to_rtx = target;
bbf6f052 3889
f32fd778
RS
3890 /* Just ignore missing fields.
3891 We cleared the whole structure, above,
3892 if any fields are missing. */
3893 if (field == 0)
3894 continue;
3895
e1a43f73
PB
3896 if (cleared && is_zeros_p (TREE_VALUE (elt)))
3897 continue;
9de08200 3898
bbf6f052
RK
3899 bitsize = TREE_INT_CST_LOW (DECL_SIZE (field));
3900 unsignedp = TREE_UNSIGNED (field);
3901 mode = DECL_MODE (field);
3902 if (DECL_BIT_FIELD (field))
3903 mode = VOIDmode;
3904
b50d17a1
RK
3905 pos = DECL_FIELD_BITPOS (field);
3906 if (TREE_CODE (pos) == INTEGER_CST)
3907 constant = pos;
3908 else if (TREE_CODE (pos) == PLUS_EXPR
3909 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
3910 constant = TREE_OPERAND (pos, 1), offset = TREE_OPERAND (pos, 0);
3911 else
3912 offset = pos;
3913
3914 if (constant)
cd11b87e 3915 bitpos = TREE_INT_CST_LOW (constant);
b50d17a1
RK
3916
3917 if (offset)
3918 {
3919 rtx offset_rtx;
3920
3921 if (contains_placeholder_p (offset))
3922 offset = build (WITH_RECORD_EXPR, sizetype,
956d6950 3923 offset, make_tree (TREE_TYPE (exp), target));
bbf6f052 3924
b50d17a1
RK
3925 offset = size_binop (FLOOR_DIV_EXPR, offset,
3926 size_int (BITS_PER_UNIT));
bbf6f052 3927
b50d17a1
RK
3928 offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
3929 if (GET_CODE (to_rtx) != MEM)
3930 abort ();
3931
bd070e1a
RH
3932 if (GET_MODE (offset_rtx) != ptr_mode)
3933 {
3934#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 3935 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
bd070e1a
RH
3936#else
3937 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
3938#endif
3939 }
3940
b50d17a1
RK
3941 to_rtx
3942 = change_address (to_rtx, VOIDmode,
38a448ca 3943 gen_rtx_PLUS (ptr_mode, XEXP (to_rtx, 0),
88f63c77 3944 force_reg (ptr_mode, offset_rtx)));
b50d17a1 3945 }
cf04eb80
RK
3946 if (TREE_READONLY (field))
3947 {
9151b3bf 3948 if (GET_CODE (to_rtx) == MEM)
effbcc6a
RK
3949 to_rtx = copy_rtx (to_rtx);
3950
cf04eb80
RK
3951 RTX_UNCHANGING_P (to_rtx) = 1;
3952 }
3953
e1a43f73
PB
3954 store_constructor_field (to_rtx, bitsize, bitpos,
3955 mode, TREE_VALUE (elt), type, cleared);
bbf6f052
RK
3956 }
3957 }
4af3895e 3958 else if (TREE_CODE (type) == ARRAY_TYPE)
bbf6f052
RK
3959 {
3960 register tree elt;
3961 register int i;
e1a43f73 3962 int need_to_clear;
4af3895e 3963 tree domain = TYPE_DOMAIN (type);
906c4e36
RK
3964 HOST_WIDE_INT minelt = TREE_INT_CST_LOW (TYPE_MIN_VALUE (domain));
3965 HOST_WIDE_INT maxelt = TREE_INT_CST_LOW (TYPE_MAX_VALUE (domain));
4af3895e 3966 tree elttype = TREE_TYPE (type);
bbf6f052 3967
e1a43f73 3968 /* If the constructor has fewer elements than the array,
38e01259 3969 clear the whole array first. Similarly if this is
e1a43f73
PB
 3970 a static constructor of a non-BLKmode object. */
3971 if (cleared || (GET_CODE (target) == REG && TREE_STATIC (exp)))
3972 need_to_clear = 1;
3973 else
3974 {
3975 HOST_WIDE_INT count = 0, zero_count = 0;
3976 need_to_clear = 0;
3977 /* This loop is a more accurate version of the loop in
3978 mostly_zeros_p (it handles RANGE_EXPR in an index).
3979 It is also needed to check for missing elements. */
3980 for (elt = CONSTRUCTOR_ELTS (exp);
3981 elt != NULL_TREE;
df0faff1 3982 elt = TREE_CHAIN (elt))
e1a43f73
PB
3983 {
3984 tree index = TREE_PURPOSE (elt);
3985 HOST_WIDE_INT this_node_count;
3986 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
3987 {
3988 tree lo_index = TREE_OPERAND (index, 0);
3989 tree hi_index = TREE_OPERAND (index, 1);
3990 if (TREE_CODE (lo_index) != INTEGER_CST
3991 || TREE_CODE (hi_index) != INTEGER_CST)
3992 {
3993 need_to_clear = 1;
3994 break;
3995 }
3996 this_node_count = TREE_INT_CST_LOW (hi_index)
3997 - TREE_INT_CST_LOW (lo_index) + 1;
3998 }
3999 else
4000 this_node_count = 1;
4001 count += this_node_count;
4002 if (mostly_zeros_p (TREE_VALUE (elt)))
4003 zero_count += this_node_count;
4004 }
8e958f70 4005 /* Clear the entire array first if there are any missing elements,
0f41302f 4006 or if the incidence of zero elements is >= 75%. */
8e958f70
PB
4007 if (count < maxelt - minelt + 1
4008 || 4 * zero_count >= 3 * count)
e1a43f73
PB
4009 need_to_clear = 1;
4010 }
4011 if (need_to_clear)
9de08200
RK
4012 {
4013 if (! cleared)
4014 clear_storage (target, expr_size (exp),
4015 TYPE_ALIGN (type) / BITS_PER_UNIT);
9de08200
RK
4016 cleared = 1;
4017 }
bbf6f052
RK
4018 else
4019 /* Inform later passes that the old value is dead. */
38a448ca 4020 emit_insn (gen_rtx_CLOBBER (VOIDmode, target));
bbf6f052
RK
4021
4022 /* Store each element of the constructor into
4023 the corresponding element of TARGET, determined
4024 by counting the elements. */
4025 for (elt = CONSTRUCTOR_ELTS (exp), i = 0;
4026 elt;
4027 elt = TREE_CHAIN (elt), i++)
4028 {
4029 register enum machine_mode mode;
4030 int bitsize;
4031 int bitpos;
4032 int unsignedp;
e1a43f73 4033 tree value = TREE_VALUE (elt);
03dc44a6
RS
4034 tree index = TREE_PURPOSE (elt);
4035 rtx xtarget = target;
bbf6f052 4036
e1a43f73
PB
4037 if (cleared && is_zeros_p (value))
4038 continue;
9de08200 4039
bbf6f052
RK
4040 mode = TYPE_MODE (elttype);
4041 bitsize = GET_MODE_BITSIZE (mode);
4042 unsignedp = TREE_UNSIGNED (elttype);
4043
e1a43f73
PB
4044 if (index != NULL_TREE && TREE_CODE (index) == RANGE_EXPR)
4045 {
4046 tree lo_index = TREE_OPERAND (index, 0);
4047 tree hi_index = TREE_OPERAND (index, 1);
4048 rtx index_r, pos_rtx, addr, hi_r, loop_top, loop_end;
4049 struct nesting *loop;
05c0b405
PB
4050 HOST_WIDE_INT lo, hi, count;
4051 tree position;
e1a43f73 4052
0f41302f 4053 /* If the range is constant and "small", unroll the loop. */
e1a43f73 4054 if (TREE_CODE (lo_index) == INTEGER_CST
05c0b405
PB
4055 && TREE_CODE (hi_index) == INTEGER_CST
4056 && (lo = TREE_INT_CST_LOW (lo_index),
4057 hi = TREE_INT_CST_LOW (hi_index),
4058 count = hi - lo + 1,
4059 (GET_CODE (target) != MEM
4060 || count <= 2
4061 || (TREE_CODE (TYPE_SIZE (elttype)) == INTEGER_CST
4062 && TREE_INT_CST_LOW (TYPE_SIZE (elttype)) * count
4063 <= 40 * 8))))
e1a43f73 4064 {
05c0b405
PB
4065 lo -= minelt; hi -= minelt;
4066 for (; lo <= hi; lo++)
e1a43f73 4067 {
05c0b405
PB
4068 bitpos = lo * TREE_INT_CST_LOW (TYPE_SIZE (elttype));
4069 store_constructor_field (target, bitsize, bitpos,
4070 mode, value, type, cleared);
e1a43f73
PB
4071 }
4072 }
4073 else
4074 {
4075 hi_r = expand_expr (hi_index, NULL_RTX, VOIDmode, 0);
4076 loop_top = gen_label_rtx ();
4077 loop_end = gen_label_rtx ();
4078
4079 unsignedp = TREE_UNSIGNED (domain);
4080
4081 index = build_decl (VAR_DECL, NULL_TREE, domain);
4082
4083 DECL_RTL (index) = index_r
4084 = gen_reg_rtx (promote_mode (domain, DECL_MODE (index),
4085 &unsignedp, 0));
4086
4087 if (TREE_CODE (value) == SAVE_EXPR
4088 && SAVE_EXPR_RTL (value) == 0)
4089 {
0f41302f
MS
4090 /* Make sure value gets expanded once before the
4091 loop. */
e1a43f73
PB
4092 expand_expr (value, const0_rtx, VOIDmode, 0);
4093 emit_queue ();
4094 }
4095 store_expr (lo_index, index_r, 0);
4096 loop = expand_start_loop (0);
4097
0f41302f 4098 /* Assign value to element index. */
e1a43f73
PB
4099 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4100 size_int (BITS_PER_UNIT));
4101 position = size_binop (MULT_EXPR,
4102 size_binop (MINUS_EXPR, index,
4103 TYPE_MIN_VALUE (domain)),
4104 position);
4105 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4106 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
e1a43f73
PB
4107 xtarget = change_address (target, mode, addr);
4108 if (TREE_CODE (value) == CONSTRUCTOR)
05c0b405 4109 store_constructor (value, xtarget, cleared);
e1a43f73
PB
4110 else
4111 store_expr (value, xtarget, 0);
4112
4113 expand_exit_loop_if_false (loop,
4114 build (LT_EXPR, integer_type_node,
4115 index, hi_index));
4116
4117 expand_increment (build (PREINCREMENT_EXPR,
4118 TREE_TYPE (index),
7b8b9722 4119 index, integer_one_node), 0, 0);
e1a43f73
PB
4120 expand_end_loop ();
4121 emit_label (loop_end);
4122
 4123 /* Needed by stupid register allocation, to extend the
4124 lifetime of pseudo-regs used by target past the end
4125 of the loop. */
38a448ca 4126 emit_insn (gen_rtx_USE (GET_MODE (target), target));
e1a43f73
PB
4127 }
4128 }
4129 else if ((index != 0 && TREE_CODE (index) != INTEGER_CST)
5b6c44ff 4130 || TREE_CODE (TYPE_SIZE (elttype)) != INTEGER_CST)
03dc44a6 4131 {
e1a43f73 4132 rtx pos_rtx, addr;
03dc44a6
RS
4133 tree position;
4134
5b6c44ff
RK
4135 if (index == 0)
4136 index = size_int (i);
4137
e1a43f73
PB
4138 if (minelt)
4139 index = size_binop (MINUS_EXPR, index,
4140 TYPE_MIN_VALUE (domain));
5b6c44ff
RK
4141 position = size_binop (EXACT_DIV_EXPR, TYPE_SIZE (elttype),
4142 size_int (BITS_PER_UNIT));
4143 position = size_binop (MULT_EXPR, index, position);
03dc44a6 4144 pos_rtx = expand_expr (position, 0, VOIDmode, 0);
38a448ca 4145 addr = gen_rtx_PLUS (Pmode, XEXP (target, 0), pos_rtx);
03dc44a6 4146 xtarget = change_address (target, mode, addr);
e1a43f73 4147 store_expr (value, xtarget, 0);
03dc44a6
RS
4148 }
4149 else
4150 {
4151 if (index != 0)
7c314719 4152 bitpos = ((TREE_INT_CST_LOW (index) - minelt)
03dc44a6
RS
4153 * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
4154 else
4155 bitpos = (i * TREE_INT_CST_LOW (TYPE_SIZE (elttype)));
e1a43f73
PB
4156 store_constructor_field (target, bitsize, bitpos,
4157 mode, value, type, cleared);
03dc44a6 4158 }
bbf6f052
RK
4159 }
4160 }
071a6595
PB
 4161 /* Set constructor assignments. */
4162 else if (TREE_CODE (type) == SET_TYPE)
4163 {
e1a43f73 4164 tree elt = CONSTRUCTOR_ELTS (exp);
e1a43f73 4165 int nbytes = int_size_in_bytes (type), nbits;
071a6595
PB
4166 tree domain = TYPE_DOMAIN (type);
4167 tree domain_min, domain_max, bitlength;
4168
9faa82d8 4169 /* The default implementation strategy is to extract the constant
071a6595
PB
4170 parts of the constructor, use that to initialize the target,
4171 and then "or" in whatever non-constant ranges we need in addition.
4172
4173 If a large set is all zero or all ones, it is
4174 probably better to set it using memset (if available) or bzero.
4175 Also, if a large set has just a single range, it may also be
 4176 better to first clear the whole set (using
 0f41302f 4177 bzero/memset), and then set the bits we want. */
071a6595 4178
0f41302f 4179 /* Check for all zeros. */
e1a43f73 4180 if (elt == NULL_TREE)
071a6595 4181 {
e1a43f73
PB
4182 if (!cleared)
4183 clear_storage (target, expr_size (exp),
4184 TYPE_ALIGN (type) / BITS_PER_UNIT);
071a6595
PB
4185 return;
4186 }
4187
071a6595
PB
4188 domain_min = convert (sizetype, TYPE_MIN_VALUE (domain));
4189 domain_max = convert (sizetype, TYPE_MAX_VALUE (domain));
4190 bitlength = size_binop (PLUS_EXPR,
4191 size_binop (MINUS_EXPR, domain_max, domain_min),
4192 size_one_node);
4193
e1a43f73
PB
4194 if (nbytes < 0 || TREE_CODE (bitlength) != INTEGER_CST)
4195 abort ();
4196 nbits = TREE_INT_CST_LOW (bitlength);
4197
4198 /* For "small" sets, or "medium-sized" (up to 32 bytes) sets that
4199 are "complicated" (more than one range), initialize (the
4200 constant parts) by copying from a constant. */
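 /* Worked example of the packing loop below (illustrative only):
    with SET_WORD_SIZE == 8 and a set whose members are bits 1 and 3,
    the little-endian branch accumulates
    word = (1 << 1) | (1 << 3) == 0x0a
    before the completed word is flushed with emit_move_insn.  */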
4201 if (GET_MODE (target) != BLKmode || nbits <= 2 * BITS_PER_WORD
4202 || (nbytes <= 32 && TREE_CHAIN (elt) != NULL_TREE))
071a6595 4203 {
b4ee5a72
PB
4204 int set_word_size = TYPE_ALIGN (TREE_TYPE (exp));
4205 enum machine_mode mode = mode_for_size (set_word_size, MODE_INT, 1);
0f41302f 4206 char *bit_buffer = (char *) alloca (nbits);
b4ee5a72
PB
4207 HOST_WIDE_INT word = 0;
4208 int bit_pos = 0;
4209 int ibit = 0;
0f41302f 4210 int offset = 0; /* In bytes from beginning of set. */
e1a43f73 4211 elt = get_set_constructor_bits (exp, bit_buffer, nbits);
b4ee5a72 4212 for (;;)
071a6595 4213 {
b4ee5a72
PB
4214 if (bit_buffer[ibit])
4215 {
b09f3348 4216 if (BYTES_BIG_ENDIAN)
b4ee5a72
PB
4217 word |= (1 << (set_word_size - 1 - bit_pos));
4218 else
4219 word |= 1 << bit_pos;
4220 }
4221 bit_pos++; ibit++;
4222 if (bit_pos >= set_word_size || ibit == nbits)
071a6595 4223 {
e1a43f73
PB
4224 if (word != 0 || ! cleared)
4225 {
4226 rtx datum = GEN_INT (word);
4227 rtx to_rtx;
0f41302f
MS
4228 /* The assumption here is that it is safe to use
4229 XEXP if the set is multi-word, but not if
4230 it's single-word. */
e1a43f73
PB
4231 if (GET_CODE (target) == MEM)
4232 {
4233 to_rtx = plus_constant (XEXP (target, 0), offset);
4234 to_rtx = change_address (target, mode, to_rtx);
4235 }
4236 else if (offset == 0)
4237 to_rtx = target;
4238 else
4239 abort ();
4240 emit_move_insn (to_rtx, datum);
4241 }
b4ee5a72
PB
4242 if (ibit == nbits)
4243 break;
4244 word = 0;
4245 bit_pos = 0;
4246 offset += set_word_size / BITS_PER_UNIT;
071a6595
PB
4247 }
4248 }
071a6595 4249 }
e1a43f73
PB
4250 else if (!cleared)
4251 {
0f41302f 4252 /* Don't bother clearing storage if the set is all ones. */
e1a43f73
PB
4253 if (TREE_CHAIN (elt) != NULL_TREE
4254 || (TREE_PURPOSE (elt) == NULL_TREE
4255 ? nbits != 1
4256 : (TREE_CODE (TREE_VALUE (elt)) != INTEGER_CST
4257 || TREE_CODE (TREE_PURPOSE (elt)) != INTEGER_CST
4258 || (TREE_INT_CST_LOW (TREE_VALUE (elt))
4259 - TREE_INT_CST_LOW (TREE_PURPOSE (elt)) + 1
4260 != nbits))))
4261 clear_storage (target, expr_size (exp),
4262 TYPE_ALIGN (type) / BITS_PER_UNIT);
4263 }
4264
4265 for (; elt != NULL_TREE; elt = TREE_CHAIN (elt))
071a6595
PB
4266 {
 4267 /* Start of range of element, or NULL. */
4268 tree startbit = TREE_PURPOSE (elt);
 4269 /* End of range of element, or element value. */
4270 tree endbit = TREE_VALUE (elt);
381127e8 4271#ifdef TARGET_MEM_FUNCTIONS
071a6595 4272 HOST_WIDE_INT startb, endb;
381127e8 4273#endif
071a6595
PB
4274 rtx bitlength_rtx, startbit_rtx, endbit_rtx, targetx;
4275
4276 bitlength_rtx = expand_expr (bitlength,
4277 NULL_RTX, MEM, EXPAND_CONST_ADDRESS);
4278
 4279 /* Handle a non-range tuple element like [ expr ]. */
4280 if (startbit == NULL_TREE)
4281 {
4282 startbit = save_expr (endbit);
4283 endbit = startbit;
4284 }
4285 startbit = convert (sizetype, startbit);
4286 endbit = convert (sizetype, endbit);
4287 if (! integer_zerop (domain_min))
4288 {
4289 startbit = size_binop (MINUS_EXPR, startbit, domain_min);
4290 endbit = size_binop (MINUS_EXPR, endbit, domain_min);
4291 }
4292 startbit_rtx = expand_expr (startbit, NULL_RTX, MEM,
4293 EXPAND_CONST_ADDRESS);
4294 endbit_rtx = expand_expr (endbit, NULL_RTX, MEM,
4295 EXPAND_CONST_ADDRESS);
4296
4297 if (REG_P (target))
4298 {
4299 targetx = assign_stack_temp (GET_MODE (target),
4300 GET_MODE_SIZE (GET_MODE (target)),
4301 0);
4302 emit_move_insn (targetx, target);
4303 }
4304 else if (GET_CODE (target) == MEM)
4305 targetx = target;
4306 else
4307 abort ();
4308
4309#ifdef TARGET_MEM_FUNCTIONS
4310 /* Optimization: If startbit and endbit are
9faa82d8 4311 constants divisible by BITS_PER_UNIT,
0f41302f 4312 call memset instead. */
071a6595
PB
4313 if (TREE_CODE (startbit) == INTEGER_CST
4314 && TREE_CODE (endbit) == INTEGER_CST
4315 && (startb = TREE_INT_CST_LOW (startbit)) % BITS_PER_UNIT == 0
e1a43f73 4316 && (endb = TREE_INT_CST_LOW (endbit) + 1) % BITS_PER_UNIT == 0)
071a6595 4317 {
071a6595
PB
4318 emit_library_call (memset_libfunc, 0,
4319 VOIDmode, 3,
e1a43f73
PB
4320 plus_constant (XEXP (targetx, 0),
4321 startb / BITS_PER_UNIT),
071a6595 4322 Pmode,
3b6f75e2 4323 constm1_rtx, TYPE_MODE (integer_type_node),
071a6595 4324 GEN_INT ((endb - startb) / BITS_PER_UNIT),
3b6f75e2 4325 TYPE_MODE (sizetype));
071a6595
PB
4326 }
4327 else
4328#endif
4329 {
38a448ca 4330 emit_library_call (gen_rtx_SYMBOL_REF (Pmode, "__setbits"),
071a6595
PB
4331 0, VOIDmode, 4, XEXP (targetx, 0), Pmode,
4332 bitlength_rtx, TYPE_MODE (sizetype),
4333 startbit_rtx, TYPE_MODE (sizetype),
4334 endbit_rtx, TYPE_MODE (sizetype));
4335 }
4336 if (REG_P (target))
4337 emit_move_insn (target, targetx);
4338 }
4339 }
bbf6f052
RK
4340
4341 else
4342 abort ();
4343}
4344
4345/* Store the value of EXP (an expression tree)
4346 into a subfield of TARGET which has mode MODE and occupies
4347 BITSIZE bits, starting BITPOS bits from the start of TARGET.
4348 If MODE is VOIDmode, it means that we are storing into a bit-field.
4349
4350 If VALUE_MODE is VOIDmode, return nothing in particular.
4351 UNSIGNEDP is not used in this case.
4352
4353 Otherwise, return an rtx for the value stored. This rtx
4354 has mode VALUE_MODE if that is convenient to do.
4355 In this case, UNSIGNEDP must be nonzero if the value is an unsigned type.
4356
4357 ALIGN is the alignment that TARGET is known to have, measured in bytes.
4358 TOTAL_SIZE is the size in bytes of the structure, or -1 if varying. */
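/* Hedged example, not in the original source: a store into F of
   `struct { int f : 3; } x;' would arrive here with BITSIZE == 3,
   BITPOS == 0 and MODE == VOIDmode (a bit-field), so the
   store_bit_field path below is taken instead of an ordinary
   memory store.  */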
4359
4360static rtx
4361store_field (target, bitsize, bitpos, mode, exp, value_mode,
4362 unsignedp, align, total_size)
4363 rtx target;
4364 int bitsize, bitpos;
4365 enum machine_mode mode;
4366 tree exp;
4367 enum machine_mode value_mode;
4368 int unsignedp;
4369 int align;
4370 int total_size;
4371{
906c4e36 4372 HOST_WIDE_INT width_mask = 0;
bbf6f052 4373
e9a25f70
JL
4374 if (TREE_CODE (exp) == ERROR_MARK)
4375 return const0_rtx;
4376
906c4e36
RK
4377 if (bitsize < HOST_BITS_PER_WIDE_INT)
4378 width_mask = ((HOST_WIDE_INT) 1 << bitsize) - 1;
bbf6f052
RK
4379
4380 /* If we are storing into an unaligned field of an aligned union that is
4381 in a register, we may have the mode of TARGET being an integer mode but
4382 MODE == BLKmode. In that case, get an aligned object whose size and
4383 alignment are the same as TARGET and store TARGET into it (we can avoid
4384 the store if the field being stored is the entire width of TARGET). Then
4385 call ourselves recursively to store the field into a BLKmode version of
4386 that object. Finally, load from the object into TARGET. This is not
4387 very efficient in general, but should only be slightly more expensive
4388 than the otherwise-required unaligned accesses. Perhaps this can be
4389 cleaned up later. */
4390
4391 if (mode == BLKmode
4392 && (GET_CODE (target) == REG || GET_CODE (target) == SUBREG))
4393 {
4394 rtx object = assign_stack_temp (GET_MODE (target),
4395 GET_MODE_SIZE (GET_MODE (target)), 0);
4396 rtx blk_object = copy_rtx (object);
4397
24a13950
JW
4398 MEM_IN_STRUCT_P (object) = 1;
4399 MEM_IN_STRUCT_P (blk_object) = 1;
bbf6f052
RK
4400 PUT_MODE (blk_object, BLKmode);
4401
4402 if (bitsize != GET_MODE_BITSIZE (GET_MODE (target)))
4403 emit_move_insn (object, target);
4404
4405 store_field (blk_object, bitsize, bitpos, mode, exp, VOIDmode, 0,
4406 align, total_size);
4407
46093b97
RS
4408 /* Even though we aren't returning target, we need to
4409 give it the updated value. */
bbf6f052
RK
4410 emit_move_insn (target, object);
4411
46093b97 4412 return blk_object;
bbf6f052
RK
4413 }
4414
4415 /* If the structure is in a register or if the component
4416 is a bit field, we cannot use addressing to access it.
4417 Use bit-field techniques or SUBREG to store in it. */
4418
4fa52007
RK
4419 if (mode == VOIDmode
4420 || (mode != BLKmode && ! direct_store[(int) mode])
4421 || GET_CODE (target) == REG
c980ac49 4422 || GET_CODE (target) == SUBREG
ccc98036
RS
4423 /* If the field isn't aligned enough to store as an ordinary memref,
4424 store it as a bit field. */
c7a7ac46 4425 || (SLOW_UNALIGNED_ACCESS
ccc98036 4426 && align * BITS_PER_UNIT < GET_MODE_ALIGNMENT (mode))
c7a7ac46 4427 || (SLOW_UNALIGNED_ACCESS && bitpos % GET_MODE_ALIGNMENT (mode) != 0))
bbf6f052 4428 {
906c4e36 4429 rtx temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
bbd6cf73 4430
ef19912d
RK
4431 /* If BITSIZE is narrower than the size of the type of EXP
4432 we will be narrowing TEMP. Normally, what's wanted are the
 4433 low-order bits. However, if EXP's type is a record and this is a
4434 big-endian machine, we want the upper BITSIZE bits. */
4435 if (BYTES_BIG_ENDIAN && GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
4436 && bitsize < GET_MODE_BITSIZE (GET_MODE (temp))
4437 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE)
4438 temp = expand_shift (RSHIFT_EXPR, GET_MODE (temp), temp,
4439 size_int (GET_MODE_BITSIZE (GET_MODE (temp))
4440 - bitsize),
4441 temp, 1);
4442
bbd6cf73
RK
4443 /* Unless MODE is VOIDmode or BLKmode, convert TEMP to
4444 MODE. */
4445 if (mode != VOIDmode && mode != BLKmode
4446 && mode != TYPE_MODE (TREE_TYPE (exp)))
4447 temp = convert_modes (mode, TYPE_MODE (TREE_TYPE (exp)), temp, 1);
4448
a281e72d
RK
4449 /* If the modes of TARGET and TEMP are both BLKmode, both
4450 must be in memory and BITPOS must be aligned on a byte
4451 boundary. If so, we simply do a block copy. */
4452 if (GET_MODE (target) == BLKmode && GET_MODE (temp) == BLKmode)
4453 {
4454 if (GET_CODE (target) != MEM || GET_CODE (temp) != MEM
4455 || bitpos % BITS_PER_UNIT != 0)
4456 abort ();
4457
0086427c
RK
4458 target = change_address (target, VOIDmode,
4459 plus_constant (XEXP (target, 0),
a281e72d
RK
4460 bitpos / BITS_PER_UNIT));
4461
4462 emit_block_move (target, temp,
4463 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
4464 / BITS_PER_UNIT),
4465 1);
4466
4467 return value_mode == VOIDmode ? const0_rtx : target;
4468 }
4469
bbf6f052
RK
4470 /* Store the value in the bitfield. */
4471 store_bit_field (target, bitsize, bitpos, mode, temp, align, total_size);
4472 if (value_mode != VOIDmode)
4473 {
4474 /* The caller wants an rtx for the value. */
4475 /* If possible, avoid refetching from the bitfield itself. */
4476 if (width_mask != 0
4477 && ! (GET_CODE (target) == MEM && MEM_VOLATILE_P (target)))
5c4d7cfb 4478 {
9074de27 4479 tree count;
5c4d7cfb 4480 enum machine_mode tmode;
86a2c12a 4481
5c4d7cfb
RS
4482 if (unsignedp)
4483 return expand_and (temp, GEN_INT (width_mask), NULL_RTX);
4484 tmode = GET_MODE (temp);
86a2c12a
RS
4485 if (tmode == VOIDmode)
4486 tmode = value_mode;
5c4d7cfb
RS
4487 count = build_int_2 (GET_MODE_BITSIZE (tmode) - bitsize, 0);
4488 temp = expand_shift (LSHIFT_EXPR, tmode, temp, count, 0, 0);
4489 return expand_shift (RSHIFT_EXPR, tmode, temp, count, 0, 0);
4490 }
bbf6f052 4491 return extract_bit_field (target, bitsize, bitpos, unsignedp,
906c4e36
RK
4492 NULL_RTX, value_mode, 0, align,
4493 total_size);
bbf6f052
RK
4494 }
4495 return const0_rtx;
4496 }
4497 else
4498 {
4499 rtx addr = XEXP (target, 0);
4500 rtx to_rtx;
4501
4502 /* If a value is wanted, it must be the lhs;
4503 so make the address stable for multiple use. */
4504
4505 if (value_mode != VOIDmode && GET_CODE (addr) != REG
4506 && ! CONSTANT_ADDRESS_P (addr)
4507 /* A frame-pointer reference is already stable. */
4508 && ! (GET_CODE (addr) == PLUS
4509 && GET_CODE (XEXP (addr, 1)) == CONST_INT
4510 && (XEXP (addr, 0) == virtual_incoming_args_rtx
4511 || XEXP (addr, 0) == virtual_stack_vars_rtx)))
4512 addr = copy_to_reg (addr);
4513
4514 /* Now build a reference to just the desired component. */
4515
effbcc6a
RK
4516 to_rtx = copy_rtx (change_address (target, mode,
4517 plus_constant (addr,
4518 (bitpos
4519 / BITS_PER_UNIT))));
bbf6f052
RK
4520 MEM_IN_STRUCT_P (to_rtx) = 1;
4521
4522 return store_expr (exp, to_rtx, value_mode != VOIDmode);
4523 }
4524}
4525\f
4526/* Given an expression EXP that may be a COMPONENT_REF, a BIT_FIELD_REF,
4527 or an ARRAY_REF, look for nested COMPONENT_REFs, BIT_FIELD_REFs, or
742920c7 4528 ARRAY_REFs and find the ultimate containing object, which we return.
bbf6f052
RK
4529
4530 We set *PBITSIZE to the size in bits that we want, *PBITPOS to the
4531 bit position, and *PUNSIGNEDP to the signedness of the field.
7bb0943f
RS
4532 If the position of the field is variable, we store a tree
4533 giving the variable offset (in units) in *POFFSET.
4534 This offset is in addition to the bit position.
4535 If the position is not variable, we store 0 in *POFFSET.
839c4796
RK
4536 We set *PALIGNMENT to the alignment in bytes of the address that will be
4537 computed. This is the alignment of the thing we return if *POFFSET
 4538 is zero, but can be less strictly aligned if *POFFSET is nonzero.
bbf6f052
RK
4539
4540 If any of the extraction expressions is volatile,
4541 we store 1 in *PVOLATILEP. Otherwise we don't change that.
4542
4543 If the field is a bit-field, *PMODE is set to VOIDmode. Otherwise, it
4544 is a mode that can be used to access the field. In that case, *PBITSIZE
e7c33f54
RK
4545 is redundant.
4546
4547 If the field describes a variable-sized object, *PMODE is set to
4548 VOIDmode and *PBITSIZE is set to -1. An access cannot be made in
839c4796 4549 this case, but the address of the object can be found. */
bbf6f052
RK
4550
4551tree
4969d05d 4552get_inner_reference (exp, pbitsize, pbitpos, poffset, pmode,
839c4796 4553 punsignedp, pvolatilep, palignment)
bbf6f052
RK
4554 tree exp;
4555 int *pbitsize;
4556 int *pbitpos;
7bb0943f 4557 tree *poffset;
bbf6f052
RK
4558 enum machine_mode *pmode;
4559 int *punsignedp;
4560 int *pvolatilep;
839c4796 4561 int *palignment;
bbf6f052 4562{
b50d17a1 4563 tree orig_exp = exp;
bbf6f052
RK
4564 tree size_tree = 0;
4565 enum machine_mode mode = VOIDmode;
742920c7 4566 tree offset = integer_zero_node;
839c4796 4567 int alignment = BIGGEST_ALIGNMENT;
bbf6f052
RK
4568
4569 if (TREE_CODE (exp) == COMPONENT_REF)
4570 {
4571 size_tree = DECL_SIZE (TREE_OPERAND (exp, 1));
4572 if (! DECL_BIT_FIELD (TREE_OPERAND (exp, 1)))
4573 mode = DECL_MODE (TREE_OPERAND (exp, 1));
4574 *punsignedp = TREE_UNSIGNED (TREE_OPERAND (exp, 1));
4575 }
4576 else if (TREE_CODE (exp) == BIT_FIELD_REF)
4577 {
4578 size_tree = TREE_OPERAND (exp, 1);
4579 *punsignedp = TREE_UNSIGNED (exp);
4580 }
4581 else
4582 {
4583 mode = TYPE_MODE (TREE_TYPE (exp));
4584 *pbitsize = GET_MODE_BITSIZE (mode);
4585 *punsignedp = TREE_UNSIGNED (TREE_TYPE (exp));
4586 }
4587
4588 if (size_tree)
4589 {
4590 if (TREE_CODE (size_tree) != INTEGER_CST)
e7c33f54
RK
4591 mode = BLKmode, *pbitsize = -1;
4592 else
4593 *pbitsize = TREE_INT_CST_LOW (size_tree);
bbf6f052
RK
4594 }
4595
4596 /* Compute cumulative bit-offset for nested component-refs and array-refs,
4597 and find the ultimate containing object. */
4598
4599 *pbitpos = 0;
4600
4601 while (1)
4602 {
7bb0943f 4603 if (TREE_CODE (exp) == COMPONENT_REF || TREE_CODE (exp) == BIT_FIELD_REF)
bbf6f052 4604 {
7bb0943f
RS
4605 tree pos = (TREE_CODE (exp) == COMPONENT_REF
4606 ? DECL_FIELD_BITPOS (TREE_OPERAND (exp, 1))
4607 : TREE_OPERAND (exp, 2));
e6d8c385 4608 tree constant = integer_zero_node, var = pos;
bbf6f052 4609
e7f3c83f
RK
4610 /* If this field hasn't been filled in yet, don't go
4611 past it. This should only happen when folding expressions
4612 made during type construction. */
4613 if (pos == 0)
4614 break;
4615
e6d8c385
RK
4616 /* Assume here that the offset is a multiple of a unit.
4617 If not, there should be an explicitly added constant. */
4618 if (TREE_CODE (pos) == PLUS_EXPR
4619 && TREE_CODE (TREE_OPERAND (pos, 1)) == INTEGER_CST)
4620 constant = TREE_OPERAND (pos, 1), var = TREE_OPERAND (pos, 0);
7bb0943f 4621 else if (TREE_CODE (pos) == INTEGER_CST)
e6d8c385
RK
4622 constant = pos, var = integer_zero_node;
4623
4624 *pbitpos += TREE_INT_CST_LOW (constant);
8d8c9ba9
RK
4625 offset = size_binop (PLUS_EXPR, offset,
4626 size_binop (EXACT_DIV_EXPR, var,
4627 size_int (BITS_PER_UNIT)));
bbf6f052 4628 }
bbf6f052 4629
742920c7 4630 else if (TREE_CODE (exp) == ARRAY_REF)
bbf6f052 4631 {
742920c7
RK
4632 /* This code is based on the code in case ARRAY_REF in expand_expr
4633 below. We assume here that the size of an array element is
4634 always an integral multiple of BITS_PER_UNIT. */
4635
4636 tree index = TREE_OPERAND (exp, 1);
4637 tree domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (exp, 0)));
4638 tree low_bound
4639 = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
4640 tree index_type = TREE_TYPE (index);
ead17059 4641 tree xindex;
742920c7 4642
4c08eef0 4643 if (TYPE_PRECISION (index_type) != TYPE_PRECISION (sizetype))
742920c7 4644 {
4c08eef0
RK
4645 index = convert (type_for_size (TYPE_PRECISION (sizetype), 0),
4646 index);
742920c7
RK
4647 index_type = TREE_TYPE (index);
4648 }
4649
74a4fbfc
DB
 4650 /* Optimize the special case of a zero lower bound.
4651
4652 We convert the low_bound to sizetype to avoid some problems
4653 with constant folding. (E.g. suppose the lower bound is 1,
4654 and its mode is QI. Without the conversion, (ARRAY
4655 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
4656 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
4657
4658 But sizetype isn't quite right either (especially if
4659 the lowbound is negative). FIXME */
4660
ca0f2220 4661 if (! integer_zerop (low_bound))
74a4fbfc
DB
4662 index = fold (build (MINUS_EXPR, index_type, index,
4663 convert (sizetype, low_bound)));
ca0f2220 4664
f8dac6eb
R
4665 if (TREE_CODE (index) == INTEGER_CST)
4666 {
4667 index = convert (sbitsizetype, index);
4668 index_type = TREE_TYPE (index);
4669 }
4670
ead17059
RH
4671 xindex = fold (build (MULT_EXPR, sbitsizetype, index,
4672 convert (sbitsizetype,
4673 TYPE_SIZE (TREE_TYPE (exp)))));
742920c7 4674
ead17059
RH
4675 if (TREE_CODE (xindex) == INTEGER_CST
4676 && TREE_INT_CST_HIGH (xindex) == 0)
4677 *pbitpos += TREE_INT_CST_LOW (xindex);
742920c7 4678 else
956d6950 4679 {
ead17059
RH
4680 /* Either the bit offset calculated above is not constant, or
4681 it overflowed. In either case, redo the multiplication
4682 against the size in units. This is especially important
4683 in the non-constant case to avoid a division at runtime. */
4684 xindex = fold (build (MULT_EXPR, ssizetype, index,
4685 convert (ssizetype,
4686 TYPE_SIZE_UNIT (TREE_TYPE (exp)))));
4687
4688 if (contains_placeholder_p (xindex))
4689 xindex = build (WITH_RECORD_EXPR, sizetype, xindex, exp);
4690
4691 offset = size_binop (PLUS_EXPR, offset, xindex);
956d6950 4692 }
bbf6f052
RK
4693 }
4694 else if (TREE_CODE (exp) != NON_LVALUE_EXPR
4695 && ! ((TREE_CODE (exp) == NOP_EXPR
4696 || TREE_CODE (exp) == CONVERT_EXPR)
7f62854a
RK
4697 && ! (TREE_CODE (TREE_TYPE (exp)) == UNION_TYPE
4698 && (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0)))
4699 != UNION_TYPE))
bbf6f052
RK
4700 && (TYPE_MODE (TREE_TYPE (exp))
4701 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))))
4702 break;
7bb0943f
RS
4703
4704 /* If any reference in the chain is volatile, the effect is volatile. */
4705 if (TREE_THIS_VOLATILE (exp))
4706 *pvolatilep = 1;
839c4796
RK
4707
4708 /* If the offset is non-constant already, then we can't assume any
4709 alignment more than the alignment here. */
4710 if (! integer_zerop (offset))
4711 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4712
bbf6f052
RK
4713 exp = TREE_OPERAND (exp, 0);
4714 }
4715
839c4796
RK
4716 if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
4717 alignment = MIN (alignment, DECL_ALIGN (exp));
9293498f 4718 else if (TREE_TYPE (exp) != 0)
839c4796
RK
4719 alignment = MIN (alignment, TYPE_ALIGN (TREE_TYPE (exp)));
4720
742920c7
RK
4721 if (integer_zerop (offset))
4722 offset = 0;
4723
b50d17a1
RK
4724 if (offset != 0 && contains_placeholder_p (offset))
4725 offset = build (WITH_RECORD_EXPR, sizetype, offset, orig_exp);
4726
bbf6f052 4727 *pmode = mode;
7bb0943f 4728 *poffset = offset;
839c4796 4729 *palignment = alignment / BITS_PER_UNIT;
bbf6f052
RK
4730 return exp;
4731}
921b3427
RK
4732
 4733/* Subroutine of expand_expr: compute memory_usage from modifier. */
4734static enum memory_use_mode
4735get_memory_usage_from_modifier (modifier)
4736 enum expand_modifier modifier;
4737{
4738 switch (modifier)
4739 {
4740 case EXPAND_NORMAL:
e5e809f4 4741 case EXPAND_SUM:
921b3427
RK
4742 return MEMORY_USE_RO;
4743 break;
4744 case EXPAND_MEMORY_USE_WO:
4745 return MEMORY_USE_WO;
4746 break;
4747 case EXPAND_MEMORY_USE_RW:
4748 return MEMORY_USE_RW;
4749 break;
921b3427 4750 case EXPAND_MEMORY_USE_DONT:
e5e809f4
JL
4751 /* EXPAND_CONST_ADDRESS and EXPAND_INITIALIZER are converted into
4752 MEMORY_USE_DONT, because they are modifiers to a call of
4753 expand_expr in the ADDR_EXPR case of expand_expr. */
921b3427 4754 case EXPAND_CONST_ADDRESS:
e5e809f4 4755 case EXPAND_INITIALIZER:
921b3427
RK
4756 return MEMORY_USE_DONT;
4757 case EXPAND_MEMORY_USE_BAD:
4758 default:
4759 abort ();
4760 }
4761}
bbf6f052
RK
4762\f
4763/* Given an rtx VALUE that may contain additions and multiplications,
4764 return an equivalent value that just refers to a register or memory.
4765 This is done by generating instructions to perform the arithmetic
c45a13a6
RK
4766 and returning a pseudo-register containing the value.
4767
4768 The returned value may be a REG, SUBREG, MEM or constant. */
bbf6f052
RK
4769
4770rtx
4771force_operand (value, target)
4772 rtx value, target;
4773{
4774 register optab binoptab = 0;
4775 /* Use a temporary to force order of execution of calls to
4776 `force_operand'. */
4777 rtx tmp;
4778 register rtx op2;
4779 /* Use subtarget as the target for operand 0 of a binary operation. */
4780 register rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
4781
8b015896
RH
4782 /* Check for a PIC address load. */
4783 if (flag_pic
4784 && (GET_CODE (value) == PLUS || GET_CODE (value) == MINUS)
4785 && XEXP (value, 0) == pic_offset_table_rtx
4786 && (GET_CODE (XEXP (value, 1)) == SYMBOL_REF
4787 || GET_CODE (XEXP (value, 1)) == LABEL_REF
4788 || GET_CODE (XEXP (value, 1)) == CONST))
4789 {
4790 if (!subtarget)
4791 subtarget = gen_reg_rtx (GET_MODE (value));
4792 emit_move_insn (subtarget, value);
4793 return subtarget;
4794 }
4795
bbf6f052
RK
4796 if (GET_CODE (value) == PLUS)
4797 binoptab = add_optab;
4798 else if (GET_CODE (value) == MINUS)
4799 binoptab = sub_optab;
4800 else if (GET_CODE (value) == MULT)
4801 {
4802 op2 = XEXP (value, 1);
4803 if (!CONSTANT_P (op2)
4804 && !(GET_CODE (op2) == REG && op2 != subtarget))
4805 subtarget = 0;
4806 tmp = force_operand (XEXP (value, 0), subtarget);
4807 return expand_mult (GET_MODE (value), tmp,
906c4e36 4808 force_operand (op2, NULL_RTX),
bbf6f052
RK
4809 target, 0);
4810 }
4811
4812 if (binoptab)
4813 {
4814 op2 = XEXP (value, 1);
4815 if (!CONSTANT_P (op2)
4816 && !(GET_CODE (op2) == REG && op2 != subtarget))
4817 subtarget = 0;
4818 if (binoptab == sub_optab && GET_CODE (op2) == CONST_INT)
4819 {
4820 binoptab = add_optab;
4821 op2 = negate_rtx (GET_MODE (value), op2);
4822 }
4823
4824 /* Check for an addition with OP2 a constant integer and our first
4825 operand a PLUS of a virtual register and something else. In that
4826 case, we want to emit the sum of the virtual register and the
4827 constant first and then add the other value. This allows virtual
4828 register instantiation to simply modify the constant rather than
4829 creating another one around this addition. */
4830 if (binoptab == add_optab && GET_CODE (op2) == CONST_INT
4831 && GET_CODE (XEXP (value, 0)) == PLUS
4832 && GET_CODE (XEXP (XEXP (value, 0), 0)) == REG
4833 && REGNO (XEXP (XEXP (value, 0), 0)) >= FIRST_VIRTUAL_REGISTER
4834 && REGNO (XEXP (XEXP (value, 0), 0)) <= LAST_VIRTUAL_REGISTER)
4835 {
4836 rtx temp = expand_binop (GET_MODE (value), binoptab,
4837 XEXP (XEXP (value, 0), 0), op2,
4838 subtarget, 0, OPTAB_LIB_WIDEN);
4839 return expand_binop (GET_MODE (value), binoptab, temp,
4840 force_operand (XEXP (XEXP (value, 0), 1), 0),
4841 target, 0, OPTAB_LIB_WIDEN);
4842 }
4843
4844 tmp = force_operand (XEXP (value, 0), subtarget);
4845 return expand_binop (GET_MODE (value), binoptab, tmp,
906c4e36 4846 force_operand (op2, NULL_RTX),
bbf6f052 4847 target, 0, OPTAB_LIB_WIDEN);
8008b228 4848 /* We give UNSIGNEDP = 0 to expand_binop
bbf6f052
RK
4849 because the only operations we are expanding here are signed ones. */
4850 }
4851 return value;
4852}
4853\f
4854/* Subroutine of expand_expr:
4855 save the non-copied parts (LIST) of an expr (LHS), and return a list
4856 which can restore these values to their previous values,
4857 should something modify their storage. */
4858
4859static tree
4860save_noncopied_parts (lhs, list)
4861 tree lhs;
4862 tree list;
4863{
4864 tree tail;
4865 tree parts = 0;
4866
4867 for (tail = list; tail; tail = TREE_CHAIN (tail))
4868 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4869 parts = chainon (parts, save_noncopied_parts (lhs, TREE_VALUE (tail)));
4870 else
4871 {
4872 tree part = TREE_VALUE (tail);
4873 tree part_type = TREE_TYPE (part);
906c4e36 4874 tree to_be_saved = build (COMPONENT_REF, part_type, lhs, part);
06089a8b 4875 rtx target = assign_temp (part_type, 0, 1, 1);
bbf6f052 4876 if (! memory_address_p (TYPE_MODE (part_type), XEXP (target, 0)))
906c4e36 4877 target = change_address (target, TYPE_MODE (part_type), NULL_RTX);
bbf6f052 4878 parts = tree_cons (to_be_saved,
906c4e36
RK
4879 build (RTL_EXPR, part_type, NULL_TREE,
4880 (tree) target),
bbf6f052
RK
4881 parts);
4882 store_expr (TREE_PURPOSE (parts), RTL_EXPR_RTL (TREE_VALUE (parts)), 0);
4883 }
4884 return parts;
4885}
4886
4887/* Subroutine of expand_expr:
4888 record the non-copied parts (LIST) of an expr (LHS), and return a list
4889 which specifies the initial values of these parts. */
4890
4891static tree
4892init_noncopied_parts (lhs, list)
4893 tree lhs;
4894 tree list;
4895{
4896 tree tail;
4897 tree parts = 0;
4898
4899 for (tail = list; tail; tail = TREE_CHAIN (tail))
4900 if (TREE_CODE (TREE_VALUE (tail)) == TREE_LIST)
4901 parts = chainon (parts, init_noncopied_parts (lhs, TREE_VALUE (tail)));
4902 else
4903 {
4904 tree part = TREE_VALUE (tail);
4905 tree part_type = TREE_TYPE (part);
906c4e36 4906 tree to_be_initialized = build (COMPONENT_REF, part_type, lhs, part);
bbf6f052
RK
4907 parts = tree_cons (TREE_PURPOSE (tail), to_be_initialized, parts);
4908 }
4909 return parts;
4910}
4911
4912/* Subroutine of expand_expr: return nonzero iff there is no way that
e5e809f4
JL
4913 EXP can reference X, which is being modified. TOP_P is nonzero if this
4914 call is going to be used to determine whether we need a temporary
ff439b5f
CB
4915 for EXP, as opposed to a recursive call to this function.
4916
4917 It is always safe for this routine to return zero since it merely
4918 searches for optimization opportunities. */
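/* Hedged example, not part of the original source: if EXP is an
   INDIRECT_REF and X is a MEM, the code below returns 0, because
   the dereference might alias the memory being modified; a constant
   EXP (tree class 'c') is trivially safe and returns 1.  */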
bbf6f052
RK
4919
4920static int
e5e809f4 4921safe_from_p (x, exp, top_p)
bbf6f052
RK
4922 rtx x;
4923 tree exp;
e5e809f4 4924 int top_p;
bbf6f052
RK
4925{
4926 rtx exp_rtl = 0;
4927 int i, nops;
ff439b5f
CB
4928 static int save_expr_count;
4929 static int save_expr_size = 0;
4930 static tree *save_expr_rewritten;
4931 static tree save_expr_trees[256];
bbf6f052 4932
6676e72f
RK
4933 if (x == 0
4934 /* If EXP has varying size, we MUST use a target since we currently
8f6562d0
PB
4935 have no way of allocating temporaries of variable size
4936 (except for arrays that have TYPE_ARRAY_MAX_SIZE set).
4937 So we assume here that something at a higher level has prevented a
f4510f37 4938 clash. This is somewhat bogus, but the best we can do. Only
e5e809f4
JL
4939 do this when X is BLKmode and when we are at the top level. */
4940 || (top_p && TREE_TYPE (exp) != 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
f4510f37 4941 && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
8f6562d0
PB
4942 && (TREE_CODE (TREE_TYPE (exp)) != ARRAY_TYPE
4943 || TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)) == NULL_TREE
4944 || TREE_CODE (TYPE_ARRAY_MAX_SIZE (TREE_TYPE (exp)))
4945 != INTEGER_CST)
f4510f37 4946 && GET_MODE (x) == BLKmode))
bbf6f052
RK
4947 return 1;
4948
ff439b5f
CB
4949 if (top_p && save_expr_size == 0)
4950 {
4951 int rtn;
4952
4953 save_expr_count = 0;
4954 save_expr_size = sizeof (save_expr_trees) / sizeof (save_expr_trees[0]);
4955 save_expr_rewritten = &save_expr_trees[0];
4956
4957 rtn = safe_from_p (x, exp, 1);
4958
4959 for (i = 0; i < save_expr_count; ++i)
4960 {
4961 if (TREE_CODE (save_expr_trees[i]) != ERROR_MARK)
4962 abort ();
4963 TREE_SET_CODE (save_expr_trees[i], SAVE_EXPR);
4964 }
4965
4966 save_expr_size = 0;
4967
4968 return rtn;
4969 }
4970
bbf6f052
RK
4971 /* If this is a subreg of a hard register, declare it unsafe, otherwise,
4972 find the underlying pseudo. */
4973 if (GET_CODE (x) == SUBREG)
4974 {
4975 x = SUBREG_REG (x);
4976 if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
4977 return 0;
4978 }
4979
4980 /* If X is a location in the outgoing argument area, it is always safe. */
4981 if (GET_CODE (x) == MEM
4982 && (XEXP (x, 0) == virtual_outgoing_args_rtx
4983 || (GET_CODE (XEXP (x, 0)) == PLUS
4984 && XEXP (XEXP (x, 0), 0) == virtual_outgoing_args_rtx)))
4985 return 1;
4986
4987 switch (TREE_CODE_CLASS (TREE_CODE (exp)))
4988 {
4989 case 'd':
4990 exp_rtl = DECL_RTL (exp);
4991 break;
4992
4993 case 'c':
4994 return 1;
4995
4996 case 'x':
4997 if (TREE_CODE (exp) == TREE_LIST)
f32fd778 4998 return ((TREE_VALUE (exp) == 0
e5e809f4 4999 || safe_from_p (x, TREE_VALUE (exp), 0))
bbf6f052 5000 && (TREE_CHAIN (exp) == 0
e5e809f4 5001 || safe_from_p (x, TREE_CHAIN (exp), 0)));
5002 else if (TREE_CODE (exp) == ERROR_MARK)
5003 return 1; /* An already-visited SAVE_EXPR? */
5004 else
5005 return 0;
5006
5007 case '1':
e5e809f4 5008 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5009
5010 case '2':
5011 case '<':
5012 return (safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5013 && safe_from_p (x, TREE_OPERAND (exp, 1), 0));
5014
5015 case 'e':
5016 case 'r':
5017 /* Now do code-specific tests. EXP_RTL is set to any rtx we find in
5018 the expression. If it is set, we conflict iff we are that rtx or
5019 both are in memory. Otherwise, we check all operands of the
5020 expression recursively. */
5021
5022 switch (TREE_CODE (exp))
5023 {
5024 case ADDR_EXPR:
e44842fe 5025 return (staticp (TREE_OPERAND (exp, 0))
5026 || safe_from_p (x, TREE_OPERAND (exp, 0), 0)
5027 || TREE_STATIC (exp));
5028
5029 case INDIRECT_REF:
5030 if (GET_CODE (x) == MEM)
5031 return 0;
5032 break;
5033
5034 case CALL_EXPR:
5035 exp_rtl = CALL_EXPR_RTL (exp);
5036 if (exp_rtl == 0)
5037 {
5038 /* Assume that the call will clobber all hard registers and
5039 all of memory. */
5040 if ((GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
5041 || GET_CODE (x) == MEM)
5042 return 0;
5043 }
5044
5045 break;
5046
5047 case RTL_EXPR:
5048 /* If a sequence exists, we would have to scan every instruction
5049 in the sequence to see if it was safe. This is probably not
5050 worthwhile. */
5051 if (RTL_EXPR_SEQUENCE (exp))
5052 return 0;
5053
3bb5826a 5054 exp_rtl = RTL_EXPR_RTL (exp);
5055 break;
5056
5057 case WITH_CLEANUP_EXPR:
5058 exp_rtl = RTL_EXPR_RTL (exp);
5059 break;
5060
5dab5552 5061 case CLEANUP_POINT_EXPR:
e5e809f4 5062 return safe_from_p (x, TREE_OPERAND (exp, 0), 0);
5dab5552 5063
5064 case SAVE_EXPR:
5065 exp_rtl = SAVE_EXPR_RTL (exp);
5066 if (exp_rtl)
5067 break;
5068
5069 /* This SAVE_EXPR might appear many times in the top-level
5070 safe_from_p() expression, and if it has a complex
5071 subexpression, examining it multiple times could result
5072 in a combinatorial explosion. E.g. on an Alpha
5073 running at least 200MHz, a Fortran test case compiled with
5074 optimization took about 28 minutes to compile -- even though
5075 it was only a few lines long, and the complicated line causing
5076 so much time to be spent in the earlier version of safe_from_p()
5077 had only 293 or so unique nodes.
5078
5079 So, turn this SAVE_EXPR into an ERROR_MARK for now, but remember
5080 where it is so we can turn it back in the top-level safe_from_p()
5081 when we're done. */
5082
5083 /* For now, don't bother re-sizing the array. */
5084 if (save_expr_count >= save_expr_size)
5085 return 0;
5086 save_expr_rewritten[save_expr_count++] = exp;
5087
5088 nops = tree_code_length[(int) SAVE_EXPR];
5089 for (i = 0; i < nops; i++)
5090 {
5091 tree operand = TREE_OPERAND (exp, i);
5092 if (operand == NULL_TREE)
5093 continue;
5094 TREE_SET_CODE (exp, ERROR_MARK);
5095 if (!safe_from_p (x, operand, 0))
5096 return 0;
5097 TREE_SET_CODE (exp, SAVE_EXPR);
5098 }
5099 TREE_SET_CODE (exp, ERROR_MARK);
ff439b5f 5100 return 1;
bbf6f052 5101
5102 case BIND_EXPR:
5103 /* The only operand we look at is operand 1. The rest aren't
5104 part of the expression. */
e5e809f4 5105 return safe_from_p (x, TREE_OPERAND (exp, 1), 0);
8129842c 5106
bbf6f052 5107 case METHOD_CALL_EXPR:
0f41302f 5108 /* This takes a rtx argument, but shouldn't appear here. */
bbf6f052 5109 abort ();
5110
5111 default:
5112 break;
5113 }
5114
5115 /* If we have an rtx, we do not need to scan our operands. */
5116 if (exp_rtl)
5117 break;
5118
5119 nops = tree_code_length[(int) TREE_CODE (exp)];
5120 for (i = 0; i < nops; i++)
5121 if (TREE_OPERAND (exp, i) != 0
e5e809f4 5122 && ! safe_from_p (x, TREE_OPERAND (exp, i), 0))
5123 return 0;
5124 }
5125
5126 /* If we have an rtl, find any enclosed object. Then see if we conflict
5127 with it. */
5128 if (exp_rtl)
5129 {
5130 if (GET_CODE (exp_rtl) == SUBREG)
5131 {
5132 exp_rtl = SUBREG_REG (exp_rtl);
5133 if (GET_CODE (exp_rtl) == REG
5134 && REGNO (exp_rtl) < FIRST_PSEUDO_REGISTER)
5135 return 0;
5136 }
5137
5138 /* If the rtl is X, then it is not safe. Otherwise, it is unless both
5139 are memory and EXP is not readonly. */
5140 return ! (rtx_equal_p (x, exp_rtl)
5141 || (GET_CODE (x) == MEM && GET_CODE (exp_rtl) == MEM
5142 && ! TREE_READONLY (exp)));
5143 }
5144
5145 /* If we reach here, it is safe. */
5146 return 1;
5147}
5148
5149/* Subroutine of expand_expr: return nonzero iff EXP is an
5150 expression whose type is statically determinable. */
5151
5152static int
5153fixed_type_p (exp)
5154 tree exp;
5155{
5156 if (TREE_CODE (exp) == PARM_DECL
5157 || TREE_CODE (exp) == VAR_DECL
5158 || TREE_CODE (exp) == CALL_EXPR || TREE_CODE (exp) == TARGET_EXPR
5159 || TREE_CODE (exp) == COMPONENT_REF
5160 || TREE_CODE (exp) == ARRAY_REF)
5161 return 1;
5162 return 0;
5163}
5164
5165/* Subroutine of expand_expr: return rtx if EXP is a
5166 variable or parameter; else return 0. */
5167
5168static rtx
5169var_rtx (exp)
5170 tree exp;
5171{
5172 STRIP_NOPS (exp);
5173 switch (TREE_CODE (exp))
5174 {
5175 case PARM_DECL:
5176 case VAR_DECL:
5177 return DECL_RTL (exp);
5178 default:
5179 return 0;
5180 }
5181}
5182
5183#ifdef MAX_INTEGER_COMPUTATION_MODE
5184void
5185check_max_integer_computation_mode (exp)
5186 tree exp;
5187{
5188 enum tree_code code = TREE_CODE (exp);
5189 enum machine_mode mode;
5190
5191 /* We must allow conversions of constants to MAX_INTEGER_COMPUTATION_MODE. */
5192 if (code == NOP_EXPR
5193 && TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
5194 return;
5195
5196 /* First check the type of the overall operation. We need only look at
5197 unary, binary and relational operations. */
5198 if (TREE_CODE_CLASS (code) == '1'
5199 || TREE_CODE_CLASS (code) == '2'
5200 || TREE_CODE_CLASS (code) == '<')
5201 {
5202 mode = TYPE_MODE (TREE_TYPE (exp));
5203 if (GET_MODE_CLASS (mode) == MODE_INT
5204 && mode > MAX_INTEGER_COMPUTATION_MODE)
5205 fatal ("unsupported wide integer operation");
5206 }
5207
5208 /* Check operand of a unary op. */
5209 if (TREE_CODE_CLASS (code) == '1')
5210 {
5211 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5212 if (GET_MODE_CLASS (mode) == MODE_INT
5213 && mode > MAX_INTEGER_COMPUTATION_MODE)
5214 fatal ("unsupported wide integer operation");
5215 }
5216
5217 /* Check operands of a binary/comparison op. */
5218 if (TREE_CODE_CLASS (code) == '2' || TREE_CODE_CLASS (code) == '<')
5219 {
5220 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
5221 if (GET_MODE_CLASS (mode) == MODE_INT
5222 && mode > MAX_INTEGER_COMPUTATION_MODE)
5223 fatal ("unsupported wide integer operation");
5224
5225 mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1)));
5226 if (GET_MODE_CLASS (mode) == MODE_INT
5227 && mode > MAX_INTEGER_COMPUTATION_MODE)
5228 fatal ("unsupported wide integer operation");
5229 }
5230}
5231#endif
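/* Editorial note: MAX_INTEGER_COMPUTATION_MODE is a target macro; a
   port that cannot do arithmetic wider than one word might put, for
   example (hypothetical tm.h fragment):

     #define MAX_INTEGER_COMPUTATION_MODE SImode

   in its target header, after which any wider integer operation that
   reaches the expander stops with the fatal error tested above. */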
5232
5233\f
5234/* expand_expr: generate code for computing expression EXP.
5235 An rtx for the computed value is returned. The value is never null.
5236 In the case of a void EXP, const0_rtx is returned.
5237
5238 The value may be stored in TARGET if TARGET is nonzero.
5239 TARGET is just a suggestion; callers must assume that
5240 the rtx returned may not be the same as TARGET.
5241
5242 If TARGET is CONST0_RTX, it means that the value will be ignored.
5243
5244 If TMODE is not VOIDmode, it suggests generating the
5245 result in mode TMODE. But this is done only when convenient.
5246 Otherwise, TMODE is ignored and the value generated in its natural mode.
5247 TMODE is just a suggestion; callers must assume that
5248 the rtx returned may not have mode TMODE.
5249
5250 Note that TARGET may have neither TMODE nor MODE. In that case, it
5251 probably will not be used.
5252
5253 If MODIFIER is EXPAND_SUM then when EXP is an addition
5254 we can return an rtx of the form (MULT (REG ...) (CONST_INT ...))
5255 or a nest of (PLUS ...) and (MINUS ...) where the terms are
5256 products as above, or REG or MEM, or constant.
5257 Ordinarily in such cases we would output mul or add instructions
5258 and then return a pseudo reg containing the sum.
5259
5260 EXPAND_INITIALIZER is much like EXPAND_SUM except that
5261 it also marks a label as absolutely required (it can't be dead).
26fcb35a 5262 It also makes a ZERO_EXTEND or SIGN_EXTEND instead of emitting extend insns.
5263 This is used for outputting expressions used in initializers.
5264
5265 EXPAND_CONST_ADDRESS says that it is okay to return a MEM
5266 with a constant address even if that address is not normally legitimate.
5267 EXPAND_INITIALIZER and EXPAND_SUM also have this effect. */
5268
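/* Editorial example: because TARGET and TMODE are only suggestions, a
   caller that needs the value in a particular register must copy it
   explicitly, along the lines of:

     rtx val = expand_expr (exp, target, mode, EXPAND_NORMAL);
     if (val != target)
       emit_move_insn (target, val);

   Assuming anything stronger about the returned rtx is an error. */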
5269rtx
5270expand_expr (exp, target, tmode, modifier)
5271 register tree exp;
5272 rtx target;
5273 enum machine_mode tmode;
5274 enum expand_modifier modifier;
5275{
5276 /* Chain of pending expressions for PLACEHOLDER_EXPR to replace.
5277 This is static so it will be accessible to our recursive callees. */
5278 static tree placeholder_list = 0;
5279 register rtx op0, op1, temp;
5280 tree type = TREE_TYPE (exp);
5281 int unsignedp = TREE_UNSIGNED (type);
5282 register enum machine_mode mode = TYPE_MODE (type);
5283 register enum tree_code code = TREE_CODE (exp);
5284 optab this_optab;
5285 /* Use subtarget as the target for operand 0 of a binary operation. */
5286 rtx subtarget = (target != 0 && GET_CODE (target) == REG ? target : 0);
5287 rtx original_target = target;
5288 int ignore = (target == const0_rtx
5289 || ((code == NON_LVALUE_EXPR || code == NOP_EXPR
5290 || code == CONVERT_EXPR || code == REFERENCE_EXPR
5291 || code == COND_EXPR)
dd27116b 5292 && TREE_CODE (type) == VOID_TYPE));
bbf6f052 5293 tree context;
5294 /* Used by check-memory-usage to make modifier read only. */
5295 enum expand_modifier ro_modifier;
bbf6f052 5296
5297 /* Make a read-only version of the modifier. */
5298 if (modifier == EXPAND_NORMAL || modifier == EXPAND_SUM
5299 || modifier == EXPAND_CONST_ADDRESS || modifier == EXPAND_INITIALIZER)
5300 ro_modifier = modifier;
5301 else
5302 ro_modifier = EXPAND_NORMAL;
ca695ac9 5303
5304 /* Don't use hard regs as subtargets, because the combiner
5305 can only handle pseudo regs. */
5306 if (subtarget && REGNO (subtarget) < FIRST_PSEUDO_REGISTER)
5307 subtarget = 0;
5308 /* Avoid subtargets inside loops,
5309 since they hide some invariant expressions. */
5310 if (preserve_subexpressions_p ())
5311 subtarget = 0;
5312
5313 /* If we are going to ignore this result, we need only do something
5314 if there is a side-effect somewhere in the expression. If there
5315 is, short-circuit the most common cases here. Note that we must
5316 not call expand_expr with anything but const0_rtx in case this
5317 is an initial expansion of a size that contains a PLACEHOLDER_EXPR. */
bbf6f052 5318
5319 if (ignore)
5320 {
5321 if (! TREE_SIDE_EFFECTS (exp))
5322 return const0_rtx;
5323
5324 /* Ensure we reference a volatile object even if value is ignored. */
5325 if (TREE_THIS_VOLATILE (exp)
5326 && TREE_CODE (exp) != FUNCTION_DECL
5327 && mode != VOIDmode && mode != BLKmode)
5328 {
921b3427 5329 temp = expand_expr (exp, NULL_RTX, VOIDmode, ro_modifier);
5330 if (GET_CODE (temp) == MEM)
5331 temp = copy_to_reg (temp);
5332 return const0_rtx;
5333 }
5334
5335 if (TREE_CODE_CLASS (code) == '1')
5336 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 5337 VOIDmode, ro_modifier);
5338 else if (TREE_CODE_CLASS (code) == '2'
5339 || TREE_CODE_CLASS (code) == '<')
5340 {
5341 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, ro_modifier);
5342 expand_expr (TREE_OPERAND (exp, 1), const0_rtx, VOIDmode, ro_modifier);
5343 return const0_rtx;
5344 }
5345 else if ((code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
5346 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 1)))
5347 /* If the second operand has no side effects, just evaluate
0f41302f 5348 the first. */
dd27116b 5349 return expand_expr (TREE_OPERAND (exp, 0), const0_rtx,
921b3427 5350 VOIDmode, ro_modifier);
dd27116b 5351
90764a87 5352 target = 0;
dd27116b 5353 }
bbf6f052 5354
dbecbbe4 5355#ifdef MAX_INTEGER_COMPUTATION_MODE
5356 if (target
5357 && TREE_CODE (exp) != INTEGER_CST
5358 && TREE_CODE (exp) != PARM_DECL
5359 && TREE_CODE (exp) != VAR_DECL)
5360 {
5361 enum machine_mode mode = GET_MODE (target);
5362
5363 if (GET_MODE_CLASS (mode) == MODE_INT
5364 && mode > MAX_INTEGER_COMPUTATION_MODE)
5365 fatal ("unsupported wide integer operation");
5366 }
5367
71bca506 5368 if (TREE_CODE (exp) != INTEGER_CST
5369 && TREE_CODE (exp) != PARM_DECL
5370 && TREE_CODE (exp) != VAR_DECL
71bca506 5371 && GET_MODE_CLASS (tmode) == MODE_INT
5372 && tmode > MAX_INTEGER_COMPUTATION_MODE)
5373 fatal ("unsupported wide integer operation");
5374
5375 check_max_integer_computation_mode (exp);
5376#endif
5377
5378 /* If will do cse, generate all results into pseudo registers
5379 since 1) that allows cse to find more things
5380 and 2) otherwise cse could produce an insn the machine
5381 cannot support. */
5382
5383 if (! cse_not_expected && mode != BLKmode && target
5384 && (GET_CODE (target) != REG || REGNO (target) < FIRST_PSEUDO_REGISTER))
5385 target = subtarget;
5386
5387 switch (code)
5388 {
5389 case LABEL_DECL:
5390 {
5391 tree function = decl_function_context (exp);
5392 /* Handle using a label in a containing function. */
5393 if (function != current_function_decl
5394 && function != inline_function_decl && function != 0)
5395 {
5396 struct function *p = find_function_data (function);
5397 /* Allocate in the memory associated with the function
5398 that the label is in. */
5399 push_obstacks (p->function_obstack,
5400 p->function_maybepermanent_obstack);
5401
5402 p->forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5403 label_rtx (exp),
5404 p->forced_labels);
5405 pop_obstacks ();
5406 }
5407 else if (modifier == EXPAND_INITIALIZER)
5408 forced_labels = gen_rtx_EXPR_LIST (VOIDmode,
5409 label_rtx (exp), forced_labels);
5410 temp = gen_rtx_MEM (FUNCTION_MODE,
5411 gen_rtx_LABEL_REF (Pmode, label_rtx (exp)));
5412 if (function != current_function_decl
5413 && function != inline_function_decl && function != 0)
5414 LABEL_REF_NONLOCAL_P (XEXP (temp, 0)) = 1;
5415 return temp;
b552441b 5416 }
5417
5418 case PARM_DECL:
5419 if (DECL_RTL (exp) == 0)
5420 {
5421 error_with_decl (exp, "prior parameter's size depends on `%s'");
4af3895e 5422 return CONST0_RTX (mode);
5423 }
5424
0f41302f 5425 /* ... fall through ... */
d6a5ac33 5426
bbf6f052 5427 case VAR_DECL:
5428 /* If a static var's type was incomplete when the decl was written,
5429 but the type is complete now, lay out the decl now. */
5430 if (DECL_SIZE (exp) == 0 && TYPE_SIZE (TREE_TYPE (exp)) != 0
5431 && (TREE_STATIC (exp) || DECL_EXTERNAL (exp)))
5432 {
5433 push_obstacks_nochange ();
5434 end_temporary_allocation ();
5435 layout_decl (exp, 0);
5436 PUT_MODE (DECL_RTL (exp), DECL_MODE (exp));
5437 pop_obstacks ();
5438 }
d6a5ac33 5439
5440 /* Only check automatic variables. Currently, function arguments are
5441 not checked (this can be done at compile-time with prototypes).
5442 Aggregates are not checked. */
5443 if (flag_check_memory_usage && code == VAR_DECL
5444 && GET_CODE (DECL_RTL (exp)) == MEM
5445 && DECL_CONTEXT (exp) != NULL_TREE
5446 && ! TREE_STATIC (exp)
5447 && ! AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5448 {
5449 enum memory_use_mode memory_usage;
5450 memory_usage = get_memory_usage_from_modifier (modifier);
5451
5452 if (memory_usage != MEMORY_USE_DONT)
5453 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5454 XEXP (DECL_RTL (exp), 0), ptr_mode,
5455 GEN_INT (int_size_in_bytes (type)),
5456 TYPE_MODE (sizetype),
5457 GEN_INT (memory_usage),
5458 TYPE_MODE (integer_type_node));
5459 }
5460
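/* Editorial note: the emit_library_call above compiles down to a call
   into the GNU Checker runtime; conceptually (names hedged, sketch
   only) each checked access performs

     chkr_check_addr (address, size_in_bytes, access_mode);

   before the program touches the memory in question. */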
0f41302f 5461 /* ... fall through ... */
d6a5ac33 5462
2dca20cd 5463 case FUNCTION_DECL:
5464 case RESULT_DECL:
5465 if (DECL_RTL (exp) == 0)
5466 abort ();
d6a5ac33 5467
5468 /* Ensure variable marked as used even if it doesn't go through
 5469 a parser. If it hasn't been used yet, write out an external
5470 definition. */
5471 if (! TREE_USED (exp))
5472 {
5473 assemble_external (exp);
5474 TREE_USED (exp) = 1;
5475 }
5476
5477 /* Show we haven't gotten RTL for this yet. */
5478 temp = 0;
5479
5480 /* Handle variables inherited from containing functions. */
5481 context = decl_function_context (exp);
5482
5483 /* We treat inline_function_decl as an alias for the current function
5484 because that is the inline function whose vars, types, etc.
5485 are being merged into the current function.
5486 See expand_inline_function. */
d6a5ac33 5487
5488 if (context != 0 && context != current_function_decl
5489 && context != inline_function_decl
5490 /* If var is static, we don't need a static chain to access it. */
5491 && ! (GET_CODE (DECL_RTL (exp)) == MEM
5492 && CONSTANT_P (XEXP (DECL_RTL (exp), 0))))
5493 {
5494 rtx addr;
5495
5496 /* Mark as non-local and addressable. */
81feeecb 5497 DECL_NONLOCAL (exp) = 1;
5498 if (DECL_NO_STATIC_CHAIN (current_function_decl))
5499 abort ();
5500 mark_addressable (exp);
5501 if (GET_CODE (DECL_RTL (exp)) != MEM)
5502 abort ();
5503 addr = XEXP (DECL_RTL (exp), 0);
5504 if (GET_CODE (addr) == MEM)
5505 addr = gen_rtx_MEM (Pmode,
5506 fix_lexical_addr (XEXP (addr, 0), exp));
5507 else
5508 addr = fix_lexical_addr (addr, exp);
dc6d66b3 5509 temp = change_address (DECL_RTL (exp), mode, addr);
bbf6f052 5510 }
4af3895e 5511
5512 /* This is the case of an array whose size is to be determined
5513 from its initializer, while the initializer is still being parsed.
5514 See expand_decl. */
d6a5ac33 5515
5516 else if (GET_CODE (DECL_RTL (exp)) == MEM
5517 && GET_CODE (XEXP (DECL_RTL (exp), 0)) == REG)
5518 temp = change_address (DECL_RTL (exp), GET_MODE (DECL_RTL (exp)),
bbf6f052 5519 XEXP (DECL_RTL (exp), 0));
5520
5521 /* If DECL_RTL is memory, we are in the normal case and either
5522 the address is not valid or it is not a register and -fforce-addr
5523 is specified, get the address into a register. */
5524
5525 else if (GET_CODE (DECL_RTL (exp)) == MEM
5526 && modifier != EXPAND_CONST_ADDRESS
5527 && modifier != EXPAND_SUM
5528 && modifier != EXPAND_INITIALIZER
5529 && (! memory_address_p (DECL_MODE (exp),
5530 XEXP (DECL_RTL (exp), 0))
5531 || (flag_force_addr
5532 && GET_CODE (XEXP (DECL_RTL (exp), 0)) != REG)))
5533 temp = change_address (DECL_RTL (exp), VOIDmode,
d6a5ac33 5534 copy_rtx (XEXP (DECL_RTL (exp), 0)));
1499e0a8 5535
 5536	      /* If we got something, return it.  But first, set the alignment
 5537	 if the address is a register.  */
5538 if (temp != 0)
5539 {
5540 if (GET_CODE (temp) == MEM && GET_CODE (XEXP (temp, 0)) == REG)
5541 mark_reg_pointer (XEXP (temp, 0),
5542 DECL_ALIGN (exp) / BITS_PER_UNIT);
5543
5544 return temp;
5545 }
5546
5547 /* If the mode of DECL_RTL does not match that of the decl, it
5548 must be a promoted value. We return a SUBREG of the wanted mode,
5549 but mark it so that we know that it was already extended. */
5550
5551 if (GET_CODE (DECL_RTL (exp)) == REG
5552 && GET_MODE (DECL_RTL (exp)) != mode)
5553 {
5554 /* Get the signedness used for this variable. Ensure we get the
5555 same mode we got when the variable was declared. */
5556 if (GET_MODE (DECL_RTL (exp))
5557 != promote_mode (type, DECL_MODE (exp), &unsignedp, 0))
5558 abort ();
5559
38a448ca 5560 temp = gen_rtx_SUBREG (mode, DECL_RTL (exp), 0);
5561 SUBREG_PROMOTED_VAR_P (temp) = 1;
5562 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5563 return temp;
5564 }
5565
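/* Editorial example: on a target whose PROMOTE_MODE widens QImode
   locals to SImode, a `char' variable may live in (reg:SI 42); the
   code above then returns

     (subreg:QI (reg:SI 42) 0)

   with SUBREG_PROMOTED_VAR_P set, so later passes know the high bits
   already hold a valid sign- or zero-extension of the value. */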
5566 return DECL_RTL (exp);
5567
5568 case INTEGER_CST:
5569 return immed_double_const (TREE_INT_CST_LOW (exp),
5570 TREE_INT_CST_HIGH (exp),
5571 mode);
5572
5573 case CONST_DECL:
5574 return expand_expr (DECL_INITIAL (exp), target, VOIDmode,
5575 EXPAND_MEMORY_USE_BAD);
5576
5577 case REAL_CST:
5578 /* If optimized, generate immediate CONST_DOUBLE
5579 which will be turned into memory by reload if necessary.
5580
5581 We used to force a register so that loop.c could see it. But
5582 this does not allow gen_* patterns to perform optimizations with
5583 the constants. It also produces two insns in cases like "x = 1.0;".
5584 On most machines, floating-point constants are not permitted in
5585 many insns, so we'd end up copying it to a register in any case.
5586
5587 Now, we do the copying in expand_binop, if appropriate. */
5588 return immed_real_const (exp);
5589
5590 case COMPLEX_CST:
5591 case STRING_CST:
5592 if (! TREE_CST_RTL (exp))
5593 output_constant_def (exp);
5594
5595 /* TREE_CST_RTL probably contains a constant address.
5596 On RISC machines where a constant address isn't valid,
5597 make some insns to get that address into a register. */
5598 if (GET_CODE (TREE_CST_RTL (exp)) == MEM
5599 && modifier != EXPAND_CONST_ADDRESS
5600 && modifier != EXPAND_INITIALIZER
5601 && modifier != EXPAND_SUM
5602 && (! memory_address_p (mode, XEXP (TREE_CST_RTL (exp), 0))
5603 || (flag_force_addr
5604 && GET_CODE (XEXP (TREE_CST_RTL (exp), 0)) != REG)))
5605 return change_address (TREE_CST_RTL (exp), VOIDmode,
5606 copy_rtx (XEXP (TREE_CST_RTL (exp), 0)));
5607 return TREE_CST_RTL (exp);
5608
bf1e5319 5609 case EXPR_WITH_FILE_LOCATION:
5610 {
5611 rtx to_return;
5612 char *saved_input_filename = input_filename;
5613 int saved_lineno = lineno;
5614 input_filename = EXPR_WFL_FILENAME (exp);
5615 lineno = EXPR_WFL_LINENO (exp);
5616 if (EXPR_WFL_EMIT_LINE_NOTE (exp))
5617 emit_line_note (input_filename, lineno);
 5618	/* Possibly avoid switching back and forth here.  */
5619 to_return = expand_expr (EXPR_WFL_NODE (exp), target, tmode, modifier);
5620 input_filename = saved_input_filename;
5621 lineno = saved_lineno;
5622 return to_return;
5623 }
bf1e5319 5624
5625 case SAVE_EXPR:
5626 context = decl_function_context (exp);
d6a5ac33 5627
5628 /* If this SAVE_EXPR was at global context, assume we are an
5629 initialization function and move it into our context. */
5630 if (context == 0)
5631 SAVE_EXPR_CONTEXT (exp) = current_function_decl;
5632
5633 /* We treat inline_function_decl as an alias for the current function
5634 because that is the inline function whose vars, types, etc.
5635 are being merged into the current function.
5636 See expand_inline_function. */
5637 if (context == current_function_decl || context == inline_function_decl)
5638 context = 0;
5639
5640 /* If this is non-local, handle it. */
5641 if (context)
5642 {
5643 /* The following call just exists to abort if the context is
5644 not of a containing function. */
5645 find_function_data (context);
5646
5647 temp = SAVE_EXPR_RTL (exp);
5648 if (temp && GET_CODE (temp) == REG)
5649 {
5650 put_var_into_stack (exp);
5651 temp = SAVE_EXPR_RTL (exp);
5652 }
5653 if (temp == 0 || GET_CODE (temp) != MEM)
5654 abort ();
5655 return change_address (temp, mode,
5656 fix_lexical_addr (XEXP (temp, 0), exp));
5657 }
5658 if (SAVE_EXPR_RTL (exp) == 0)
5659 {
5660 if (mode == VOIDmode)
5661 temp = const0_rtx;
5662 else
e5e809f4 5663 temp = assign_temp (type, 3, 0, 0);
1499e0a8 5664
bbf6f052 5665 SAVE_EXPR_RTL (exp) = temp;
bbf6f052 5666 if (!optimize && GET_CODE (temp) == REG)
5667 save_expr_regs = gen_rtx_EXPR_LIST (VOIDmode, temp,
5668 save_expr_regs);
5669
5670 /* If the mode of TEMP does not match that of the expression, it
5671 must be a promoted value. We pass store_expr a SUBREG of the
5672 wanted mode but mark it so that we know that it was already
5673 extended. Note that `unsignedp' was modified above in
5674 this case. */
5675
5676 if (GET_CODE (temp) == REG && GET_MODE (temp) != mode)
5677 {
38a448ca 5678 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5679 SUBREG_PROMOTED_VAR_P (temp) = 1;
5680 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5681 }
5682
4c7a0be9 5683 if (temp == const0_rtx)
5684 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
5685 EXPAND_MEMORY_USE_BAD);
5686 else
5687 store_expr (TREE_OPERAND (exp, 0), temp, 0);
5688
5689 TREE_USED (exp) = 1;
bbf6f052 5690 }
5691
5692 /* If the mode of SAVE_EXPR_RTL does not match that of the expression, it
5693 must be a promoted value. We return a SUBREG of the wanted mode,
0f41302f 5694 but mark it so that we know that it was already extended. */
5695
5696 if (GET_CODE (SAVE_EXPR_RTL (exp)) == REG
5697 && GET_MODE (SAVE_EXPR_RTL (exp)) != mode)
5698 {
5699 /* Compute the signedness and make the proper SUBREG. */
5700 promote_mode (type, mode, &unsignedp, 0);
38a448ca 5701 temp = gen_rtx_SUBREG (mode, SAVE_EXPR_RTL (exp), 0);
5702 SUBREG_PROMOTED_VAR_P (temp) = 1;
5703 SUBREG_PROMOTED_UNSIGNED_P (temp) = unsignedp;
5704 return temp;
5705 }
5706
5707 return SAVE_EXPR_RTL (exp);
5708
5709 case UNSAVE_EXPR:
5710 {
5711 rtx temp;
5712 temp = expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
5713 TREE_OPERAND (exp, 0) = unsave_expr_now (TREE_OPERAND (exp, 0));
5714 return temp;
5715 }
5716
b50d17a1 5717 case PLACEHOLDER_EXPR:
5718 {
5719 tree placeholder_expr;
5720
5721 /* If there is an object on the head of the placeholder list,
e5e809f4 5722	   see if some object in it is of type TYPE or a pointer to it.  For
5723 further information, see tree.def. */
5724 for (placeholder_expr = placeholder_list;
5725 placeholder_expr != 0;
5726 placeholder_expr = TREE_CHAIN (placeholder_expr))
5727 {
5728 tree need_type = TYPE_MAIN_VARIANT (type);
5729 tree object = 0;
5730 tree old_list = placeholder_list;
5731 tree elt;
5732
5733 /* Find the outermost reference that is of the type we want.
5734 If none, see if any object has a type that is a pointer to
5735 the type we want. */
5736 for (elt = TREE_PURPOSE (placeholder_expr);
5737 elt != 0 && object == 0;
5738 elt
5739 = ((TREE_CODE (elt) == COMPOUND_EXPR
5740 || TREE_CODE (elt) == COND_EXPR)
5741 ? TREE_OPERAND (elt, 1)
5742 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5743 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5744 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5745 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5746 ? TREE_OPERAND (elt, 0) : 0))
5747 if (TYPE_MAIN_VARIANT (TREE_TYPE (elt)) == need_type)
5748 object = elt;
e9a25f70 5749
e9a25f70 5750 for (elt = TREE_PURPOSE (placeholder_expr);
5751 elt != 0 && object == 0;
5752 elt
5753 = ((TREE_CODE (elt) == COMPOUND_EXPR
5754 || TREE_CODE (elt) == COND_EXPR)
5755 ? TREE_OPERAND (elt, 1)
5756 : (TREE_CODE_CLASS (TREE_CODE (elt)) == 'r'
5757 || TREE_CODE_CLASS (TREE_CODE (elt)) == '1'
5758 || TREE_CODE_CLASS (TREE_CODE (elt)) == '2'
5759 || TREE_CODE_CLASS (TREE_CODE (elt)) == 'e')
5760 ? TREE_OPERAND (elt, 0) : 0))
5761 if (POINTER_TYPE_P (TREE_TYPE (elt))
5762 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (elt)))
e9a25f70 5763 == need_type))
e5e809f4 5764 object = build1 (INDIRECT_REF, need_type, elt);
dc500fa1 5765
e9a25f70 5766 if (object != 0)
2cde2255 5767 {
5768 /* Expand this object skipping the list entries before
5769 it was found in case it is also a PLACEHOLDER_EXPR.
5770 In that case, we want to translate it using subsequent
5771 entries. */
5772 placeholder_list = TREE_CHAIN (placeholder_expr);
5773 temp = expand_expr (object, original_target, tmode,
5774 ro_modifier);
5775 placeholder_list = old_list;
5776 return temp;
2cde2255 5777 }
5778 }
5779 }
5780
5781 /* We can't find the object or there was a missing WITH_RECORD_EXPR. */
5782 abort ();
5783
5784 case WITH_RECORD_EXPR:
5785 /* Put the object on the placeholder list, expand our first operand,
5786 and pop the list. */
5787 placeholder_list = tree_cons (TREE_OPERAND (exp, 1), NULL_TREE,
5788 placeholder_list);
5789 target = expand_expr (TREE_OPERAND (exp, 0), original_target,
921b3427 5790 tmode, ro_modifier);
5791 placeholder_list = TREE_CHAIN (placeholder_list);
5792 return target;
5793
bbf6f052 5794 case EXIT_EXPR:
5795 expand_exit_loop_if_false (NULL_PTR,
5796 invert_truthvalue (TREE_OPERAND (exp, 0)));
5797 return const0_rtx;
5798
5799 case LOOP_EXPR:
0088fcb1 5800 push_temp_slots ();
5801 expand_start_loop (1);
5802 expand_expr_stmt (TREE_OPERAND (exp, 0));
5803 expand_end_loop ();
0088fcb1 5804 pop_temp_slots ();
5805
5806 return const0_rtx;
5807
5808 case BIND_EXPR:
5809 {
5810 tree vars = TREE_OPERAND (exp, 0);
5811 int vars_need_expansion = 0;
5812
5813 /* Need to open a binding contour here because
e976b8b2 5814 if there are any cleanups they must be contained here. */
5815 expand_start_bindings (0);
5816
5817 /* Mark the corresponding BLOCK for output in its proper place. */
5818 if (TREE_OPERAND (exp, 2) != 0
5819 && ! TREE_USED (TREE_OPERAND (exp, 2)))
5820 insert_block (TREE_OPERAND (exp, 2));
5821
5822 /* If VARS have not yet been expanded, expand them now. */
5823 while (vars)
5824 {
5825 if (DECL_RTL (vars) == 0)
5826 {
5827 vars_need_expansion = 1;
5828 expand_decl (vars);
5829 }
5830 expand_decl_init (vars);
5831 vars = TREE_CHAIN (vars);
5832 }
5833
921b3427 5834 temp = expand_expr (TREE_OPERAND (exp, 1), target, tmode, ro_modifier);
5835
5836 expand_end_bindings (TREE_OPERAND (exp, 0), 0, 0);
5837
5838 return temp;
5839 }
5840
5841 case RTL_EXPR:
5842 if (RTL_EXPR_SEQUENCE (exp))
5843 {
5844 if (RTL_EXPR_SEQUENCE (exp) == const0_rtx)
5845 abort ();
5846 emit_insns (RTL_EXPR_SEQUENCE (exp));
5847 RTL_EXPR_SEQUENCE (exp) = const0_rtx;
5848 }
99310285 5849 preserve_rtl_expr_result (RTL_EXPR_RTL (exp));
ca814259 5850 free_temps_for_rtl_expr (exp);
5851 return RTL_EXPR_RTL (exp);
5852
5853 case CONSTRUCTOR:
5854 /* If we don't need the result, just ensure we evaluate any
5855 subexpressions. */
5856 if (ignore)
5857 {
5858 tree elt;
5859 for (elt = CONSTRUCTOR_ELTS (exp); elt; elt = TREE_CHAIN (elt))
5860 expand_expr (TREE_VALUE (elt), const0_rtx, VOIDmode,
5861 EXPAND_MEMORY_USE_BAD);
5862 return const0_rtx;
5863 }
3207b172 5864
5865 /* All elts simple constants => refer to a constant in memory. But
5866 if this is a non-BLKmode mode, let it store a field at a time
5867 since that should make a CONST_INT or CONST_DOUBLE when we
3207b172 5868 fold. Likewise, if we have a target we can use, it is best to
5869 store directly into the target unless the type is large enough
5870 that memcpy will be used. If we are making an initializer and
3207b172 5871 all operands are constant, put it in memory as well. */
dd27116b 5872 else if ((TREE_STATIC (exp)
3207b172 5873 && ((mode == BLKmode
e5e809f4 5874 && ! (target != 0 && safe_from_p (target, exp, 1)))
5875 || TREE_ADDRESSABLE (exp)
5876 || (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5877 && (move_by_pieces_ninsns
5878 (TREE_INT_CST_LOW (TYPE_SIZE (type))/BITS_PER_UNIT,
5879 TYPE_ALIGN (type) / BITS_PER_UNIT)
5880 > MOVE_RATIO)
5881 && ! mostly_zeros_p (exp))))
dd27116b 5882 || (modifier == EXPAND_INITIALIZER && TREE_CONSTANT (exp)))
5883 {
5884 rtx constructor = output_constant_def (exp);
5885 if (modifier != EXPAND_CONST_ADDRESS
5886 && modifier != EXPAND_INITIALIZER
5887 && modifier != EXPAND_SUM
5888 && (! memory_address_p (GET_MODE (constructor),
5889 XEXP (constructor, 0))
5890 || (flag_force_addr
5891 && GET_CODE (XEXP (constructor, 0)) != REG)))
5892 constructor = change_address (constructor, VOIDmode,
5893 XEXP (constructor, 0));
5894 return constructor;
5895 }
5896
5897 else
5898 {
5899 /* Handle calls that pass values in multiple non-contiguous
5900 locations. The Irix 6 ABI has examples of this. */
e5e809f4 5901 if (target == 0 || ! safe_from_p (target, exp, 1)
e9ac02a6 5902 || GET_CODE (target) == PARALLEL)
5903 {
5904 if (mode != BLKmode && ! TREE_ADDRESSABLE (exp))
5905 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
5906 else
5907 target = assign_temp (type, 0, 1, 1);
5908 }
5909
5910 if (TREE_READONLY (exp))
5911 {
9151b3bf 5912 if (GET_CODE (target) == MEM)
5913 target = copy_rtx (target);
5914
5915 RTX_UNCHANGING_P (target) = 1;
5916 }
5917
e1a43f73 5918 store_constructor (exp, target, 0);
5919 return target;
5920 }
5921
5922 case INDIRECT_REF:
5923 {
5924 tree exp1 = TREE_OPERAND (exp, 0);
5925 tree exp2;
5926 tree index;
5927 tree string = string_constant (exp1, &index);
5928 int i;
5929
06eaa86f 5930 /* Try to optimize reads from const strings. */
5931 if (string
5932 && TREE_CODE (string) == STRING_CST
5933 && TREE_CODE (index) == INTEGER_CST
5934 && !TREE_INT_CST_HIGH (index)
5935 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (string)
5936 && GET_MODE_CLASS (mode) == MODE_INT
06eaa86f
JW
5937 && GET_MODE_SIZE (mode) == 1
5938 && modifier != EXPAND_MEMORY_USE_WO)
7581a30f 5939 return GEN_INT (TREE_STRING_POINTER (string)[i]);
bbf6f052 5940
5941 op0 = expand_expr (exp1, NULL_RTX, VOIDmode, EXPAND_SUM);
5942 op0 = memory_address (mode, op0);
8c8a8e34 5943
5944 if (flag_check_memory_usage && !AGGREGATE_TYPE_P (TREE_TYPE (exp)))
5945 {
5946 enum memory_use_mode memory_usage;
5947 memory_usage = get_memory_usage_from_modifier (modifier);
5948
5949 if (memory_usage != MEMORY_USE_DONT)
5950 {
5951 in_check_memory_usage = 1;
5952 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
5953 op0, ptr_mode,
5954 GEN_INT (int_size_in_bytes (type)),
5955 TYPE_MODE (sizetype),
5956 GEN_INT (memory_usage),
5957 TYPE_MODE (integer_type_node));
5958 in_check_memory_usage = 0;
5959 }
5960 }
5961
38a448ca 5962 temp = gen_rtx_MEM (mode, op0);
 5963	/* If the address was computed by addition,
5964 mark this as an element of an aggregate. */
5965 if (TREE_CODE (exp1) == PLUS_EXPR
5966 || (TREE_CODE (exp1) == SAVE_EXPR
5967 && TREE_CODE (TREE_OPERAND (exp1, 0)) == PLUS_EXPR)
05e3bdb9 5968 || AGGREGATE_TYPE_P (TREE_TYPE (exp))
5969 || (TREE_CODE (exp1) == ADDR_EXPR
5970 && (exp2 = TREE_OPERAND (exp1, 0))
b5f88157 5971 && AGGREGATE_TYPE_P (TREE_TYPE (exp2))))
8c8a8e34 5972 MEM_IN_STRUCT_P (temp) = 1;
5973
5974 /* If the pointer is actually a REFERENCE_TYPE, this could be pointing
5975 into some aggregate too. In theory we could fold this into the
5976 previous check and use rtx_addr_varies_p there too.
5977
5978 However, this seems safer. */
5979 if (!MEM_IN_STRUCT_P (temp)
5980 && (TREE_CODE (TREE_TYPE (exp1)) == REFERENCE_TYPE
5981 /* This may have been an array reference to the first element
5982 that was optimized away from being an addition. */
5983 || (TREE_CODE (exp1) == NOP_EXPR
5984 && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
5985 == REFERENCE_TYPE)
5986 || ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp1, 0)))
5987 == POINTER_TYPE)
5988 && (AGGREGATE_TYPE_P
5989 (TREE_TYPE (TREE_TYPE
5990 (TREE_OPERAND (exp1, 0))))))))))
5991 MEM_IN_STRUCT_P (temp) = ! rtx_addr_varies_p (temp);
5992
2c4c436a 5993 MEM_VOLATILE_P (temp) = TREE_THIS_VOLATILE (exp) | flag_volatile;
41472af8 5994 MEM_ALIAS_SET (temp) = get_alias_set (exp);
5995
5996 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY
5997 here, because, in C and C++, the fact that a location is accessed
5998 through a pointer to const does not mean that the value there can
5999 never change. Languages where it can never change should
6000 also set TREE_STATIC. */
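/* Editorial example, in plain C:

     int x = 1;
     const int *p = &x;
     x = 2;             now *p == 2, although p is pointer-to-const

   Reading through a pointer-to-const says nothing about whether the
   location itself can change, hence the extra TREE_STATIC test just
   below. */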
5cb7a25a 6001 RTX_UNCHANGING_P (temp) = TREE_READONLY (exp) & TREE_STATIC (exp);
6002 return temp;
6003 }
6004
6005 case ARRAY_REF:
6006 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) != ARRAY_TYPE)
6007 abort ();
bbf6f052 6008
bbf6f052 6009 {
6010 tree array = TREE_OPERAND (exp, 0);
6011 tree domain = TYPE_DOMAIN (TREE_TYPE (array));
6012 tree low_bound = domain ? TYPE_MIN_VALUE (domain) : integer_zero_node;
6013 tree index = TREE_OPERAND (exp, 1);
6014 tree index_type = TREE_TYPE (index);
08293add 6015 HOST_WIDE_INT i;
b50d17a1 6016
 6017	/* Optimize the special case of a zero lower bound.
6018
6019 We convert the low_bound to sizetype to avoid some problems
6020 with constant folding. (E.g. suppose the lower bound is 1,
6021 and its mode is QI. Without the conversion, (ARRAY
6022 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
6023 +INDEX), which becomes (ARRAY+255+INDEX). Oops!)
6024
6025 But sizetype isn't quite right either (especially if
6026 the lowbound is negative). FIXME */
6027
742920c7 6028 if (! integer_zerop (low_bound))
d4c89139
PB
6029 index = fold (build (MINUS_EXPR, index_type, index,
6030 convert (sizetype, low_bound)));
742920c7 6031
742920c7 6032 /* Fold an expression like: "foo"[2].
6033 This is not done in fold so it won't happen inside &.
6034 Don't fold if this is for wide characters since it's too
6035 difficult to do correctly and this is a very rare case. */
6036
6037 if (TREE_CODE (array) == STRING_CST
6038 && TREE_CODE (index) == INTEGER_CST
6039 && !TREE_INT_CST_HIGH (index)
307b821c 6040 && (i = TREE_INT_CST_LOW (index)) < TREE_STRING_LENGTH (array)
6041 && GET_MODE_CLASS (mode) == MODE_INT
6042 && GET_MODE_SIZE (mode) == 1)
307b821c 6043 return GEN_INT (TREE_STRING_POINTER (array)[i]);
bbf6f052 6044
6045 /* If this is a constant index into a constant array,
6046 just get the value from the array. Handle both the cases when
6047 we have an explicit constructor and when our operand is a variable
6048 that was declared const. */
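/* Editorial example: given (illustrative only)

     static const int tbl[3] = {10, 20, 30};

   a reference such as tbl[1] with a constant index is folded by the
   code below straight to the constant 20 (for plain const variables
   this happens at -O1 and above), with no memory reference emitted. */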
4af3895e 6049
6050 if (TREE_CODE (array) == CONSTRUCTOR && ! TREE_SIDE_EFFECTS (array))
6051 {
6052 if (TREE_CODE (index) == INTEGER_CST
6053 && TREE_INT_CST_HIGH (index) == 0)
6054 {
6055 tree elem = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0));
6056
6057 i = TREE_INT_CST_LOW (index);
6058 while (elem && i--)
6059 elem = TREE_CHAIN (elem);
6060 if (elem)
6061 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 6062 tmode, ro_modifier);
6063 }
6064 }
4af3895e 6065
6066 else if (optimize >= 1
6067 && TREE_READONLY (array) && ! TREE_SIDE_EFFECTS (array)
6068 && TREE_CODE (array) == VAR_DECL && DECL_INITIAL (array)
6069 && TREE_CODE (DECL_INITIAL (array)) != ERROR_MARK)
6070 {
08293add 6071 if (TREE_CODE (index) == INTEGER_CST)
6072 {
6073 tree init = DECL_INITIAL (array);
6074
6075 i = TREE_INT_CST_LOW (index);
6076 if (TREE_CODE (init) == CONSTRUCTOR)
6077 {
6078 tree elem = CONSTRUCTOR_ELTS (init);
6079
6080 while (elem
6081 && !tree_int_cst_equal (TREE_PURPOSE (elem), index))
6082 elem = TREE_CHAIN (elem);
6083 if (elem)
6084 return expand_expr (fold (TREE_VALUE (elem)), target,
921b3427 6085 tmode, ro_modifier);
6086 }
6087 else if (TREE_CODE (init) == STRING_CST
6088 && TREE_INT_CST_HIGH (index) == 0
6089 && (TREE_INT_CST_LOW (index)
6090 < TREE_STRING_LENGTH (init)))
6091 return (GEN_INT
6092 (TREE_STRING_POINTER
6093 (init)[TREE_INT_CST_LOW (index)]));
6094 }
6095 }
6096 }
8c8a8e34 6097
08293add 6098 /* ... fall through ... */
6099
6100 case COMPONENT_REF:
6101 case BIT_FIELD_REF:
4af3895e 6102 /* If the operand is a CONSTRUCTOR, we can just extract the
6103 appropriate field if it is present. Don't do this if we have
6104 already written the data since we want to refer to that copy
6105 and varasm.c assumes that's what we'll do. */
4af3895e 6106 if (code != ARRAY_REF
6107 && TREE_CODE (TREE_OPERAND (exp, 0)) == CONSTRUCTOR
6108 && TREE_CST_RTL (TREE_OPERAND (exp, 0)) == 0)
6109 {
6110 tree elt;
6111
6112 for (elt = CONSTRUCTOR_ELTS (TREE_OPERAND (exp, 0)); elt;
6113 elt = TREE_CHAIN (elt))
6114 if (TREE_PURPOSE (elt) == TREE_OPERAND (exp, 1)
6115 /* We can normally use the value of the field in the
6116 CONSTRUCTOR. However, if this is a bitfield in
6117 an integral mode that we can fit in a HOST_WIDE_INT,
6118 we must mask only the number of bits in the bitfield,
6119 since this is done implicitly by the constructor. If
6120 the bitfield does not meet either of those conditions,
6121 we can't do this optimization. */
6122 && (! DECL_BIT_FIELD (TREE_PURPOSE (elt))
6123 || ((GET_MODE_CLASS (DECL_MODE (TREE_PURPOSE (elt)))
6124 == MODE_INT)
6125 && (GET_MODE_BITSIZE (DECL_MODE (TREE_PURPOSE (elt)))
6126 <= HOST_BITS_PER_WIDE_INT))))
6127 {
6128 op0 = expand_expr (TREE_VALUE (elt), target, tmode, modifier);
6129 if (DECL_BIT_FIELD (TREE_PURPOSE (elt)))
6130 {
6131 int bitsize = DECL_FIELD_SIZE (TREE_PURPOSE (elt));
6132
6133 if (TREE_UNSIGNED (TREE_TYPE (TREE_PURPOSE (elt))))
6134 {
6135 op1 = GEN_INT (((HOST_WIDE_INT) 1 << bitsize) - 1);
6136 op0 = expand_and (op0, op1, target);
6137 }
6138 else
6139 {
6140 enum machine_mode imode
6141 = TYPE_MODE (TREE_TYPE (TREE_PURPOSE (elt)));
86b5812c 6142 tree count
6143 = build_int_2 (GET_MODE_BITSIZE (imode) - bitsize,
6144 0);
6145
6146 op0 = expand_shift (LSHIFT_EXPR, imode, op0, count,
6147 target, 0);
6148 op0 = expand_shift (RSHIFT_EXPR, imode, op0, count,
6149 target, 0);
6150 }
6151 }
6152
6153 return op0;
6154 }
6155 }
6156
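/* Editorial sketch of the two extractions above in plain C, for an
   integral value v that holds an N-bit bitfield in a BITS-bit mode
   (illustrative; ignores the N == BITS corner case):

     unsigned field:  v & ((1 << N) - 1)
     signed field:    (v << (BITS - N)) >> (BITS - N)

   using an arithmetic right shift; this is what the expand_and call
   and the paired expand_shift calls generate. */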
6157 {
6158 enum machine_mode mode1;
6159 int bitsize;
6160 int bitpos;
7bb0943f 6161 tree offset;
bbf6f052 6162 int volatilep = 0;
034f9101 6163 int alignment;
6164 tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
6165 &mode1, &unsignedp, &volatilep,
6166 &alignment);
bbf6f052 6167
6168 /* If we got back the original object, something is wrong. Perhaps
6169 we are evaluating an expression too early. In any event, don't
6170 infinitely recurse. */
6171 if (tem == exp)
6172 abort ();
6173
3d27140a 6174 /* If TEM's type is a union of variable size, pass TARGET to the inner
6175 computation, since it will need a temporary and TARGET is known
 6176	 to be adequate.  This occurs in unchecked conversion in Ada.  */
6177
6178 op0 = expand_expr (tem,
6179 (TREE_CODE (TREE_TYPE (tem)) == UNION_TYPE
6180 && (TREE_CODE (TYPE_SIZE (TREE_TYPE (tem)))
6181 != INTEGER_CST)
6182 ? target : NULL_RTX),
4ed67205 6183 VOIDmode,
6184 modifier == EXPAND_INITIALIZER
6185 ? modifier : EXPAND_NORMAL);
bbf6f052 6186
8c8a8e34 6187 /* If this is a constant, put it into a register if it is a
8008b228 6188 legitimate constant and memory if it isn't. */
6189 if (CONSTANT_P (op0))
6190 {
6191 enum machine_mode mode = TYPE_MODE (TREE_TYPE (tem));
f2878c6b 6192 if (mode != BLKmode && LEGITIMATE_CONSTANT_P (op0))
6193 op0 = force_reg (mode, op0);
6194 else
6195 op0 = validize_mem (force_const_mem (mode, op0));
6196 }
6197
6198 if (offset != 0)
6199 {
906c4e36 6200 rtx offset_rtx = expand_expr (offset, NULL_RTX, VOIDmode, 0);
6201
6202 if (GET_CODE (op0) != MEM)
6203 abort ();
6204
6205 if (GET_MODE (offset_rtx) != ptr_mode)
bd070e1a 6206 {
2d48c13d 6207#ifdef POINTERS_EXTEND_UNSIGNED
822a3443 6208 offset_rtx = convert_memory_address (ptr_mode, offset_rtx);
2d48c13d 6209#else
bd070e1a 6210 offset_rtx = convert_to_mode (ptr_mode, offset_rtx, 0);
2d48c13d 6211#endif
bd070e1a 6212 }
2d48c13d 6213
6214 if (GET_CODE (op0) == MEM
6215 && GET_MODE (op0) == BLKmode
6216 && bitsize
6217 && (bitpos % bitsize) == 0
6218 && (bitsize % GET_MODE_ALIGNMENT (mode1)) == 0
6219 && (alignment * BITS_PER_UNIT) == GET_MODE_ALIGNMENT (mode1))
6220 {
6221 rtx temp = change_address (op0, mode1,
6222 plus_constant (XEXP (op0, 0),
6223 (bitpos /
6224 BITS_PER_UNIT)));
6225 if (GET_CODE (XEXP (temp, 0)) == REG)
6226 op0 = temp;
6227 else
6228 op0 = change_address (op0, mode1,
6229 force_reg (GET_MODE (XEXP (temp, 0)),
6230 XEXP (temp, 0)));
6231 bitpos = 0;
6232 }
6233
6234
7bb0943f 6235 op0 = change_address (op0, VOIDmode,
6236 gen_rtx_PLUS (ptr_mode, XEXP (op0, 0),
6237 force_reg (ptr_mode, offset_rtx)));
6238 }
6239
6240 /* Don't forget about volatility even if this is a bitfield. */
6241 if (GET_CODE (op0) == MEM && volatilep && ! MEM_VOLATILE_P (op0))
6242 {
6243 op0 = copy_rtx (op0);
6244 MEM_VOLATILE_P (op0) = 1;
6245 }
6246
6247 /* Check the access. */
6248 if (flag_check_memory_usage && GET_CODE (op0) == MEM)
6249 {
6250 enum memory_use_mode memory_usage;
6251 memory_usage = get_memory_usage_from_modifier (modifier);
6252
6253 if (memory_usage != MEMORY_USE_DONT)
6254 {
6255 rtx to;
6256 int size;
6257
6258 to = plus_constant (XEXP (op0, 0), (bitpos / BITS_PER_UNIT));
6259 size = (bitpos % BITS_PER_UNIT) + bitsize + BITS_PER_UNIT - 1;
6260
6261 /* Check the access right of the pointer. */
6262 if (size > BITS_PER_UNIT)
6263 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
6264 to, ptr_mode,
6265 GEN_INT (size / BITS_PER_UNIT),
6266 TYPE_MODE (sizetype),
6267 GEN_INT (memory_usage),
6268 TYPE_MODE (integer_type_node));
6269 }
6270 }
6271
6272 /* In cases where an aligned union has an unaligned object
6273 as a field, we might be extracting a BLKmode value from
6274 an integer-mode (e.g., SImode) object. Handle this case
6275 by doing the extract into an object as wide as the field
6276 (which we know to be the width of a basic mode), then
6277 storing into memory, and changing the mode to BLKmode.
6278 If we ultimately want the address (EXPAND_CONST_ADDRESS or
6279 EXPAND_INITIALIZER), then we must not copy to a temporary. */
bbf6f052 6280 if (mode1 == VOIDmode
ccc98036 6281 || GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
f9409c3a 6282 || (modifier != EXPAND_CONST_ADDRESS
f9409c3a 6283 && modifier != EXPAND_INITIALIZER
6284 && ((mode1 != BLKmode && ! direct_load[(int) mode1]
6285 && GET_MODE_CLASS (mode) != MODE_COMPLEX_INT
6286 && GET_MODE_CLASS (mode) != MODE_COMPLEX_FLOAT)
6287 /* If the field isn't aligned enough to fetch as a memref,
6288 fetch it as a bit field. */
6289 || (SLOW_UNALIGNED_ACCESS
6290 && ((TYPE_ALIGN (TREE_TYPE (tem)) < GET_MODE_ALIGNMENT (mode))
6291 || (bitpos % GET_MODE_ALIGNMENT (mode) != 0))))))
bbf6f052 6292 {
6293 enum machine_mode ext_mode = mode;
6294
6295 if (ext_mode == BLKmode)
6296 ext_mode = mode_for_size (bitsize, MODE_INT, 1);
6297
6298 if (ext_mode == BLKmode)
6299 {
6300 /* In this case, BITPOS must start at a byte boundary and
6301 TARGET, if specified, must be a MEM. */
6302 if (GET_CODE (op0) != MEM
6303 || (target != 0 && GET_CODE (target) != MEM)
6304 || bitpos % BITS_PER_UNIT != 0)
6305 abort ();
6306
6307 op0 = change_address (op0, VOIDmode,
6308 plus_constant (XEXP (op0, 0),
6309 bitpos / BITS_PER_UNIT));
6310 if (target == 0)
6311 target = assign_temp (type, 0, 1, 1);
6312
6313 emit_block_move (target, op0,
6314 GEN_INT ((bitsize + BITS_PER_UNIT - 1)
6315 / BITS_PER_UNIT),
6316 1);
6317
6318 return target;
6319 }
bbf6f052 6320
6321 op0 = validize_mem (op0);
6322
6323 if (GET_CODE (op0) == MEM && GET_CODE (XEXP (op0, 0)) == REG)
6324 mark_reg_pointer (XEXP (op0, 0), alignment);
6325
6326 op0 = extract_bit_field (op0, bitsize, bitpos,
bbf6f052 6327 unsignedp, target, ext_mode, ext_mode,
034f9101 6328 alignment,
bbf6f052 6329 int_size_in_bytes (TREE_TYPE (tem)));
6330
6331 /* If the result is a record type and BITSIZE is narrower than
6332 the mode of OP0, an integral mode, and this is a big endian
6333 machine, we must put the field into the high-order bits. */
6334 if (TREE_CODE (type) == RECORD_TYPE && BYTES_BIG_ENDIAN
6335 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_INT
6336 && bitsize < GET_MODE_BITSIZE (GET_MODE (op0)))
6337 op0 = expand_shift (LSHIFT_EXPR, GET_MODE (op0), op0,
6338 size_int (GET_MODE_BITSIZE (GET_MODE (op0))
6339 - bitsize),
6340 op0, 1);
6341
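/* Editorial example: on a big-endian machine, a 3-bit field extracted
   into an 8-bit register must end up in the high-order bits, so the
   shift above moves it left by GET_MODE_BITSIZE - bitsize = 5
   positions. */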
6342 if (mode == BLKmode)
6343 {
6344 rtx new = assign_stack_temp (ext_mode,
6345 bitsize / BITS_PER_UNIT, 0);
6346
6347 emit_move_insn (new, op0);
6348 op0 = copy_rtx (new);
6349 PUT_MODE (op0, BLKmode);
092dded9 6350 MEM_IN_STRUCT_P (op0) = 1;
6351 }
6352
6353 return op0;
6354 }
6355
6356 /* If the result is BLKmode, use that to access the object
6357 now as well. */
6358 if (mode == BLKmode)
6359 mode1 = BLKmode;
6360
6361 /* Get a reference to just this component. */
6362 if (modifier == EXPAND_CONST_ADDRESS
6363 || modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6364 op0 = gen_rtx_MEM (mode1, plus_constant (XEXP (op0, 0),
6365 (bitpos / BITS_PER_UNIT)));
6366 else
6367 op0 = change_address (op0, mode1,
6368 plus_constant (XEXP (op0, 0),
6369 (bitpos / BITS_PER_UNIT)));
6370
6371 if (GET_CODE (op0) == MEM)
6372 MEM_ALIAS_SET (op0) = get_alias_set (exp);
6373
6374 if (GET_CODE (XEXP (op0, 0)) == REG)
6375 mark_reg_pointer (XEXP (op0, 0), alignment);
6376
6377 MEM_IN_STRUCT_P (op0) = 1;
6378 MEM_VOLATILE_P (op0) |= volatilep;
0d15e60c 6379 if (mode == mode1 || mode1 == BLKmode || mode1 == tmode
08bbd316 6380 || modifier == EXPAND_CONST_ADDRESS
0d15e60c 6381 || modifier == EXPAND_INITIALIZER)
bbf6f052 6382 return op0;
0d15e60c 6383 else if (target == 0)
bbf6f052 6384 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
0d15e60c 6385
6386 convert_move (target, op0, unsignedp);
6387 return target;
6388 }
6389
6390 /* Intended for a reference to a buffer of a file-object in Pascal.
6391 But it's not certain that a special tree code will really be
6392 necessary for these. INDIRECT_REF might work for them. */
6393 case BUFFER_REF:
6394 abort ();
6395
7308a047 6396 case IN_EXPR:
7308a047 6397 {
6398 /* Pascal set IN expression.
6399
6400 Algorithm:
6401 rlo = set_low - (set_low%bits_per_word);
6402 the_word = set [ (index - rlo)/bits_per_word ];
6403 bit_index = index % bits_per_word;
6404 bitmask = 1 << bit_index;
6405 return !!(the_word & bitmask); */
6406
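/* Editorial sketch of the algorithm above as self-contained C (names
   hypothetical; the expansion below actually works in byte_mode, i.e.
   BITS_PER_UNIT-wide chunks rather than full words):

     static int
     set_contains (const unsigned char *set, int set_low, int index)
     {
       int rlo = set_low - (set_low % 8);
       unsigned char the_word = set[(index - rlo) / 8];
       int bit_index = index % 8;
       unsigned char bitmask = 1 << bit_index;
       return !!(the_word & bitmask);
     }
*/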
6407 tree set = TREE_OPERAND (exp, 0);
6408 tree index = TREE_OPERAND (exp, 1);
d6a5ac33 6409 int iunsignedp = TREE_UNSIGNED (TREE_TYPE (index));
7308a047 6410 tree set_type = TREE_TYPE (set);
6411 tree set_low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (set_type));
6412 tree set_high_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (set_type));
6413 rtx index_val = expand_expr (index, 0, VOIDmode, 0);
6414 rtx lo_r = expand_expr (set_low_bound, 0, VOIDmode, 0);
6415 rtx hi_r = expand_expr (set_high_bound, 0, VOIDmode, 0);
6416 rtx setval = expand_expr (set, 0, VOIDmode, 0);
6417 rtx setaddr = XEXP (setval, 0);
6418 enum machine_mode index_mode = TYPE_MODE (TREE_TYPE (index));
6419 rtx rlow;
6420 rtx diff, quo, rem, addr, bit, result;
7308a047 6421
6422 preexpand_calls (exp);
6423
6424 /* If domain is empty, answer is no. Likewise if index is constant
6425 and out of bounds. */
51723711 6426 if (((TREE_CODE (set_high_bound) == INTEGER_CST
d6a5ac33 6427 && TREE_CODE (set_low_bound) == INTEGER_CST
51723711 6428 && tree_int_cst_lt (set_high_bound, set_low_bound))
6429 || (TREE_CODE (index) == INTEGER_CST
6430 && TREE_CODE (set_low_bound) == INTEGER_CST
6431 && tree_int_cst_lt (index, set_low_bound))
6432 || (TREE_CODE (set_high_bound) == INTEGER_CST
6433 && TREE_CODE (index) == INTEGER_CST
6434 && tree_int_cst_lt (set_high_bound, index))))
6435 return const0_rtx;
6436
6437 if (target == 0)
6438 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6439
6440 /* If we get here, we have to generate the code for both cases
6441 (in range and out of range). */
6442
6443 op0 = gen_label_rtx ();
6444 op1 = gen_label_rtx ();
6445
6446 if (! (GET_CODE (index_val) == CONST_INT
6447 && GET_CODE (lo_r) == CONST_INT))
6448 {
17938e57 6449 emit_cmp_insn (index_val, lo_r, LT, NULL_RTX,
d6a5ac33 6450 GET_MODE (index_val), iunsignedp, 0);
7308a047
RS
6451 emit_jump_insn (gen_blt (op1));
6452 }
6453
6454 if (! (GET_CODE (index_val) == CONST_INT
6455 && GET_CODE (hi_r) == CONST_INT))
6456 {
17938e57 6457 emit_cmp_insn (index_val, hi_r, GT, NULL_RTX,
d6a5ac33 6458 GET_MODE (index_val), iunsignedp, 0);
6459 emit_jump_insn (gen_bgt (op1));
6460 }
6461
6462 /* Calculate the element number of bit zero in the first word
6463 of the set. */
6464 if (GET_CODE (lo_r) == CONST_INT)
6465 rlow = GEN_INT (INTVAL (lo_r)
6466 & ~ ((HOST_WIDE_INT) 1 << BITS_PER_UNIT));
7308a047 6467 else
6468 rlow = expand_binop (index_mode, and_optab, lo_r,
6469 GEN_INT (~((HOST_WIDE_INT) 1 << BITS_PER_UNIT)),
d6a5ac33 6470 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
7308a047 6471
6472 diff = expand_binop (index_mode, sub_optab, index_val, rlow,
6473 NULL_RTX, iunsignedp, OPTAB_LIB_WIDEN);
6474
6475 quo = expand_divmod (0, TRUNC_DIV_EXPR, index_mode, diff,
d6a5ac33 6476 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
7308a047 6477 rem = expand_divmod (1, TRUNC_MOD_EXPR, index_mode, index_val,
6478 GEN_INT (BITS_PER_UNIT), NULL_RTX, iunsignedp);
6479
7308a047 6480 addr = memory_address (byte_mode,
6481 expand_binop (index_mode, add_optab, diff,
6482 setaddr, NULL_RTX, iunsignedp,
17938e57 6483 OPTAB_LIB_WIDEN));
d6a5ac33 6484
 6485	      /* Extract the bit we want to examine.  */
6486 bit = expand_shift (RSHIFT_EXPR, byte_mode,
38a448ca 6487 gen_rtx_MEM (byte_mode, addr),
6488 make_tree (TREE_TYPE (index), rem),
6489 NULL_RTX, 1);
6490 result = expand_binop (byte_mode, and_optab, bit, const1_rtx,
6491 GET_MODE (target) == byte_mode ? target : 0,
7308a047 6492 1, OPTAB_LIB_WIDEN);
6493
6494 if (result != target)
6495 convert_move (target, result, 1);
6496
6497 /* Output the code to handle the out-of-range case. */
6498 emit_jump (op0);
6499 emit_label (op1);
6500 emit_move_insn (target, const0_rtx);
6501 emit_label (op0);
6502 return target;
6503 }
6504
6505 case WITH_CLEANUP_EXPR:
6506 if (RTL_EXPR_RTL (exp) == 0)
6507 {
6508 RTL_EXPR_RTL (exp)
921b3427 6509 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6510 expand_decl_cleanup (NULL_TREE, TREE_OPERAND (exp, 2));
6511
6512 /* That's it for this cleanup. */
6513 TREE_OPERAND (exp, 2) = 0;
6514 }
6515 return RTL_EXPR_RTL (exp);
6516
6517 case CLEANUP_POINT_EXPR:
6518 {
d93d4205 6519 extern int temp_slot_level;
6520 /* Start a new binding layer that will keep track of all cleanup
6521 actions to be performed. */
6522 expand_start_bindings (0);
6523
d93d4205 6524 target_temp_slot_level = temp_slot_level;
e976b8b2 6525
921b3427 6526 op0 = expand_expr (TREE_OPERAND (exp, 0), target, tmode, ro_modifier);
6527 /* If we're going to use this value, load it up now. */
6528 if (! ignore)
6529 op0 = force_not_mem (op0);
d93d4205 6530 preserve_temp_slots (op0);
e976b8b2 6531 expand_end_bindings (NULL_TREE, 0, 0);
6532 }
6533 return op0;
6534
6535 case CALL_EXPR:
6536 /* Check for a built-in function. */
6537 if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
6538 && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
6539 == FUNCTION_DECL)
bbf6f052
RK
6540 && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6541 return expand_builtin (exp, target, subtarget, tmode, ignore);
d6a5ac33 6542
bbf6f052
RK
6543 /* If this call was expanded already by preexpand_calls,
6544 just return the result we got. */
6545 if (CALL_EXPR_RTL (exp) != 0)
6546 return CALL_EXPR_RTL (exp);
d6a5ac33 6547
8129842c 6548 return expand_call (exp, target, ignore);
bbf6f052
RK
6549
6550 case NON_LVALUE_EXPR:
6551 case NOP_EXPR:
6552 case CONVERT_EXPR:
6553 case REFERENCE_EXPR:
bbf6f052
RK
6554 if (TREE_CODE (type) == UNION_TYPE)
6555 {
6556 tree valtype = TREE_TYPE (TREE_OPERAND (exp, 0));
6557 if (target == 0)
06089a8b
RK
6558 {
6559 if (mode != BLKmode)
6560 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
6561 else
6562 target = assign_temp (type, 0, 1, 1);
6563 }
d6a5ac33 6564
bbf6f052
RK
6565 if (GET_CODE (target) == MEM)
6566 /* Store data into beginning of memory target. */
6567 store_expr (TREE_OPERAND (exp, 0),
1499e0a8
RK
6568 change_address (target, TYPE_MODE (valtype), 0), 0);
6569
bbf6f052
RK
6570 else if (GET_CODE (target) == REG)
6571 /* Store this field into a union of the proper type. */
6572 store_field (target, GET_MODE_BITSIZE (TYPE_MODE (valtype)), 0,
6573 TYPE_MODE (valtype), TREE_OPERAND (exp, 0),
6574 VOIDmode, 0, 1,
6575 int_size_in_bytes (TREE_TYPE (TREE_OPERAND (exp, 0))));
6576 else
6577 abort ();
6578
6579 /* Return the entire union. */
6580 return target;
6581 }
d6a5ac33 6582
7f62854a
RK
6583 if (mode == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
6584 {
6585 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode,
921b3427 6586 ro_modifier);
7f62854a
RK
6587
6588 /* If the signedness of the conversion differs and OP0 is
6589 a promoted SUBREG, clear that indication since we now
6590 have to do the proper extension. */
6591 if (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))) != unsignedp
6592 && GET_CODE (op0) == SUBREG)
6593 SUBREG_PROMOTED_VAR_P (op0) = 0;
6594
6595 return op0;
6596 }
6597
1499e0a8 6598 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, mode, 0);
12342f90
RS
6599 if (GET_MODE (op0) == mode)
6600 return op0;
12342f90 6601
d6a5ac33
RK
6602 /* If OP0 is a constant, just convert it into the proper mode. */
6603 if (CONSTANT_P (op0))
6604 return
6605 convert_modes (mode, TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6606 op0, TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
12342f90 6607
26fcb35a 6608 if (modifier == EXPAND_INITIALIZER)
38a448ca 6609 return gen_rtx_fmt_e (unsignedp ? ZERO_EXTEND : SIGN_EXTEND, mode, op0);
d6a5ac33 6610
bbf6f052 6611 if (target == 0)
d6a5ac33
RK
6612 return
6613 convert_to_mode (mode, op0,
6614 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052 6615 else
d6a5ac33
RK
6616 convert_move (target, op0,
6617 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
bbf6f052
RK
6618 return target;
6619
6620 case PLUS_EXPR:
0f41302f
MS
6621 /* We come here from MINUS_EXPR when the second operand is a
6622 constant. */
bbf6f052
RK
6623 plus_expr:
6624 this_optab = add_optab;
6625
6626 /* If we are adding a constant, an RTL_EXPR that is sp, fp, or ap, and
6627 something else, make sure we add the register to the constant and
6628 then to the other thing. This case can occur during strength
6629 reduction and doing it this way will produce better code if the
6630 frame pointer or argument pointer is eliminated.
6631
6632 fold-const.c will ensure that the constant is always in the inner
6633 PLUS_EXPR, so the only case we need to do anything about is if
6634 sp, ap, or fp is our second argument, in which case we must swap
6635 the innermost first argument and our second argument. */
6636
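/* For example (hypothetical operands): if EXP is (x + 4) + FP, where FP
   is an RTL_EXPR holding the frame pointer, the swap below rewrites it
   as (FP + 4) + x, so that FP + 4 can collapse to a single register
   plus offset once the frame pointer is eliminated.  */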
6637 if (TREE_CODE (TREE_OPERAND (exp, 0)) == PLUS_EXPR
6638 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 1)) == INTEGER_CST
6639 && TREE_CODE (TREE_OPERAND (exp, 1)) == RTL_EXPR
6640 && (RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == frame_pointer_rtx
6641 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == stack_pointer_rtx
6642 || RTL_EXPR_RTL (TREE_OPERAND (exp, 1)) == arg_pointer_rtx))
6643 {
6644 tree t = TREE_OPERAND (exp, 1);
6645
6646 TREE_OPERAND (exp, 1) = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
6647 TREE_OPERAND (TREE_OPERAND (exp, 0), 0) = t;
6648 }
6649
88f63c77 6650 /* If the result is to be ptr_mode and we are adding an integer to
bbf6f052
RK
6651 something, we might be forming a constant. So try to use
6652 plus_constant. If it produces a sum and we can't accept it,
6653 use force_operand. This allows P = &ARR[const] to generate
6654 efficient code on machines where a SYMBOL_REF is not a valid
6655 address.
6656
6657 If this is an EXPAND_SUM call, always return the sum. */
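/* Concretely (illustrative): for P = &ARR[3] with 4-byte elements and
   static ARR, the inner expansion yields (symbol_ref "arr"), and
   plus_constant folds the addition into
   (const (plus (symbol_ref "arr") (const_int 12))), which many targets
   accept directly where a run-time addition would be wasteful.  */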
c980ac49 6658 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER
88f63c77 6659 || mode == ptr_mode)
bbf6f052 6660 {
c980ac49
RS
6661 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST
6662 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6663 && TREE_CONSTANT (TREE_OPERAND (exp, 1)))
6664 {
6665 op1 = expand_expr (TREE_OPERAND (exp, 1), subtarget, VOIDmode,
6666 EXPAND_SUM);
6667 op1 = plus_constant (op1, TREE_INT_CST_LOW (TREE_OPERAND (exp, 0)));
6668 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6669 op1 = force_operand (op1, target);
6670 return op1;
6671 }
bbf6f052 6672
c980ac49
RS
6673 else if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6674 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT
6675 && TREE_CONSTANT (TREE_OPERAND (exp, 0)))
6676 {
6677 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6678 EXPAND_SUM);
6679 if (! CONSTANT_P (op0))
6680 {
6681 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6682 VOIDmode, modifier);
709f5be1
RS
6683 /* Don't go to both_summands if modifier
6684 says it's not right to return a PLUS. */
6685 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6686 goto binop2;
c980ac49
RS
6687 goto both_summands;
6688 }
6689 op0 = plus_constant (op0, TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)));
6690 if (modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
6691 op0 = force_operand (op0, target);
6692 return op0;
6693 }
bbf6f052
RK
6694 }
6695
6696 /* No sense saving up arithmetic to be done
6697 if it's all in the wrong mode to form part of an address.
6698 And force_operand won't know whether to sign-extend or
6699 zero-extend. */
6700 if ((modifier != EXPAND_SUM && modifier != EXPAND_INITIALIZER)
88f63c77 6701 || mode != ptr_mode)
c980ac49 6702 goto binop;
bbf6f052
RK
6703
6704 preexpand_calls (exp);
e5e809f4 6705 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6706 subtarget = 0;
6707
921b3427
RK
6708 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, ro_modifier);
6709 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, ro_modifier);
bbf6f052 6710
c980ac49 6711 both_summands:
bbf6f052
RK
6712 /* Make sure any term that's a sum with a constant comes last. */
6713 if (GET_CODE (op0) == PLUS
6714 && CONSTANT_P (XEXP (op0, 1)))
6715 {
6716 temp = op0;
6717 op0 = op1;
6718 op1 = temp;
6719 }
6720 /* If adding to a sum including a constant,
6721 associate it to put the constant outside. */
6722 if (GET_CODE (op1) == PLUS
6723 && CONSTANT_P (XEXP (op1, 1)))
6724 {
6725 rtx constant_term = const0_rtx;
6726
6727 temp = simplify_binary_operation (PLUS, mode, XEXP (op1, 0), op0);
6728 if (temp != 0)
6729 op0 = temp;
6f90e075
JW
6730 /* Ensure that MULT comes first if there is one. */
6731 else if (GET_CODE (op0) == MULT)
38a448ca 6732 op0 = gen_rtx_PLUS (mode, op0, XEXP (op1, 0));
bbf6f052 6733 else
38a448ca 6734 op0 = gen_rtx_PLUS (mode, XEXP (op1, 0), op0);
bbf6f052
RK
6735
6736 /* Let's also eliminate constants from op0 if possible. */
6737 op0 = eliminate_constant_term (op0, &constant_term);
6738
6739 /* CONSTANT_TERM and XEXP (op1, 1) are known to be constant, so
6740 their sum should be a constant. Form it into OP1, since the
6741 result we want will then be OP0 + OP1. */
6742
6743 temp = simplify_binary_operation (PLUS, mode, constant_term,
6744 XEXP (op1, 1));
6745 if (temp != 0)
6746 op1 = temp;
6747 else
38a448ca 6748 op1 = gen_rtx_PLUS (mode, constant_term, XEXP (op1, 1));
bbf6f052
RK
6749 }
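/* E.g. (illustrative): adding (plus X (const_int 4)) to
   (plus Y (const_int 8)) works out, after the swap and associations
   above, to (plus (plus X Y) (const_int 12)), with the combined
   constant outermost.  */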
6750
6751 /* Put a constant term last and put a multiplication first. */
6752 if (CONSTANT_P (op0) || GET_CODE (op1) == MULT)
6753 temp = op1, op1 = op0, op0 = temp;
6754
6755 temp = simplify_binary_operation (PLUS, mode, op0, op1);
38a448ca 6756 return temp ? temp : gen_rtx_PLUS (mode, op0, op1);
bbf6f052
RK
6757
6758 case MINUS_EXPR:
ea87523e
RK
6759 /* For initializers, we are allowed to return a MINUS of two
6760 symbolic constants. Here we handle all cases when both
6761 operands are constant, for the sake of an initializer. */
bbf6f052
RK
6764 if ((modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
6765 && really_constant_p (TREE_OPERAND (exp, 0))
6766 && really_constant_p (TREE_OPERAND (exp, 1)))
6767 {
906c4e36 6768 rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX,
921b3427 6769 VOIDmode, ro_modifier);
906c4e36 6770 rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
921b3427 6771 VOIDmode, ro_modifier);
ea87523e 6772
ea87523e
RK
6773 /* If the last operand is a CONST_INT, use plus_constant of
6774 the negated constant. Else make the MINUS. */
6775 if (GET_CODE (op1) == CONST_INT)
6776 return plus_constant (op0, - INTVAL (op1));
6777 else
38a448ca 6778 return gen_rtx_MINUS (mode, op0, op1);
bbf6f052
RK
6779 }
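/* E.g. (illustrative): a static initializer such as
   `(char *) &b - (char *) &a' expands both operands to symbolic
   constants and returns (minus (symbol_ref "b") (symbol_ref "a"))
   for the assembler or linker to resolve.  */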
6780 /* Convert A - const to A + (-const). */
6781 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6782 {
ae431183
RK
6783 tree negated = fold (build1 (NEGATE_EXPR, type,
6784 TREE_OPERAND (exp, 1)));
6785
6786 /* Deal with the case where we can't negate the constant
6787 in TYPE. */
6788 if (TREE_UNSIGNED (type) || TREE_OVERFLOW (negated))
6789 {
6790 tree newtype = signed_type (type);
6791 tree newop0 = convert (newtype, TREE_OPERAND (exp, 0));
6792 tree newop1 = convert (newtype, TREE_OPERAND (exp, 1));
6793 tree newneg = fold (build1 (NEGATE_EXPR, newtype, newop1));
6794
6795 if (! TREE_OVERFLOW (newneg))
6796 return expand_expr (convert (type,
6797 build (PLUS_EXPR, newtype,
6798 newop0, newneg)),
921b3427 6799 target, tmode, ro_modifier);
ae431183
RK
6800 }
6801 else
6802 {
6803 exp = build (PLUS_EXPR, type, TREE_OPERAND (exp, 0), negated);
6804 goto plus_expr;
6805 }
bbf6f052
RK
6806 }
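/* E.g. (illustrative): for unsigned X, `x - 3' cannot negate 3 in the
   unsigned type, so it is rewritten through the signed type as
   (unsigned) ((int) x + -3) and expanded as a PLUS_EXPR.  */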
6807 this_optab = sub_optab;
6808 goto binop;
6809
6810 case MULT_EXPR:
6811 preexpand_calls (exp);
6812 /* If first operand is constant, swap them.
6813 Thus the following special case checks need only
6814 check the second operand. */
6815 if (TREE_CODE (TREE_OPERAND (exp, 0)) == INTEGER_CST)
6816 {
6817 register tree t1 = TREE_OPERAND (exp, 0);
6818 TREE_OPERAND (exp, 0) = TREE_OPERAND (exp, 1);
6819 TREE_OPERAND (exp, 1) = t1;
6820 }
6821
6822 /* Attempt to return something suitable for generating an
6823 indexed address, for machines that support that. */
6824
88f63c77 6825 if (modifier == EXPAND_SUM && mode == ptr_mode
bbf6f052 6826 && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
906c4e36 6827 && GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
bbf6f052 6828 {
921b3427
RK
6829 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode,
6830 EXPAND_SUM);
bbf6f052
RK
6831
6832 /* Apply distributive law if OP0 is x+c. */
6833 if (GET_CODE (op0) == PLUS
6834 && GET_CODE (XEXP (op0, 1)) == CONST_INT)
38a448ca
RH
6835 return gen_rtx_PLUS (mode,
6836 gen_rtx_MULT (mode, XEXP (op0, 0),
6837 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))),
906c4e36
RK
6838 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))
6839 * INTVAL (XEXP (op0, 1))));
bbf6f052
RK
6840
6841 if (GET_CODE (op0) != REG)
906c4e36 6842 op0 = force_operand (op0, NULL_RTX);
bbf6f052
RK
6843 if (GET_CODE (op0) != REG)
6844 op0 = copy_to_mode_reg (mode, op0);
6845
38a448ca
RH
6846 return gen_rtx_MULT (mode, op0,
6847 GEN_INT (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))));
bbf6f052
RK
6848 }
6849
e5e809f4 6850 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6851 subtarget = 0;
6852
6853 /* Check for multiplying things that have been extended
6854 from a narrower type. If this machine supports multiplying
6855 in that narrower type with a result in the desired type,
6856 do it that way, and avoid the explicit type-conversion. */
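/* E.g. (illustrative): given `short a, b;  int i = (int) a * (int) b;'
   both operands are NOP_EXPRs widening HImode values, so on a machine
   with a widening multiply pattern (mulhisi3-style) we multiply the
   narrow operands directly and skip both explicit extensions.  */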
6857 if (TREE_CODE (TREE_OPERAND (exp, 0)) == NOP_EXPR
6858 && TREE_CODE (type) == INTEGER_TYPE
6859 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6860 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))))
6861 && ((TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
6862 && int_fits_type_p (TREE_OPERAND (exp, 1),
6863 TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6864 /* Don't use a widening multiply if a shift will do. */
6865 && ((GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 1))))
906c4e36 6866 > HOST_BITS_PER_WIDE_INT)
bbf6f052
RK
6867 || exact_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1))) < 0))
6868 ||
6869 (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
6870 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6871 ==
6872 TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
6873 /* If both operands are extended, they must either both
6874 be zero-extended or both be sign-extended. */
6875 && (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)))
6876 ==
6877 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))))))
6878 {
6879 enum machine_mode innermode
6880 = TYPE_MODE (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)));
b10af0c8
TG
6881 optab other_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6882 ? smul_widen_optab : umul_widen_optab);
bbf6f052
RK
6883 this_optab = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)))
6884 ? umul_widen_optab : smul_widen_optab);
b10af0c8 6885 if (mode == GET_MODE_WIDER_MODE (innermode))
bbf6f052 6886 {
b10af0c8
TG
6887 if (this_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing)
6888 {
6889 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6890 NULL_RTX, VOIDmode, 0);
6891 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6892 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6893 VOIDmode, 0);
6894 else
6895 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6896 NULL_RTX, VOIDmode, 0);
6897 goto binop2;
6898 }
6899 else if (other_optab->handlers[(int) mode].insn_code != CODE_FOR_nothing
6900 && innermode == word_mode)
6901 {
6902 rtx htem;
6903 op0 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
6904 NULL_RTX, VOIDmode, 0);
6905 if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
6906 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX,
6907 VOIDmode, 0);
6908 else
6909 op1 = expand_expr (TREE_OPERAND (TREE_OPERAND (exp, 1), 0),
6910 NULL_RTX, VOIDmode, 0);
6911 temp = expand_binop (mode, other_optab, op0, op1, target,
6912 unsignedp, OPTAB_LIB_WIDEN);
6913 htem = expand_mult_highpart_adjust (innermode,
6914 gen_highpart (innermode, temp),
6915 op0, op1,
6916 gen_highpart (innermode, temp),
6917 unsignedp);
6918 emit_move_insn (gen_highpart (innermode, temp), htem);
6919 return temp;
6920 }
bbf6f052
RK
6921 }
6922 }
6923 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6924 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6925 return expand_mult (mode, op0, op1, target, unsignedp);
6926
6927 case TRUNC_DIV_EXPR:
6928 case FLOOR_DIV_EXPR:
6929 case CEIL_DIV_EXPR:
6930 case ROUND_DIV_EXPR:
6931 case EXACT_DIV_EXPR:
6932 preexpand_calls (exp);
e5e809f4 6933 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6934 subtarget = 0;
6935 /* Possible optimization: compute the dividend with EXPAND_SUM;
6936 then, if the divisor is constant, we can optimize the case
6937 where some terms of the dividend have coefficients divisible by it. */
6938 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6939 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6940 return expand_divmod (0, code, mode, op0, op1, target, unsignedp);
6941
6942 case RDIV_EXPR:
6943 this_optab = flodiv_optab;
6944 goto binop;
6945
6946 case TRUNC_MOD_EXPR:
6947 case FLOOR_MOD_EXPR:
6948 case CEIL_MOD_EXPR:
6949 case ROUND_MOD_EXPR:
6950 preexpand_calls (exp);
e5e809f4 6951 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
6952 subtarget = 0;
6953 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 6954 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6955 return expand_divmod (1, code, mode, op0, op1, target, unsignedp);
6956
6957 case FIX_ROUND_EXPR:
6958 case FIX_FLOOR_EXPR:
6959 case FIX_CEIL_EXPR:
6960 abort (); /* Not used for C. */
6961
6962 case FIX_TRUNC_EXPR:
906c4e36 6963 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6964 if (target == 0)
6965 target = gen_reg_rtx (mode);
6966 expand_fix (target, op0, unsignedp);
6967 return target;
6968
6969 case FLOAT_EXPR:
906c4e36 6970 op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
6971 if (target == 0)
6972 target = gen_reg_rtx (mode);
6973 /* expand_float can't figure out what to do if FROM has VOIDmode.
6974 So give it the correct mode. With -O, cse will optimize this. */
6975 if (GET_MODE (op0) == VOIDmode)
6976 op0 = copy_to_mode_reg (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
6977 op0);
6978 expand_float (target, op0,
6979 TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))));
6980 return target;
6981
6982 case NEGATE_EXPR:
5b22bee8 6983 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
bbf6f052
RK
6984 temp = expand_unop (mode, neg_optab, op0, target, 0);
6985 if (temp == 0)
6986 abort ();
6987 return temp;
6988
6989 case ABS_EXPR:
6990 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
6991
2d7050fd 6992 /* Handle complex values specially. */
d6a5ac33
RK
6993 if (GET_MODE_CLASS (mode) == MODE_COMPLEX_INT
6994 || GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT)
6995 return expand_complex_abs (mode, op0, target, unsignedp);
2d7050fd 6996
bbf6f052
RK
6997 /* Unsigned abs is simply the operand. Testing here means we don't
6998 risk generating incorrect code below. */
6999 if (TREE_UNSIGNED (type))
7000 return op0;
7001
2e5ec6cf 7002 return expand_abs (mode, op0, target, unsignedp,
e5e809f4 7003 safe_from_p (target, TREE_OPERAND (exp, 0), 1));
bbf6f052
RK
7004
7005 case MAX_EXPR:
7006 case MIN_EXPR:
7007 target = original_target;
e5e809f4 7008 if (target == 0 || ! safe_from_p (target, TREE_OPERAND (exp, 1), 1)
fc155707 7009 || (GET_CODE (target) == MEM && MEM_VOLATILE_P (target))
d6a5ac33 7010 || GET_MODE (target) != mode
bbf6f052
RK
7011 || (GET_CODE (target) == REG
7012 && REGNO (target) < FIRST_PSEUDO_REGISTER))
7013 target = gen_reg_rtx (mode);
906c4e36 7014 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7015 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7016
7017 /* First try to do it with a special MIN or MAX instruction.
7018 If that does not win, use a conditional jump to select the proper
7019 value. */
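/* The jump fallback is, in outline (added sketch):
       target = op0;
       if (target >= op1)   ...or <= for MIN_EXPR...
         goto done;
       target = op1;
     done:
   with a word-by-word comparison when the mode is too wide.  */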
7020 this_optab = (TREE_UNSIGNED (type)
7021 ? (code == MIN_EXPR ? umin_optab : umax_optab)
7022 : (code == MIN_EXPR ? smin_optab : smax_optab));
7023
7024 temp = expand_binop (mode, this_optab, op0, op1, target, unsignedp,
7025 OPTAB_WIDEN);
7026 if (temp != 0)
7027 return temp;
7028
fa2981d8
JW
7029 /* At this point, a MEM target is no longer useful; we will get better
7030 code without it. */
7031
7032 if (GET_CODE (target) == MEM)
7033 target = gen_reg_rtx (mode);
7034
ee456b1c
RK
7035 if (target != op0)
7036 emit_move_insn (target, op0);
d6a5ac33 7037
bbf6f052 7038 op0 = gen_label_rtx ();
d6a5ac33 7039
f81497d9
RS
7040 /* If this mode is an integer too wide to compare properly,
7041 compare word by word. Rely on cse to optimize constant cases. */
d6a5ac33 7042 if (GET_MODE_CLASS (mode) == MODE_INT && !can_compare_p (mode))
bbf6f052 7043 {
f81497d9 7044 if (code == MAX_EXPR)
d6a5ac33
RK
7045 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7046 target, op1, NULL_RTX, op0);
bbf6f052 7047 else
d6a5ac33
RK
7048 do_jump_by_parts_greater_rtx (mode, TREE_UNSIGNED (type),
7049 op1, target, NULL_RTX, op0);
ee456b1c 7050 emit_move_insn (target, op1);
bbf6f052 7051 }
f81497d9
RS
7052 else
7053 {
7054 if (code == MAX_EXPR)
7055 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
7056 ? compare_from_rtx (target, op1, GEU, 1, mode, NULL_RTX, 0)
7057 : compare_from_rtx (target, op1, GE, 0, mode, NULL_RTX, 0));
f81497d9
RS
7058 else
7059 temp = (TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))
ee456b1c
RK
7060 ? compare_from_rtx (target, op1, LEU, 1, mode, NULL_RTX, 0)
7061 : compare_from_rtx (target, op1, LE, 0, mode, NULL_RTX, 0));
f81497d9 7062 if (temp == const0_rtx)
ee456b1c 7063 emit_move_insn (target, op1);
f81497d9
RS
7064 else if (temp != const_true_rtx)
7065 {
7066 if (bcc_gen_fctn[(int) GET_CODE (temp)] != 0)
7067 emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (temp)]) (op0));
7068 else
7069 abort ();
ee456b1c 7070 emit_move_insn (target, op1);
f81497d9
RS
7071 }
7072 }
bbf6f052
RK
7073 emit_label (op0);
7074 return target;
7075
bbf6f052
RK
7076 case BIT_NOT_EXPR:
7077 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7078 temp = expand_unop (mode, one_cmpl_optab, op0, target, 1);
7079 if (temp == 0)
7080 abort ();
7081 return temp;
7082
7083 case FFS_EXPR:
7084 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7085 temp = expand_unop (mode, ffs_optab, op0, target, 1);
7086 if (temp == 0)
7087 abort ();
7088 return temp;
7089
d6a5ac33
RK
7090 /* ??? Can optimize bitwise operations with one arg constant.
7091 Can optimize (a bitwise1 n) bitwise2 (a bitwise3 b)
7092 and (a bitwise1 b) bitwise2 b (etc)
7093 but that is probably not worthwhile. */
7094
7095 /* BIT_AND_EXPR is for bitwise anding. TRUTH_AND_EXPR is for anding two
7096 boolean values when we want in all cases to compute both of them. In
7097 general it is fastest to do TRUTH_AND_EXPR by computing both operands
7098 as actual zero-or-1 values and then bitwise anding. In cases where
7099 there cannot be any side effects, better code would be made by
7100 treating TRUTH_AND_EXPR like TRUTH_ANDIF_EXPR; but the question is
7101 how to recognize those cases. */
7102
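/* E.g. (illustrative): a front end may use TRUTH_AND_EXPR for an `and'
   that must evaluate both operands, such as Fortran's .AND.; both are
   computed as 0-or-1 values and bitwise ANDed, whereas TRUTH_ANDIF_EXPR
   would branch around the second operand.  */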
bbf6f052
RK
7103 case TRUTH_AND_EXPR:
7104 case BIT_AND_EXPR:
7105 this_optab = and_optab;
7106 goto binop;
7107
bbf6f052
RK
7108 case TRUTH_OR_EXPR:
7109 case BIT_IOR_EXPR:
7110 this_optab = ior_optab;
7111 goto binop;
7112
874726a8 7113 case TRUTH_XOR_EXPR:
bbf6f052
RK
7114 case BIT_XOR_EXPR:
7115 this_optab = xor_optab;
7116 goto binop;
7117
7118 case LSHIFT_EXPR:
7119 case RSHIFT_EXPR:
7120 case LROTATE_EXPR:
7121 case RROTATE_EXPR:
7122 preexpand_calls (exp);
e5e809f4 7123 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7124 subtarget = 0;
7125 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
7126 return expand_shift (code, mode, op0, TREE_OPERAND (exp, 1), target,
7127 unsignedp);
7128
d6a5ac33
RK
7129 /* Could determine the answer when only additive constants differ. Also,
7130 the addition of one can be handled by changing the condition. */
bbf6f052
RK
7131 case LT_EXPR:
7132 case LE_EXPR:
7133 case GT_EXPR:
7134 case GE_EXPR:
7135 case EQ_EXPR:
7136 case NE_EXPR:
7137 preexpand_calls (exp);
7138 temp = do_store_flag (exp, target, tmode != VOIDmode ? tmode : mode, 0);
7139 if (temp != 0)
7140 return temp;
d6a5ac33 7141
0f41302f 7142 /* For foo != 0, load foo, and if it is nonzero load 1 instead. */
bbf6f052
RK
7143 if (code == NE_EXPR && integer_zerop (TREE_OPERAND (exp, 1))
7144 && original_target
7145 && GET_CODE (original_target) == REG
7146 && (GET_MODE (original_target)
7147 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
7148 {
d6a5ac33
RK
7149 temp = expand_expr (TREE_OPERAND (exp, 0), original_target,
7150 VOIDmode, 0);
7151
bbf6f052
RK
7152 if (temp != original_target)
7153 temp = copy_to_reg (temp);
d6a5ac33 7154
bbf6f052 7155 op1 = gen_label_rtx ();
906c4e36 7156 emit_cmp_insn (temp, const0_rtx, EQ, NULL_RTX,
bbf6f052
RK
7157 GET_MODE (temp), unsignedp, 0);
7158 emit_jump_insn (gen_beq (op1));
7159 emit_move_insn (temp, const1_rtx);
7160 emit_label (op1);
7161 return temp;
7162 }
d6a5ac33 7163
bbf6f052
RK
7164 /* If no set-flag instruction, must generate a conditional
7165 store into a temporary variable. Drop through
7166 and handle this like && and ||. */
7167
7168 case TRUTH_ANDIF_EXPR:
7169 case TRUTH_ORIF_EXPR:
e44842fe 7170 if (! ignore
e5e809f4 7171 && (target == 0 || ! safe_from_p (target, exp, 1)
e44842fe
RK
7172 /* Make sure we don't have a hard reg (such as function's return
7173 value) live across basic blocks, if not optimizing. */
7174 || (!optimize && GET_CODE (target) == REG
7175 && REGNO (target) < FIRST_PSEUDO_REGISTER)))
bbf6f052 7176 target = gen_reg_rtx (tmode != VOIDmode ? tmode : mode);
e44842fe
RK
7177
7178 if (target)
7179 emit_clr_insn (target);
7180
bbf6f052
RK
7181 op1 = gen_label_rtx ();
7182 jumpifnot (exp, op1);
e44842fe
RK
7183
7184 if (target)
7185 emit_0_to_1_insn (target);
7186
bbf6f052 7187 emit_label (op1);
e44842fe 7188 return ignore ? const0_rtx : target;
bbf6f052
RK
7189
7190 case TRUTH_NOT_EXPR:
7191 op0 = expand_expr (TREE_OPERAND (exp, 0), target, VOIDmode, 0);
7192 /* The parser is careful to generate TRUTH_NOT_EXPR
7193 only with operands that are always zero or one. */
906c4e36 7194 temp = expand_binop (mode, xor_optab, op0, const1_rtx,
bbf6f052
RK
7195 target, 1, OPTAB_LIB_WIDEN);
7196 if (temp == 0)
7197 abort ();
7198 return temp;
7199
7200 case COMPOUND_EXPR:
7201 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
7202 emit_queue ();
7203 return expand_expr (TREE_OPERAND (exp, 1),
7204 (ignore ? const0_rtx : target),
7205 VOIDmode, 0);
7206
7207 case COND_EXPR:
ac01eace
RK
7208 /* If we would have a "singleton" (see below) were it not for a
7209 conversion in each arm, bring that conversion back out. */
7210 if (TREE_CODE (TREE_OPERAND (exp, 1)) == NOP_EXPR
7211 && TREE_CODE (TREE_OPERAND (exp, 2)) == NOP_EXPR
7212 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0))
7213 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (exp, 2), 0))))
7214 {
7215 tree true = TREE_OPERAND (TREE_OPERAND (exp, 1), 0);
7216 tree false = TREE_OPERAND (TREE_OPERAND (exp, 2), 0);
7217
7218 if ((TREE_CODE_CLASS (TREE_CODE (true)) == '2'
7219 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7220 || (TREE_CODE_CLASS (TREE_CODE (false)) == '2'
7221 && operand_equal_p (true, TREE_OPERAND (false, 0), 0))
7222 || (TREE_CODE_CLASS (TREE_CODE (true)) == '1'
7223 && operand_equal_p (false, TREE_OPERAND (true, 0), 0))
7224 || (TREE_CODE_CLASS (TREE_CODE (false)) == '1'
7225 && operand_equal_p (true, TREE_OPERAND (false, 0), 0)))
7226 return expand_expr (build1 (NOP_EXPR, type,
7227 build (COND_EXPR, TREE_TYPE (true),
7228 TREE_OPERAND (exp, 0),
7229 true, false)),
7230 target, tmode, modifier);
7231 }
7232
bbf6f052
RK
7233 {
7234 /* Note that COND_EXPRs whose type is a structure or union
7235 are required to be constructed to contain assignments of
7236 a temporary variable, so that we can evaluate them here
7237 for side effect only. If type is void, we must do likewise. */
7238
7239 /* If an arm of the branch requires a cleanup,
7240 only that cleanup is performed. */
7241
7242 tree singleton = 0;
7243 tree binary_op = 0, unary_op = 0;
bbf6f052
RK
7244
7245 /* If this is (A ? 1 : 0) and A is a condition, just evaluate it and
7246 convert it to our mode, if necessary. */
7247 if (integer_onep (TREE_OPERAND (exp, 1))
7248 && integer_zerop (TREE_OPERAND (exp, 2))
7249 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7250 {
dd27116b
RK
7251 if (ignore)
7252 {
7253 expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode,
921b3427 7254 ro_modifier);
dd27116b
RK
7255 return const0_rtx;
7256 }
7257
921b3427 7258 op0 = expand_expr (TREE_OPERAND (exp, 0), target, mode, ro_modifier);
bbf6f052
RK
7259 if (GET_MODE (op0) == mode)
7260 return op0;
d6a5ac33 7261
bbf6f052
RK
7262 if (target == 0)
7263 target = gen_reg_rtx (mode);
7264 convert_move (target, op0, unsignedp);
7265 return target;
7266 }
7267
ac01eace
RK
7268 /* Check for X ? A + B : A. If we have this, we can copy A to the
7269 output and conditionally add B. Similarly for unary operations.
7270 Don't do this if X has side-effects because those side effects
7271 might affect A or B and the "?" operation is a sequence point in
7272 ANSI. (operand_equal_p tests for side effects.) */
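/* E.g. (illustrative): `x ? a + b : a' stores A in the target and then
   conditionally adds B, rather than computing A separately down both
   arms of a branch.  */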
bbf6f052
RK
7273
7274 if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '2'
7275 && operand_equal_p (TREE_OPERAND (exp, 2),
7276 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7277 singleton = TREE_OPERAND (exp, 2), binary_op = TREE_OPERAND (exp, 1);
7278 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '2'
7279 && operand_equal_p (TREE_OPERAND (exp, 1),
7280 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7281 singleton = TREE_OPERAND (exp, 1), binary_op = TREE_OPERAND (exp, 2);
7282 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 1))) == '1'
7283 && operand_equal_p (TREE_OPERAND (exp, 2),
7284 TREE_OPERAND (TREE_OPERAND (exp, 1), 0), 0))
7285 singleton = TREE_OPERAND (exp, 2), unary_op = TREE_OPERAND (exp, 1);
7286 else if (TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 2))) == '1'
7287 && operand_equal_p (TREE_OPERAND (exp, 1),
7288 TREE_OPERAND (TREE_OPERAND (exp, 2), 0), 0))
7289 singleton = TREE_OPERAND (exp, 1), unary_op = TREE_OPERAND (exp, 2);
7290
01c8a7c8
RK
7291 /* If we are not to produce a result, we have no target. Otherwise,
7292 if a target was specified use it; it will not be used as an
7293 intermediate target unless it is safe. If no target, use a
7294 temporary. */
7295
7296 if (ignore)
7297 temp = 0;
7298 else if (original_target
e5e809f4 7299 && (safe_from_p (original_target, TREE_OPERAND (exp, 0), 1)
01c8a7c8
RK
7300 || (singleton && GET_CODE (original_target) == REG
7301 && REGNO (original_target) >= FIRST_PSEUDO_REGISTER
7302 && original_target == var_rtx (singleton)))
7303 && GET_MODE (original_target) == mode
7c00d1fe
RK
7304#ifdef HAVE_conditional_move
7305 && (! can_conditionally_move_p (mode)
7306 || GET_CODE (original_target) == REG
7307 || TREE_ADDRESSABLE (type))
7308#endif
01c8a7c8
RK
7309 && ! (GET_CODE (original_target) == MEM
7310 && MEM_VOLATILE_P (original_target)))
7311 temp = original_target;
7312 else if (TREE_ADDRESSABLE (type))
7313 abort ();
7314 else
7315 temp = assign_temp (type, 0, 0, 1);
7316
ac01eace
RK
7317 /* If we had X ? A + C : A, with C a constant power of 2, and we can
7318 do the test of X as a store-flag operation, do this as
7319 A + ((X != 0) << log C). Similarly for other simple binary
7320 operators. Only do for C == 1 if BRANCH_COST is low. */
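/* Worked example (illustrative): in `x ? a + 8 : a', C == 8 is a power
   of 2, so when X can be computed as a store-flag we emit
   a + ((x != 0) << 3) and avoid the branch entirely.  */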
dd27116b 7321 if (temp && singleton && binary_op
bbf6f052
RK
7322 && (TREE_CODE (binary_op) == PLUS_EXPR
7323 || TREE_CODE (binary_op) == MINUS_EXPR
7324 || TREE_CODE (binary_op) == BIT_IOR_EXPR
9fbd9f58 7325 || TREE_CODE (binary_op) == BIT_XOR_EXPR)
ac01eace
RK
7326 && (BRANCH_COST >= 3 ? integer_pow2p (TREE_OPERAND (binary_op, 1))
7327 : integer_onep (TREE_OPERAND (binary_op, 1)))
bbf6f052
RK
7328 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<')
7329 {
7330 rtx result;
7331 optab boptab = (TREE_CODE (binary_op) == PLUS_EXPR ? add_optab
7332 : TREE_CODE (binary_op) == MINUS_EXPR ? sub_optab
7333 : TREE_CODE (binary_op) == BIT_IOR_EXPR ? ior_optab
2d444001 7334 : xor_optab);
bbf6f052
RK
7335
7336 /* If we had X ? A : A + 1, do this as A + (X == 0).
7337
7338 We have to invert the truth value here and then put it
7339 back later if do_store_flag fails. We cannot simply copy
7340 TREE_OPERAND (exp, 0) to another variable and modify that
7341 because invert_truthvalue can modify the tree pointed to
7342 by its argument. */
7343 if (singleton == TREE_OPERAND (exp, 1))
7344 TREE_OPERAND (exp, 0)
7345 = invert_truthvalue (TREE_OPERAND (exp, 0));
7346
7347 result = do_store_flag (TREE_OPERAND (exp, 0),
e5e809f4 7348 (safe_from_p (temp, singleton, 1)
906c4e36 7349 ? temp : NULL_RTX),
bbf6f052
RK
7350 mode, BRANCH_COST <= 1);
7351
ac01eace
RK
7352 if (result != 0 && ! integer_onep (TREE_OPERAND (binary_op, 1)))
7353 result = expand_shift (LSHIFT_EXPR, mode, result,
7354 build_int_2 (tree_log2
7355 (TREE_OPERAND
7356 (binary_op, 1)),
7357 0),
e5e809f4 7358 (safe_from_p (temp, singleton, 1)
ac01eace
RK
7359 ? temp : NULL_RTX), 0);
7360
bbf6f052
RK
7361 if (result)
7362 {
906c4e36 7363 op1 = expand_expr (singleton, NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7364 return expand_binop (mode, boptab, op1, result, temp,
7365 unsignedp, OPTAB_LIB_WIDEN);
7366 }
7367 else if (singleton == TREE_OPERAND (exp, 1))
7368 TREE_OPERAND (exp, 0)
7369 = invert_truthvalue (TREE_OPERAND (exp, 0));
7370 }
7371
dabf8373 7372 do_pending_stack_adjust ();
bbf6f052
RK
7373 NO_DEFER_POP;
7374 op0 = gen_label_rtx ();
7375
7376 if (singleton && ! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0)))
7377 {
7378 if (temp != 0)
7379 {
7380 /* If the target conflicts with the other operand of the
7381 binary op, we can't use it. Also, we can't use the target
7382 if it is a hard register, because evaluating the condition
7383 might clobber it. */
7384 if ((binary_op
e5e809f4 7385 && ! safe_from_p (temp, TREE_OPERAND (binary_op, 1), 1))
bbf6f052
RK
7386 || (GET_CODE (temp) == REG
7387 && REGNO (temp) < FIRST_PSEUDO_REGISTER))
7388 temp = gen_reg_rtx (mode);
7389 store_expr (singleton, temp, 0);
7390 }
7391 else
906c4e36 7392 expand_expr (singleton,
2937cf87 7393 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7394 if (singleton == TREE_OPERAND (exp, 1))
7395 jumpif (TREE_OPERAND (exp, 0), op0);
7396 else
7397 jumpifnot (TREE_OPERAND (exp, 0), op0);
7398
956d6950 7399 start_cleanup_deferral ();
bbf6f052
RK
7400 if (binary_op && temp == 0)
7401 /* Just touch the other operand. */
7402 expand_expr (TREE_OPERAND (binary_op, 1),
906c4e36 7403 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7404 else if (binary_op)
7405 store_expr (build (TREE_CODE (binary_op), type,
7406 make_tree (type, temp),
7407 TREE_OPERAND (binary_op, 1)),
7408 temp, 0);
7409 else
7410 store_expr (build1 (TREE_CODE (unary_op), type,
7411 make_tree (type, temp)),
7412 temp, 0);
7413 op1 = op0;
bbf6f052 7414 }
bbf6f052
RK
7415 /* Check for A op 0 ? A : FOO and A op 0 ? FOO : A where OP is any
7416 comparison operator. If we have one of these cases, set the
7417 output to A, branch on A (cse will merge these two references),
7418 then set the output to FOO. */
7419 else if (temp
7420 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7421 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7422 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7423 TREE_OPERAND (exp, 1), 0)
e9a25f70
JL
7424 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7425 || TREE_CODE (TREE_OPERAND (exp, 1)) == SAVE_EXPR)
e5e809f4 7426 && safe_from_p (temp, TREE_OPERAND (exp, 2), 1))
bbf6f052
RK
7427 {
7428 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7429 temp = gen_reg_rtx (mode);
7430 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7431 jumpif (TREE_OPERAND (exp, 0), op0);
5dab5552 7432
956d6950 7433 start_cleanup_deferral ();
bbf6f052
RK
7434 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7435 op1 = op0;
7436 }
7437 else if (temp
7438 && TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, 0))) == '<'
7439 && integer_zerop (TREE_OPERAND (TREE_OPERAND (exp, 0), 1))
7440 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (exp, 0), 0),
7441 TREE_OPERAND (exp, 2), 0)
e9a25f70
JL
7442 && (! TREE_SIDE_EFFECTS (TREE_OPERAND (exp, 0))
7443 || TREE_CODE (TREE_OPERAND (exp, 2)) == SAVE_EXPR)
e5e809f4 7444 && safe_from_p (temp, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7445 {
7446 if (GET_CODE (temp) == REG && REGNO (temp) < FIRST_PSEUDO_REGISTER)
7447 temp = gen_reg_rtx (mode);
7448 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7449 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 7450
956d6950 7451 start_cleanup_deferral ();
bbf6f052
RK
7452 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7453 op1 = op0;
7454 }
7455 else
7456 {
7457 op1 = gen_label_rtx ();
7458 jumpifnot (TREE_OPERAND (exp, 0), op0);
5dab5552 7459
956d6950 7460 start_cleanup_deferral ();
bbf6f052
RK
7461 if (temp != 0)
7462 store_expr (TREE_OPERAND (exp, 1), temp, 0);
7463 else
906c4e36
RK
7464 expand_expr (TREE_OPERAND (exp, 1),
7465 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
956d6950 7466 end_cleanup_deferral ();
bbf6f052
RK
7467 emit_queue ();
7468 emit_jump_insn (gen_jump (op1));
7469 emit_barrier ();
7470 emit_label (op0);
956d6950 7471 start_cleanup_deferral ();
bbf6f052
RK
7472 if (temp != 0)
7473 store_expr (TREE_OPERAND (exp, 2), temp, 0);
7474 else
906c4e36
RK
7475 expand_expr (TREE_OPERAND (exp, 2),
7476 ignore ? const0_rtx : NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7477 }
7478
956d6950 7479 end_cleanup_deferral ();
bbf6f052
RK
7480
7481 emit_queue ();
7482 emit_label (op1);
7483 OK_DEFER_POP;
5dab5552 7484
bbf6f052
RK
7485 return temp;
7486 }
7487
7488 case TARGET_EXPR:
7489 {
7490 /* Something needs to be initialized, but we didn't know
7491 where that thing was when building the tree. For example,
7492 it could be the return value of a function, or a parameter
7493 to a function which is placed on the stack, or a temporary
7494 variable which must be passed by reference.
7495
7496 We guarantee that the expression will either be constructed
7497 or copied into our original target. */
7498
7499 tree slot = TREE_OPERAND (exp, 0);
2a888d4c 7500 tree cleanups = NULL_TREE;
5c062816 7501 tree exp1;
bbf6f052
RK
7502
7503 if (TREE_CODE (slot) != VAR_DECL)
7504 abort ();
7505
9c51f375
RK
7506 if (! ignore)
7507 target = original_target;
7508
bbf6f052
RK
7509 if (target == 0)
7510 {
7511 if (DECL_RTL (slot) != 0)
ac993f4f
MS
7512 {
7513 target = DECL_RTL (slot);
5c062816 7514 /* If we have already expanded the slot, don't do
ac993f4f 7515 it again. (mrs) */
5c062816
MS
7516 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7517 return target;
ac993f4f 7518 }
bbf6f052
RK
7519 else
7520 {
e9a25f70 7521 target = assign_temp (type, 2, 0, 1);
bbf6f052
RK
7522 /* All temp slots at this level must not conflict. */
7523 preserve_temp_slots (target);
7524 DECL_RTL (slot) = target;
e9a25f70
JL
7525 if (TREE_ADDRESSABLE (slot))
7526 {
7527 TREE_ADDRESSABLE (slot) = 0;
7528 mark_addressable (slot);
7529 }
bbf6f052 7530
e287fd6e
RK
7531 /* Since SLOT is not known to the called function
7532 to belong to its stack frame, we must build an explicit
7533 cleanup. This case occurs when we must build up a reference
7534 to pass as an argument. In this case,
7535 it is very likely that such a reference need not be
7536 built here. */
7537
7538 if (TREE_OPERAND (exp, 2) == 0)
7539 TREE_OPERAND (exp, 2) = maybe_build_cleanup (slot);
2a888d4c 7540 cleanups = TREE_OPERAND (exp, 2);
e287fd6e 7541 }
bbf6f052
RK
7542 }
7543 else
7544 {
7545 /* This case does occur when expanding a parameter which
7546 needs to be constructed on the stack. The target
7547 is the actual stack address that we want to initialize.
7548 The function we call will perform the cleanup in this case. */
7549
8c042b47
RS
7550 /* If we have already assigned it space, use that space,
7551 not the target that we were passed in, as our target
7552 parameter is only a hint. */
7553 if (DECL_RTL (slot) != 0)
7554 {
7555 target = DECL_RTL (slot);
7556 /* If we have already expanded the slot, don't do
7557 it again. (mrs) */
7558 if (TREE_OPERAND (exp, 1) == NULL_TREE)
7559 return target;
7560 }
21002281
JW
7561 else
7562 {
7563 DECL_RTL (slot) = target;
7564 /* If we must have an addressable slot, then make sure that
7565 the RTL that we just stored in slot is OK. */
7566 if (TREE_ADDRESSABLE (slot))
7567 {
7568 TREE_ADDRESSABLE (slot) = 0;
7569 mark_addressable (slot);
7570 }
7571 }
bbf6f052
RK
7572 }
7573
4847c938 7574 exp1 = TREE_OPERAND (exp, 3) = TREE_OPERAND (exp, 1);
5c062816
MS
7575 /* Mark it as expanded. */
7576 TREE_OPERAND (exp, 1) = NULL_TREE;
7577
e5e809f4 7578 TREE_USED (slot) = 1;
41531e5b 7579 store_expr (exp1, target, 0);
61d6b1cc 7580
e976b8b2 7581 expand_decl_cleanup (NULL_TREE, cleanups);
61d6b1cc 7582
41531e5b 7583 return target;
bbf6f052
RK
7584 }
7585
7586 case INIT_EXPR:
7587 {
7588 tree lhs = TREE_OPERAND (exp, 0);
7589 tree rhs = TREE_OPERAND (exp, 1);
7590 tree noncopied_parts = 0;
7591 tree lhs_type = TREE_TYPE (lhs);
7592
7593 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7594 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0 && !fixed_type_p (rhs))
7595 noncopied_parts = init_noncopied_parts (stabilize_reference (lhs),
7596 TYPE_NONCOPIED_PARTS (lhs_type));
7597 while (noncopied_parts != 0)
7598 {
7599 expand_assignment (TREE_VALUE (noncopied_parts),
7600 TREE_PURPOSE (noncopied_parts), 0, 0);
7601 noncopied_parts = TREE_CHAIN (noncopied_parts);
7602 }
7603 return temp;
7604 }
7605
7606 case MODIFY_EXPR:
7607 {
7608 /* If lhs is complex, expand calls in rhs before computing it.
7609 That's so we don't compute a pointer and save it over a call.
7610 If lhs is simple, compute it first so we can give it as a
7611 target if the rhs is just a call. This avoids an extra temp and copy
7612 and that prevents a partial-subsumption which makes bad code.
7613 Actually we could treat component_ref's of vars like vars. */
7614
7615 tree lhs = TREE_OPERAND (exp, 0);
7616 tree rhs = TREE_OPERAND (exp, 1);
7617 tree noncopied_parts = 0;
7618 tree lhs_type = TREE_TYPE (lhs);
7619
7620 temp = 0;
7621
7622 if (TREE_CODE (lhs) != VAR_DECL
7623 && TREE_CODE (lhs) != RESULT_DECL
b60334e8
RK
7624 && TREE_CODE (lhs) != PARM_DECL
7625 && ! (TREE_CODE (lhs) == INDIRECT_REF
7626 && TYPE_READONLY (TREE_TYPE (TREE_OPERAND (lhs, 0)))))
bbf6f052
RK
7627 preexpand_calls (exp);
7628
7629 /* Check for |= or &= of a bitfield of size one into another bitfield
7630 of size one. In this case, (unless we need the result of the
7631 assignment) we can do this more efficiently with a
7632 test followed by an assignment, if necessary.
7633
7634 ??? At this point, we can't get a BIT_FIELD_REF here. But if
7635 things change so we do, this code should be enhanced to
7636 support it. */
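/* E.g. (illustrative, one-bit fields): `s.a |= s.b;' with the result
   unused becomes simply `if (s.b) s.a = 1;', and `s.a &= s.b;'
   becomes `if (! s.b) s.a = 0;'.  */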
7637 if (ignore
7638 && TREE_CODE (lhs) == COMPONENT_REF
7639 && (TREE_CODE (rhs) == BIT_IOR_EXPR
7640 || TREE_CODE (rhs) == BIT_AND_EXPR)
7641 && TREE_OPERAND (rhs, 0) == lhs
7642 && TREE_CODE (TREE_OPERAND (rhs, 1)) == COMPONENT_REF
7643 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (lhs, 1))) == 1
7644 && TREE_INT_CST_LOW (DECL_SIZE (TREE_OPERAND (TREE_OPERAND (rhs, 1), 1))) == 1)
7645 {
7646 rtx label = gen_label_rtx ();
7647
7648 do_jump (TREE_OPERAND (rhs, 1),
7649 TREE_CODE (rhs) == BIT_IOR_EXPR ? label : 0,
7650 TREE_CODE (rhs) == BIT_AND_EXPR ? label : 0);
7651 expand_assignment (lhs, convert (TREE_TYPE (rhs),
7652 (TREE_CODE (rhs) == BIT_IOR_EXPR
7653 ? integer_one_node
7654 : integer_zero_node)),
7655 0, 0);
e7c33f54 7656 do_pending_stack_adjust ();
bbf6f052
RK
7657 emit_label (label);
7658 return const0_rtx;
7659 }
7660
7661 if (TYPE_NONCOPIED_PARTS (lhs_type) != 0
7662 && ! (fixed_type_p (lhs) && fixed_type_p (rhs)))
7663 noncopied_parts = save_noncopied_parts (stabilize_reference (lhs),
7664 TYPE_NONCOPIED_PARTS (lhs_type));
7665
7666 temp = expand_assignment (lhs, rhs, ! ignore, original_target != 0);
7667 while (noncopied_parts != 0)
7668 {
7669 expand_assignment (TREE_PURPOSE (noncopied_parts),
7670 TREE_VALUE (noncopied_parts), 0, 0);
7671 noncopied_parts = TREE_CHAIN (noncopied_parts);
7672 }
7673 return temp;
7674 }
7675
6e7f84a7
APB
7676 case RETURN_EXPR:
7677 if (!TREE_OPERAND (exp, 0))
7678 expand_null_return ();
7679 else
7680 expand_return (TREE_OPERAND (exp, 0));
7681 return const0_rtx;
7682
bbf6f052
RK
7683 case PREINCREMENT_EXPR:
7684 case PREDECREMENT_EXPR:
7b8b9722 7685 return expand_increment (exp, 0, ignore);
bbf6f052
RK
7686
7687 case POSTINCREMENT_EXPR:
7688 case POSTDECREMENT_EXPR:
7689 /* Faster to treat as pre-increment if result is not used. */
7b8b9722 7690 return expand_increment (exp, ! ignore, ignore);
bbf6f052
RK
7691
7692 case ADDR_EXPR:
987c71d9 7693 /* If nonzero, TEMP will be set to the address of something that might
0f41302f 7694 be a MEM corresponding to a stack slot. */
987c71d9
RK
7695 temp = 0;
7696
bbf6f052
RK
7697 /* Are we taking the address of a nested function? */
7698 if (TREE_CODE (TREE_OPERAND (exp, 0)) == FUNCTION_DECL
38ee6ed9 7699 && decl_function_context (TREE_OPERAND (exp, 0)) != 0
e5e809f4
JL
7700 && ! DECL_NO_STATIC_CHAIN (TREE_OPERAND (exp, 0))
7701 && ! TREE_STATIC (exp))
bbf6f052
RK
7702 {
7703 op0 = trampoline_address (TREE_OPERAND (exp, 0));
7704 op0 = force_operand (op0, target);
7705 }
682ba3a6
RK
7706 /* If we are taking the address of something erroneous, just
7707 return a zero. */
7708 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ERROR_MARK)
7709 return const0_rtx;
bbf6f052
RK
7710 else
7711 {
e287fd6e
RK
7712 /* We make sure to pass const0_rtx down if we came in with
7713 ignore set, to avoid doing the cleanups twice for something. */
7714 op0 = expand_expr (TREE_OPERAND (exp, 0),
7715 ignore ? const0_rtx : NULL_RTX, VOIDmode,
bbf6f052
RK
7716 (modifier == EXPAND_INITIALIZER
7717 ? modifier : EXPAND_CONST_ADDRESS));
896102d0 7718
119af78a
RK
7719 /* If we are going to ignore the result, OP0 will have been set
7720 to const0_rtx, so just return it. Don't get confused and
7721 think we are taking the address of the constant. */
7722 if (ignore)
7723 return op0;
7724
3539e816
MS
7725 op0 = protect_from_queue (op0, 0);
7726
896102d0
RK
7727 /* We would like the object in memory. If it is a constant,
7728 we can have it be statically allocated into memory. For
682ba3a6 7729 a non-constant (REG, SUBREG or CONCAT), we need to allocate some
896102d0
RK
7730 memory and store the value into it. */
7731
7732 if (CONSTANT_P (op0))
7733 op0 = force_const_mem (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))),
7734 op0);
987c71d9 7735 else if (GET_CODE (op0) == MEM)
af5b53ed
RK
7736 {
7737 mark_temp_addr_taken (op0);
7738 temp = XEXP (op0, 0);
7739 }
896102d0 7740
682ba3a6 7741 else if (GET_CODE (op0) == REG || GET_CODE (op0) == SUBREG
6c8538cc 7742 || GET_CODE (op0) == CONCAT || GET_CODE (op0) == ADDRESSOF)
896102d0
RK
7743 {
7744 /* If this object is in a register, it must be not
0f41302f 7745 be BLKmode. */
896102d0 7746 tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));
06089a8b 7747 rtx memloc = assign_temp (inner_type, 1, 1, 1);
896102d0 7748
7a0b7b9a 7749 mark_temp_addr_taken (memloc);
896102d0
RK
7750 emit_move_insn (memloc, op0);
7751 op0 = memloc;
7752 }
7753
bbf6f052
RK
7754 if (GET_CODE (op0) != MEM)
7755 abort ();
7756
7757 if (modifier == EXPAND_SUM || modifier == EXPAND_INITIALIZER)
88f63c77
RK
7758 {
7759 temp = XEXP (op0, 0);
7760#ifdef POINTERS_EXTEND_UNSIGNED
7761 if (GET_MODE (temp) == Pmode && GET_MODE (temp) != mode
7762 && mode == ptr_mode)
9fcfcce7 7763 temp = convert_memory_address (ptr_mode, temp);
88f63c77
RK
7764#endif
7765 return temp;
7766 }
987c71d9 7767
bbf6f052
RK
7768 op0 = force_operand (XEXP (op0, 0), target);
7769 }
987c71d9 7770
bbf6f052 7771 if (flag_force_addr && GET_CODE (op0) != REG)
987c71d9
RK
7772 op0 = force_reg (Pmode, op0);
7773
dc6d66b3
RK
7774 if (GET_CODE (op0) == REG
7775 && ! REG_USERVAR_P (op0))
7776 mark_reg_pointer (op0, TYPE_ALIGN (TREE_TYPE (type)) / BITS_PER_UNIT);
987c71d9
RK
7777
7778 /* If we might have had a temp slot, add an equivalent address
7779 for it. */
7780 if (temp != 0)
7781 update_temp_slot_address (temp, op0);
7782
88f63c77
RK
7783#ifdef POINTERS_EXTEND_UNSIGNED
7784 if (GET_MODE (op0) == Pmode && GET_MODE (op0) != mode
7785 && mode == ptr_mode)
9fcfcce7 7786 op0 = convert_memory_address (ptr_mode, op0);
88f63c77
RK
7787#endif
7788
bbf6f052
RK
7789 return op0;
7790
7791 case ENTRY_VALUE_EXPR:
7792 abort ();
7793
7308a047
RS
7794 /* COMPLEX type for Extended Pascal & Fortran. */
7795 case COMPLEX_EXPR:
7796 {
7797 enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
6551fa4d 7798 rtx insns;
7308a047
RS
7799
7800 /* Get the rtx for the operands. */
7801 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7802 op1 = expand_expr (TREE_OPERAND (exp, 1), 0, VOIDmode, 0);
7803
7804 if (! target)
7805 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7806
6551fa4d 7807 start_sequence ();
7308a047
RS
7808
7809 /* Move the real (op0) and imaginary (op1) parts to their location. */
2d7050fd
RS
7810 emit_move_insn (gen_realpart (mode, target), op0);
7811 emit_move_insn (gen_imagpart (mode, target), op1);
7308a047 7812
6551fa4d
JW
7813 insns = get_insns ();
7814 end_sequence ();
7815
7308a047 7816 /* Complex construction should appear as a single unit. */
6551fa4d
JW
7817 /* If TARGET is a CONCAT, we got insns like RD = RS, ID = IS,
7818 each with a separate pseudo as destination.
7819 It's not correct for flow to treat them as a unit. */
6d6e61ce 7820 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
7821 emit_no_conflict_block (insns, target, op0, op1, NULL_RTX);
7822 else
7823 emit_insns (insns);
7308a047
RS
7824
7825 return target;
7826 }
7827
7828 case REALPART_EXPR:
2d7050fd
RS
7829 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7830 return gen_realpart (mode, op0);
7308a047
RS
7831
7832 case IMAGPART_EXPR:
2d7050fd
RS
7833 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7834 return gen_imagpart (mode, op0);
7308a047
RS
7835
7836 case CONJ_EXPR:
7837 {
62acb978 7838 enum machine_mode partmode = TYPE_MODE (TREE_TYPE (TREE_TYPE (exp)));
7308a047 7839 rtx imag_t;
6551fa4d 7840 rtx insns;
7308a047
RS
7841
7842 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7843
7844 if (! target)
d6a5ac33 7845 target = gen_reg_rtx (mode);
7308a047 7846
6551fa4d 7847 start_sequence ();
7308a047
RS
7848
7849 /* Store the realpart and the negated imagpart to target. */
62acb978
RK
7850 emit_move_insn (gen_realpart (partmode, target),
7851 gen_realpart (partmode, op0));
7308a047 7852
62acb978
RK
7853 imag_t = gen_imagpart (partmode, target);
7854 temp = expand_unop (partmode, neg_optab,
7855 gen_imagpart (partmode, op0), imag_t, 0);
7308a047
RS
7856 if (temp != imag_t)
7857 emit_move_insn (imag_t, temp);
7858
6551fa4d
JW
7859 insns = get_insns ();
7860 end_sequence ();
7861
d6a5ac33
RK
7862 /* Conjugate should appear as a single unit.
7863 If TARGET is a CONCAT, we got insns like RD = RS, ID = - IS,
6551fa4d
JW
7864 each with a separate pseudo as destination.
7865 It's not correct for flow to treat them as a unit. */
6d6e61ce 7866 if (GET_CODE (target) != CONCAT)
6551fa4d
JW
7867 emit_no_conflict_block (insns, target, op0, NULL_RTX, NULL_RTX);
7868 else
7869 emit_insns (insns);
7308a047
RS
7870
7871 return target;
7872 }
7873
e976b8b2
MS
7874 case TRY_CATCH_EXPR:
7875 {
7876 tree handler = TREE_OPERAND (exp, 1);
7877
7878 expand_eh_region_start ();
7879
7880 op0 = expand_expr (TREE_OPERAND (exp, 0), 0, VOIDmode, 0);
7881
7882 expand_eh_region_end (handler);
7883
7884 return op0;
7885 }
7886
7887 case POPDCC_EXPR:
7888 {
7889 rtx dcc = get_dynamic_cleanup_chain ();
38a448ca 7890 emit_move_insn (dcc, validize_mem (gen_rtx_MEM (Pmode, dcc)));
e976b8b2
MS
7891 return const0_rtx;
7892 }
7893
7894 case POPDHC_EXPR:
7895 {
7896 rtx dhc = get_dynamic_handler_chain ();
38a448ca 7897 emit_move_insn (dhc, validize_mem (gen_rtx_MEM (Pmode, dhc)));
e976b8b2
MS
7898 return const0_rtx;
7899 }
7900
bbf6f052 7901 case ERROR_MARK:
66538193
RS
7902 op0 = CONST0_RTX (tmode);
7903 if (op0 != 0)
7904 return op0;
bbf6f052
RK
7905 return const0_rtx;
7906
7907 default:
90764a87 7908 return (*lang_expand_expr) (exp, original_target, tmode, modifier);
bbf6f052
RK
7909 }
7910
7911 /* Here to do an ordinary binary operator, generating an instruction
7912 from the optab already placed in `this_optab'. */
7913 binop:
7914 preexpand_calls (exp);
e5e809f4 7915 if (! safe_from_p (subtarget, TREE_OPERAND (exp, 1), 1))
bbf6f052
RK
7916 subtarget = 0;
7917 op0 = expand_expr (TREE_OPERAND (exp, 0), subtarget, VOIDmode, 0);
906c4e36 7918 op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
bbf6f052
RK
7919 binop2:
7920 temp = expand_binop (mode, this_optab, op0, op1, target,
7921 unsignedp, OPTAB_LIB_WIDEN);
7922 if (temp == 0)
7923 abort ();
7924 return temp;
7925}
bbf6f052 7926
bbf6f052 7927
b93a436e
JL
7928\f
7929/* Return the alignment in bits of EXP, a pointer valued expression.
7930 But don't return more than MAX_ALIGN no matter what.
7931 The alignment returned is, by default, the alignment of the thing that
7932 EXP points to (if it is not a POINTER_TYPE, 0 is returned).
7933
7934 Otherwise, look at the expression to see if we can do better, i.e., if the
7935 expression is actually pointing at an object whose alignment is tighter. */
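/* E.g. (illustrative): for `double d;  ... (char *) &d', the ADDR_EXPR
   case finds DECL_ALIGN (d), say 64, so the result can be 64 bits even
   though the pointer's own type only promises char alignment.  */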
0f41302f 7936
b93a436e
JL
7937static int
7938get_pointer_alignment (exp, max_align)
7939 tree exp;
7940 unsigned max_align;
bbf6f052 7941{
b93a436e
JL
7942 unsigned align, inner;
7943
7944 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7945 return 0;
7946
7947 align = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7948 align = MIN (align, max_align);
7949
7950 while (1)
bbf6f052 7951 {
b93a436e 7952 switch (TREE_CODE (exp))
bbf6f052 7953 {
b93a436e
JL
7954 case NOP_EXPR:
7955 case CONVERT_EXPR:
7956 case NON_LVALUE_EXPR:
7957 exp = TREE_OPERAND (exp, 0);
7958 if (TREE_CODE (TREE_TYPE (exp)) != POINTER_TYPE)
7959 return align;
7960 inner = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
7961 align = MIN (inner, max_align);
7962 break;
7963
7964 case PLUS_EXPR:
7965 /* If sum of pointer + int, restrict our maximum alignment to that
7966 imposed by the integer. If not, we can't do any better than
7967 ALIGN. */
7968 if (TREE_CODE (TREE_OPERAND (exp, 1)) != INTEGER_CST)
7969 return align;
7970
7971 while (((TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT)
7972 & (max_align - 1))
7973 != 0)
7974 max_align >>= 1;
7975
7976 exp = TREE_OPERAND (exp, 0);
7977 break;
7978
7979 case ADDR_EXPR:
7980 /* See what we are pointing at and look at its alignment. */
7981 exp = TREE_OPERAND (exp, 0);
7982 if (TREE_CODE (exp) == FUNCTION_DECL)
7983 align = FUNCTION_BOUNDARY;
7984 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd')
7985 align = DECL_ALIGN (exp);
7986#ifdef CONSTANT_ALIGNMENT
7987 else if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'c')
7988 align = CONSTANT_ALIGNMENT (exp, align);
c02bd5d9 7989#endif
b93a436e 7990 return MIN (align, max_align);
c02bd5d9 7991
b93a436e
JL
7992 default:
7993 return align;
7994 }
7995 }
7996}
7997\f
7998/* Return the tree node and offset if a given argument corresponds to
7999 a string constant. */
8000
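/* E.g. (illustrative): for an ARG representing `"hello" + 2' we return
   the STRING_CST "hello" and set *PTR_OFFSET to 2; for plain `"hello"'
   the offset is integer_zero_node.  */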
8001static tree
8002string_constant (arg, ptr_offset)
8003 tree arg;
8004 tree *ptr_offset;
8005{
8006 STRIP_NOPS (arg);
8007
8008 if (TREE_CODE (arg) == ADDR_EXPR
8009 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST)
8010 {
8011 *ptr_offset = integer_zero_node;
8012 return TREE_OPERAND (arg, 0);
8013 }
8014 else if (TREE_CODE (arg) == PLUS_EXPR)
8015 {
8016 tree arg0 = TREE_OPERAND (arg, 0);
8017 tree arg1 = TREE_OPERAND (arg, 1);
8018
8019 STRIP_NOPS (arg0);
8020 STRIP_NOPS (arg1);
8021
8022 if (TREE_CODE (arg0) == ADDR_EXPR
8023 && TREE_CODE (TREE_OPERAND (arg0, 0)) == STRING_CST)
bbf6f052 8024 {
b93a436e
JL
8025 *ptr_offset = arg1;
8026 return TREE_OPERAND (arg0, 0);
bbf6f052 8027 }
b93a436e
JL
8028 else if (TREE_CODE (arg1) == ADDR_EXPR
8029 && TREE_CODE (TREE_OPERAND (arg1, 0)) == STRING_CST)
bbf6f052 8030 {
b93a436e
JL
8031 *ptr_offset = arg0;
8032 return TREE_OPERAND (arg1, 0);
bbf6f052 8033 }
b93a436e 8034 }
ca695ac9 8035
b93a436e
JL
8036 return 0;
8037}
ca695ac9 8038
b93a436e
JL
8039/* Compute the length of a C string. TREE_STRING_LENGTH is not the right
8040 way, because it could contain a zero byte in the middle.
8041 TREE_STRING_LENGTH is the size of the character array, not the string.
ca695ac9 8042
b93a436e
JL
8043 Unfortunately, string_constant can't access the values of const char
8044 arrays with initializers, so neither can we do so here. */
e87b4f3f 8045
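/* E.g. (illustrative): for (char[8]) "foo\0bar", TREE_STRING_LENGTH is
   8 but the C string length is 3; and if the starting offset is not a
   compile-time constant, the embedded zero forces us to return 0, since
   the distance to the terminator cannot be known here.  */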
b93a436e
JL
8046static tree
8047c_strlen (src)
8048 tree src;
8049{
8050 tree offset_node;
8051 int offset, max;
8052 char *ptr;
e7c33f54 8053
b93a436e
JL
8054 src = string_constant (src, &offset_node);
8055 if (src == 0)
8056 return 0;
8057 max = TREE_STRING_LENGTH (src);
8058 ptr = TREE_STRING_POINTER (src);
8059 if (offset_node && TREE_CODE (offset_node) != INTEGER_CST)
8060 {
8061 /* If the string has an internal zero byte (e.g., "foo\0bar"), we can't
8062 compute the offset to the following null if we don't know where to
8063 start searching for it. */
8064 int i;
8065 for (i = 0; i < max; i++)
8066 if (ptr[i] == 0)
8067 return 0;
8068 /* We don't know the starting offset, but we do know that the string
8069 has no internal zero bytes. We can assume that the offset falls
8070 within the bounds of the string; otherwise, the programmer deserves
8071 what he gets. Subtract the offset from the length of the string,
8072 and return that. */
8073 /* This would perhaps not be valid if we were dealing with named
8074 arrays in addition to literal string constants. */
8075 return size_binop (MINUS_EXPR, size_int (max), offset_node);
8076 }
e7c33f54 8077
b93a436e
JL
8078 /* We have a known offset into the string. Start searching there for
8079 a null character. */
8080 if (offset_node == 0)
8081 offset = 0;
8082 else
8083 {
8084 /* Did we get a long long offset? If so, punt. */
8085 if (TREE_INT_CST_HIGH (offset_node) != 0)
8086 return 0;
8087 offset = TREE_INT_CST_LOW (offset_node);
8088 }
8089 /* If the offset is known to be out of bounds, warn, and call strlen at
8090 runtime. */
8091 if (offset < 0 || offset > max)
8092 {
8093 warning ("offset outside bounds of constant string");
8094 return 0;
8095 }
8096 /* Use strlen to search for the first zero byte. Since any strings
8097 constructed with build_string will have nulls appended, we win even
8098 if we get handed something like (char[4])"abcd".
e7c33f54 8099
b93a436e
JL
8100 Since OFFSET is our starting index into the string, no further
8101 calculation is needed. */
8102 return size_int (strlen (ptr + offset));
8103}
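/* Examples of the cases handled above:

     c_strlen (&"abcd")        returns size_int (4), via a compile-time
                               strlen on the TREE_STRING_POINTER;
     c_strlen (&"abcd" + 2)    returns size_int (2);
     c_strlen (&"ab\0cd" + n)  returns 0 (unknown): the offset is not a
                               constant and the string has an internal
                               zero byte;
     c_strlen (&"abcd" + 9)    warns and returns 0, so the caller falls
                               back to a runtime strlen.  */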
1bbddf11 8104
b93a436e
JL
8105rtx
8106expand_builtin_return_addr (fndecl_code, count, tem)
8107 enum built_in_function fndecl_code;
8108 int count;
8109 rtx tem;
8110{
8111 int i;
e7c33f54 8112
b93a436e
JL
8113 /* Some machines need special handling before we can access
8114 arbitrary frames. For example, on the sparc, we must first flush
8115 all register windows to the stack. */
8116#ifdef SETUP_FRAME_ADDRESSES
8117 if (count > 0)
8118 SETUP_FRAME_ADDRESSES ();
8119#endif
e87b4f3f 8120
b93a436e
JL
8121 /* On the sparc, the return address is not in the frame, it is in a
8122 register. There is no way to access it off of the current frame
8123 pointer, but it can be accessed off the previous frame pointer by
8124 reading the value from the register window save area. */
8125#ifdef RETURN_ADDR_IN_PREVIOUS_FRAME
8126 if (fndecl_code == BUILT_IN_RETURN_ADDRESS)
8127 count--;
8128#endif
60bac6ea 8129
b93a436e
JL
8130 /* Scan back COUNT frames to the specified frame. */
8131 for (i = 0; i < count; i++)
8132 {
8133 /* Assume the dynamic chain pointer is in the word that the
8134 frame address points to, unless otherwise specified. */
8135#ifdef DYNAMIC_CHAIN_ADDRESS
8136 tem = DYNAMIC_CHAIN_ADDRESS (tem);
8137#endif
8138 tem = memory_address (Pmode, tem);
8139 tem = copy_to_reg (gen_rtx_MEM (Pmode, tem));
8140 }
ca695ac9 8141
b93a436e
JL
8142 /* For __builtin_frame_address, return what we've got. */
8143 if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
8144 return tem;
e9a25f70 8145
b93a436e
JL
8146 /* For __builtin_return_address, get the return address from that
8147 frame. */
8148#ifdef RETURN_ADDR_RTX
8149 tem = RETURN_ADDR_RTX (count, tem);
8150#else
8151 tem = memory_address (Pmode,
8152 plus_constant (tem, GET_MODE_SIZE (Pmode)));
8153 tem = gen_rtx_MEM (Pmode, tem);
8154#endif
8155 return tem;
8156}
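/* The two builtins routed through this function, as they appear at the
   source level (COUNT must be a nonnegative constant, and COUNT > 0 is
   only reliable on targets that can walk arbitrary frames):

     void *ra = __builtin_return_address (0);   the caller's resume point
     void *fp = __builtin_frame_address (1);    the caller's frame

   Both walk COUNT links of the dynamic chain; the frame-address case
   returns the frame itself, while the return-address case loads the
   saved return address from it.  */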
e9a25f70 8157
b93a436e
JL
8158/* __builtin_setjmp is passed a pointer to an array of five words (not
8159 all will be used on all machines). It operates similarly to the C
8160 library function of the same name, but is more efficient. Much of
8161 the code below (and for longjmp) is copied from the handling of
8162 non-local gotos.
ca695ac9 8163
b93a436e
JL
8164 NOTE: This is intended for use by GNAT and the exception handling
8165 scheme in the compiler, and will only work in the way they
8166 use it. */
e9a25f70 8167
b93a436e 8168rtx
6fd1c67b 8169expand_builtin_setjmp (buf_addr, target, first_label, next_label)
b93a436e
JL
8170 rtx buf_addr;
8171 rtx target;
6fd1c67b 8172 rtx first_label, next_label;
b93a436e 8173{
6fd1c67b 8174 rtx lab1 = gen_label_rtx ();
a260abc9
DE
8175 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
8176 enum machine_mode value_mode;
b93a436e 8177 rtx stack_save;
e9a25f70 8178
b93a436e 8179 value_mode = TYPE_MODE (integer_type_node);
ca695ac9 8180
b93a436e
JL
8181#ifdef POINTERS_EXTEND_UNSIGNED
8182 buf_addr = convert_memory_address (Pmode, buf_addr);
8183#endif
d7f21d63 8184
b93a436e 8185 buf_addr = force_reg (Pmode, buf_addr);
d7f21d63 8186
b93a436e
JL
8187 if (target == 0 || GET_CODE (target) != REG
8188 || REGNO (target) < FIRST_PSEUDO_REGISTER)
8189 target = gen_reg_rtx (value_mode);
d7f21d63 8190
b93a436e 8191 emit_queue ();
d7f21d63 8192
9ec36da5
JL
8193 /* We store the frame pointer and the address of lab1 in the buffer
8194 and use the rest of it for the stack save area, which is
8195 machine-dependent. */
8196
0bc02db4
MS
8197#ifndef BUILTIN_SETJMP_FRAME_VALUE
8198#define BUILTIN_SETJMP_FRAME_VALUE virtual_stack_vars_rtx
8199#endif
8200
b93a436e 8201 emit_move_insn (gen_rtx_MEM (Pmode, buf_addr),
0bc02db4 8202 BUILTIN_SETJMP_FRAME_VALUE);
6fd1c67b
RH
8203 emit_move_insn (validize_mem
8204 (gen_rtx_MEM (Pmode,
b93a436e
JL
8205 plus_constant (buf_addr,
8206 GET_MODE_SIZE (Pmode)))),
6fd1c67b 8207 gen_rtx_LABEL_REF (Pmode, lab1));
d7f21d63 8208
b93a436e
JL
8209 stack_save = gen_rtx_MEM (sa_mode,
8210 plus_constant (buf_addr,
8211 2 * GET_MODE_SIZE (Pmode)));
8212 emit_stack_save (SAVE_NONLOCAL, &stack_save, NULL_RTX);
e9a25f70 8213
6fd1c67b
RH
8214 /* If there is further processing to do, do it. */
8215#ifdef HAVE_builtin_setjmp_setup
8216 if (HAVE_builtin_setjmp_setup)
8217 emit_insn (gen_builtin_setjmp_setup (buf_addr));
b93a436e 8218#endif
d7f21d63 8219
6fd1c67b 8220 /* Set TARGET to zero and branch to the first-time-through label. */
b93a436e 8221 emit_move_insn (target, const0_rtx);
6fd1c67b 8222 emit_jump_insn (gen_jump (first_label));
b93a436e
JL
8223 emit_barrier ();
8224 emit_label (lab1);
d7f21d63 8225
6fd1c67b
RH
8226 /* Tell flow about the strange goings-on. */
8227 current_function_has_nonlocal_label = 1;
8228
8229 /* The frame pointer is clobbered when we get here, so we must
8230 make sure it's marked as used by this function. */
b93a436e 8231 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
e9a25f70 8232
b93a436e
JL
8233 /* Mark the static chain as clobbered here so life information
8234 doesn't get messed up for it. */
8235 emit_insn (gen_rtx_CLOBBER (VOIDmode, static_chain_rtx));
d7f21d63 8236
b93a436e
JL
8237 /* Now put in the code to restore the frame pointer, and argument
8238 pointer, if needed. The code below is from expand_end_bindings
8239 in stmt.c; see detailed documentation there. */
8240#ifdef HAVE_nonlocal_goto
8241 if (! HAVE_nonlocal_goto)
8242#endif
8243 emit_move_insn (virtual_stack_vars_rtx, hard_frame_pointer_rtx);
ca695ac9 8244
b93a436e
JL
8245#if ARG_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
8246 if (fixed_regs[ARG_POINTER_REGNUM])
8247 {
8248#ifdef ELIMINABLE_REGS
081f5e7e 8249 int i;
b93a436e 8250 static struct elims {int from, to;} elim_regs[] = ELIMINABLE_REGS;
ca695ac9 8251
b93a436e
JL
8252 for (i = 0; i < sizeof elim_regs / sizeof elim_regs[0]; i++)
8253 if (elim_regs[i].from == ARG_POINTER_REGNUM
8254 && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
8255 break;
ca695ac9 8256
b93a436e
JL
8257 if (i == sizeof elim_regs / sizeof elim_regs [0])
8258#endif
8259 {
8260 /* Now restore our arg pointer from the address at which it
8261 was saved in our stack frame.
8262 If there hasn't been space allocated for it yet, make
8263 some now. */
8264 if (arg_pointer_save_area == 0)
8265 arg_pointer_save_area
8266 = assign_stack_local (Pmode, GET_MODE_SIZE (Pmode), 0);
8267 emit_move_insn (virtual_incoming_args_rtx,
8268 copy_to_reg (arg_pointer_save_area));
8269 }
8270 }
8271#endif
ca695ac9 8272
6fd1c67b
RH
8273#ifdef HAVE_builtin_setjmp_receiver
8274 if (HAVE_builtin_setjmp_receiver)
8275 emit_insn (gen_builtin_setjmp_receiver (lab1));
8276 else
8277#endif
b93a436e 8278#ifdef HAVE_nonlocal_goto_receiver
6fd1c67b
RH
8279 if (HAVE_nonlocal_goto_receiver)
8280 emit_insn (gen_nonlocal_goto_receiver ());
8281 else
b93a436e 8282#endif
081f5e7e
KG
8283 {
8284 ; /* Nothing */
8285 }
6fd1c67b
RH
8286
8287 /* Set TARGET, and branch to the next-time-through label. */
3e2b9a3d 8288 emit_move_insn (target, const1_rtx);
6fd1c67b
RH
8289 emit_jump_insn (gen_jump (next_label));
8290 emit_barrier ();
ca695ac9 8291
6fd1c67b
RH
8292 return target;
8293}
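/* A sketch of the buffer laid out above, assuming a 32-bit Pmode and no
   machine-specific setup insn:

     word 0   frame value (BUILTIN_SETJMP_FRAME_VALUE, by default the
              virtual stack-vars pointer)
     word 1   address of the receiver label LAB1
     word 2+  stack save area, in STACK_SAVEAREA_MODE (SAVE_NONLOCAL)

   Control falls through to FIRST_LABEL with TARGET == 0 on the initial
   call; a later __builtin_longjmp lands on LAB1, which restores the
   frame and argument pointers and leaves via NEXT_LABEL with
   TARGET == 1.  */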
ca695ac9 8294
6fd1c67b
RH
8295void
8296expand_builtin_longjmp (buf_addr, value)
8297 rtx buf_addr, value;
8298{
8299 rtx fp, lab, stack;
a260abc9 8300 enum machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
ca695ac9 8301
6fd1c67b
RH
8302#ifdef POINTERS_EXTEND_UNSIGNED
8303 buf_addr = convert_memory_address (Pmode, buf_addr);
b93a436e 8304#endif
6fd1c67b
RH
8305 buf_addr = force_reg (Pmode, buf_addr);
8306
3e2b9a3d
JW
8307 /* We used to store value in static_chain_rtx, but that fails if pointers
8308 are smaller than integers. We instead require that the user must pass
8309 a second argument of 1, because that is what builtin_setjmp will
8310 return. This also makes EH slightly more efficient, since we are no
8311 longer copying around a value that we don't care about. */
8312 if (value != const1_rtx)
8313 abort ();
6fd1c67b
RH
8314
8315#ifdef HAVE_builtin_longjmp
8316 if (HAVE_builtin_longjmp)
3e2b9a3d 8317 emit_insn (gen_builtin_longjmp (buf_addr));
6fd1c67b 8318 else
b93a436e 8319#endif
6fd1c67b
RH
8320 {
8321 fp = gen_rtx_MEM (Pmode, buf_addr);
8322 lab = gen_rtx_MEM (Pmode, plus_constant (buf_addr,
8323 GET_MODE_SIZE (Pmode)));
e9a25f70 8324
6fd1c67b
RH
8325 stack = gen_rtx_MEM (sa_mode, plus_constant (buf_addr,
8326 2 * GET_MODE_SIZE (Pmode)));
8327
8328 /* Pick up FP, label, and SP from the block and jump. This code is
8329 from expand_goto in stmt.c; see there for detailed comments. */
8330#if HAVE_nonlocal_goto
8331 if (HAVE_nonlocal_goto)
3e2b9a3d
JW
8332 /* We have to pass a value to the nonlocal_goto pattern that will
8333 get copied into the static_chain pointer, but it does not matter
8334 what that value is, because builtin_setjmp does not use it. */
6fd1c67b
RH
8335 emit_insn (gen_nonlocal_goto (value, fp, stack, lab));
8336 else
b93a436e 8337#endif
6fd1c67b
RH
8338 {
8339 lab = copy_to_reg (lab);
60bac6ea 8340
6fd1c67b
RH
8341 emit_move_insn (hard_frame_pointer_rtx, fp);
8342 emit_stack_restore (SAVE_NONLOCAL, stack, NULL_RTX);
8343
8344 emit_insn (gen_rtx_USE (VOIDmode, hard_frame_pointer_rtx));
8345 emit_insn (gen_rtx_USE (VOIDmode, stack_pointer_rtx));
6fd1c67b
RH
8346 emit_indirect_jump (lab);
8347 }
8348 }
b93a436e 8349}
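/* A minimal source-level pairing of the two builtins (names here are
   illustrative), matching the check above that the second argument must
   be the literal 1, the value __builtin_setjmp yields on the longjmp
   path:

     static void *buf[5];

     int catcher (void)
     {
       if (__builtin_setjmp (buf) == 0)
         return 0;
       return 1;
     }

     void thrower (void)
     {
       __builtin_longjmp (buf, 1);
     }

   catcher returns 0 on the initial call and 1 when thrower later
   transfers control back through the buffer.  */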
60bac6ea 8350
55a6ba9f
JC
8351static rtx
8352get_memory_rtx (exp)
8353 tree exp;
8354{
8355 rtx mem;
8356 int is_aggregate;
8357
8358 mem = gen_rtx_MEM (BLKmode,
8359 memory_address (BLKmode,
8360 expand_expr (exp, NULL_RTX,
8361 ptr_mode, EXPAND_SUM)));
8362
8363 RTX_UNCHANGING_P (mem) = TREE_READONLY (exp);
8364
8365 /* Figure out the type of the object pointed to. Set MEM_IN_STRUCT_P
8366 if the value is the address of a structure or if the expression is
8367 cast to a pointer to structure type. */
8368 is_aggregate = 0;
8369
8370 while (TREE_CODE (exp) == NOP_EXPR)
8371 {
8372 tree cast_type = TREE_TYPE (exp);
8373 if (TREE_CODE (cast_type) == POINTER_TYPE
8374 && AGGREGATE_TYPE_P (TREE_TYPE (cast_type)))
8375 {
8376 is_aggregate = 1;
8377 break;
8378 }
8379 exp = TREE_OPERAND (exp, 0);
8380 }
8381
8382 if (is_aggregate == 0)
8383 {
8384 tree type;
8385
8386 if (TREE_CODE (exp) == ADDR_EXPR)
8387 /* If this is the address of an object, check whether the
8388 object is an array. */
8389 type = TREE_TYPE (TREE_OPERAND (exp, 0));
8390 else
8391 type = TREE_TYPE (TREE_TYPE (exp));
8392 is_aggregate = AGGREGATE_TYPE_P (type);
8393 }
8394
8395 MEM_IN_STRUCT_P (mem) = is_aggregate;
8396 return mem;
8397}
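/* Examples of the MEM_IN_STRUCT_P decision above, assuming EXP is the
   address argument of a block builtin such as memcpy:

     memcpy (&s, p, n), S a struct    ADDR_EXPR of a RECORD_TYPE object,
                                      so the MEM is marked in-struct;
     memcpy ((struct t *) q, p, n)    the NOP_EXPR cast to a pointer to
                                      an aggregate sets the flag even if
                                      Q is a plain char *;
     memcpy (d, p, n), D a char *     neither test fires; the flag stays
                                      clear.  */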
8398
b93a436e
JL
8399\f
8400/* Expand an expression EXP that calls a built-in function,
8401 with result going to TARGET if that's convenient
8402 (and in mode MODE if that's convenient).
8403 SUBTARGET may be used as the target for computing one of EXP's operands.
8404 IGNORE is nonzero if the value is to be ignored. */
60bac6ea 8405
b93a436e
JL
8406#define CALLED_AS_BUILT_IN(NODE) \
8407 (!strncmp (IDENTIFIER_POINTER (DECL_NAME (NODE)), "__builtin_", 10))
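/* For example, __builtin_strlen (s) satisfies CALLED_AS_BUILT_IN, while
   a plain strlen (s) that was merely recognized as built in does not;
   several cases below use this to honor the explicit __builtin_
   spelling even when not optimizing.  */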
60bac6ea 8408
b93a436e
JL
8409static rtx
8410expand_builtin (exp, target, subtarget, mode, ignore)
8411 tree exp;
8412 rtx target;
8413 rtx subtarget;
8414 enum machine_mode mode;
8415 int ignore;
8416{
8417 tree fndecl = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
8418 tree arglist = TREE_OPERAND (exp, 1);
8419 rtx op0;
8420 rtx lab1, insns;
8421 enum machine_mode value_mode = TYPE_MODE (TREE_TYPE (exp));
8422 optab builtin_optab;
60bac6ea 8423
b93a436e
JL
8424 switch (DECL_FUNCTION_CODE (fndecl))
8425 {
8426 case BUILT_IN_ABS:
8427 case BUILT_IN_LABS:
8428 case BUILT_IN_FABS:
8429 /* build_function_call changes these into ABS_EXPR. */
8430 abort ();
4ed67205 8431
b93a436e
JL
8432 case BUILT_IN_SIN:
8433 case BUILT_IN_COS:
8434 /* Treat these like sqrt, but only if the user asks for them. */
8435 if (! flag_fast_math)
8436 break;
8437 case BUILT_IN_FSQRT:
8438 /* If not optimizing, call the library function. */
8439 if (! optimize)
8440 break;
4ed67205 8441
b93a436e
JL
8442 if (arglist == 0
8443 /* Arg could be wrong type if user redeclared this fcn wrong. */
8444 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != REAL_TYPE)
4ed67205
RK
8445 break;
8446
b93a436e
JL
8447 /* Stabilize and compute the argument. */
8448 if (TREE_CODE (TREE_VALUE (arglist)) != VAR_DECL
8449 && TREE_CODE (TREE_VALUE (arglist)) != PARM_DECL)
8450 {
8451 exp = copy_node (exp);
8452 arglist = copy_node (arglist);
8453 TREE_OPERAND (exp, 1) = arglist;
8454 TREE_VALUE (arglist) = save_expr (TREE_VALUE (arglist));
8455 }
8456 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
b089937a 8457
b93a436e
JL
8458 /* Make a suitable register to place result in. */
8459 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
7565a035 8460
b93a436e
JL
8461 emit_queue ();
8462 start_sequence ();
7565a035 8463
b93a436e
JL
8464 switch (DECL_FUNCTION_CODE (fndecl))
8465 {
8466 case BUILT_IN_SIN:
8467 builtin_optab = sin_optab; break;
8468 case BUILT_IN_COS:
8469 builtin_optab = cos_optab; break;
8470 case BUILT_IN_FSQRT:
8471 builtin_optab = sqrt_optab; break;
8472 default:
8473 abort ();
8474 }
4ed67205 8475
b93a436e
JL
8476 /* Compute into TARGET.
8477 Set TARGET to wherever the result comes back. */
8478 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8479 builtin_optab, op0, target, 0);
8480
8481 /* If we were unable to expand via the builtin, stop the
8482 sequence (without outputting the insns) and break, causing
38e01259 8483 a call to the library function. */
b93a436e 8484 if (target == 0)
4ed67205 8485 {
b93a436e
JL
8486 end_sequence ();
8487 break;
8488 }
4ed67205 8489
b93a436e
JL
8490 /* Check the results by default. But if flag_fast_math is turned on,
8491 then assume sqrt will always be called with valid arguments. */
4ed67205 8492
b93a436e
JL
8493 if (! flag_fast_math)
8494 {
8495 /* Don't define the builtin FP instructions
8496 if your machine is not IEEE. */
8497 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
8498 abort ();
4ed67205 8499
b93a436e 8500 lab1 = gen_label_rtx ();
ca55abae 8501
b93a436e
JL
8502 /* Test the result; if it is NaN, set errno=EDOM because
8503 the argument was not in the domain. */
8504 emit_cmp_insn (target, target, EQ, 0, GET_MODE (target), 0, 0);
8505 emit_jump_insn (gen_beq (lab1));
8506
8507#ifdef TARGET_EDOM
8508 {
8509#ifdef GEN_ERRNO_RTX
8510 rtx errno_rtx = GEN_ERRNO_RTX;
8511#else
8512 rtx errno_rtx
8513 = gen_rtx_MEM (word_mode, gen_rtx_SYMBOL_REF (Pmode, "errno"));
8514#endif
e87b4f3f 8515
b93a436e
JL
8516 emit_move_insn (errno_rtx, GEN_INT (TARGET_EDOM));
8517 }
8518#else
8519 /* We can't set errno=EDOM directly; let the library call do it.
8520 Pop the arguments right away in case the call gets deleted. */
8521 NO_DEFER_POP;
8522 expand_call (exp, target, 0);
8523 OK_DEFER_POP;
8524#endif
e7c33f54 8525
b93a436e
JL
8526 emit_label (lab1);
8527 }
0006469d 8528
b93a436e
JL
8529 /* Output the entire sequence. */
8530 insns = get_insns ();
8531 end_sequence ();
8532 emit_insns (insns);
8533
8534 return target;
0006469d 8535
b93a436e
JL
8536 case BUILT_IN_FMOD:
8537 break;
0006469d 8538
b93a436e
JL
8539 /* __builtin_apply_args returns a block of memory allocated on
8540 the stack into which are stored the arg pointer, structure
8541 value address, static chain, and all the registers that might
8542 possibly be used in performing a function call. The code is
8543 moved to the start of the function so the incoming values are
8544 saved. */
8545 case BUILT_IN_APPLY_ARGS:
8546 /* Don't do __builtin_apply_args more than once in a function.
8547 Save the result of the first call and reuse it. */
8548 if (apply_args_value != 0)
8549 return apply_args_value;
8550 {
8551 /* When this function is called, it means that registers must be
8552 saved on entry to this function. So we migrate the
8553 call to the first insn of this function. */
8554 rtx temp;
8555 rtx seq;
0006469d 8556
b93a436e
JL
8557 start_sequence ();
8558 temp = expand_builtin_apply_args ();
8559 seq = get_insns ();
8560 end_sequence ();
0006469d 8561
b93a436e 8562 apply_args_value = temp;
0006469d 8563
b93a436e
JL
8564 /* Put the sequence after the NOTE that starts the function.
8565 If this is inside a SEQUENCE, make the outer-level insn
8566 chain current, so the code is placed at the start of the
8567 function. */
8568 push_topmost_sequence ();
8569 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8570 pop_topmost_sequence ();
8571 return temp;
8572 }
0006469d 8573
b93a436e
JL
8574 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
8575 FUNCTION with a copy of the parameters described by
8576 ARGUMENTS, and ARGSIZE. It returns a block of memory
8577 allocated on the stack into which are stored all the registers
8578 that might possibly be used for returning the result of a
8579 function. ARGUMENTS is the value returned by
8580 __builtin_apply_args. ARGSIZE is the number of bytes of
8581 arguments that must be copied. ??? How should this value be
8582 computed? We'll also need a safe worst case value for varargs
8583 functions. */
8584 case BUILT_IN_APPLY:
8585 if (arglist == 0
8586 /* Arg could be non-pointer if user redeclared this fcn wrong. */
e5e809f4 8587 || ! POINTER_TYPE_P (TREE_TYPE (TREE_VALUE (arglist)))
b93a436e
JL
8588 || TREE_CHAIN (arglist) == 0
8589 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
8590 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
8591 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
8592 return const0_rtx;
8593 else
8594 {
8595 int i;
8596 tree t;
8597 rtx ops[3];
0006469d 8598
b93a436e
JL
8599 for (t = arglist, i = 0; t; t = TREE_CHAIN (t), i++)
8600 ops[i] = expand_expr (TREE_VALUE (t), NULL_RTX, VOIDmode, 0);
bbf6f052 8601
b93a436e
JL
8602 return expand_builtin_apply (ops[0], ops[1], ops[2]);
8603 }
bbf6f052 8604
b93a436e
JL
8605 /* __builtin_return (RESULT) causes the function to return the
8606 value described by RESULT. RESULT is the address of the block of
8607 memory returned by __builtin_apply. */
8608 case BUILT_IN_RETURN:
8609 if (arglist
8610 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8611 && TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) == POINTER_TYPE)
8612 expand_builtin_return (expand_expr (TREE_VALUE (arglist),
8613 NULL_RTX, VOIDmode, 0));
8614 return const0_rtx;
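/* Taken together, the three builtins above support the untyped
   forwarding idiom; a sketch, with a hypothetical target function and an
   assumed worst-case argument size of 64 bytes:

     void *real_target ();

     void *forward ()
     {
       void *args = __builtin_apply_args ();
       void *res = __builtin_apply ((void (*) ()) real_target, args, 64);
       __builtin_return (res);
     }

   As the comment above notes, there is no portable way to compute that
   size; 64 here is purely illustrative.  */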
ca695ac9 8615
b93a436e
JL
8616 case BUILT_IN_SAVEREGS:
8617 /* Don't do __builtin_saveregs more than once in a function.
8618 Save the result of the first call and reuse it. */
8619 if (saveregs_value != 0)
8620 return saveregs_value;
8621 {
8622 /* When this function is called, it means that registers must be
8623 saved on entry to this function. So we migrate the
8624 call to the first insn of this function. */
8625 rtx temp;
8626 rtx seq;
ca695ac9 8627
b93a436e
JL
8628 /* Now really call the function. `expand_call' does not call
8629 expand_builtin, so there is no danger of infinite recursion here. */
8630 start_sequence ();
ca695ac9 8631
b93a436e
JL
8632#ifdef EXPAND_BUILTIN_SAVEREGS
8633 /* Do whatever the machine needs done in this case. */
8634 temp = EXPAND_BUILTIN_SAVEREGS (arglist);
8635#else
8636 /* The register where the function returns its value
8637 is likely to have something else in it, such as an argument.
8638 So preserve that register around the call. */
ca695ac9 8639
b93a436e
JL
8640 if (value_mode != VOIDmode)
8641 {
8642 rtx valreg = hard_libcall_value (value_mode);
8643 rtx saved_valreg = gen_reg_rtx (value_mode);
ca695ac9 8644
b93a436e
JL
8645 emit_move_insn (saved_valreg, valreg);
8646 temp = expand_call (exp, target, ignore);
8647 emit_move_insn (valreg, saved_valreg);
ca695ac9
JB
8648 }
8649 else
b93a436e
JL
8650 /* Generate the call, putting the value in a pseudo. */
8651 temp = expand_call (exp, target, ignore);
8652#endif
bbf6f052 8653
b93a436e
JL
8654 seq = get_insns ();
8655 end_sequence ();
bbf6f052 8656
b93a436e 8657 saveregs_value = temp;
bbf6f052 8658
b93a436e
JL
8659 /* Put the sequence after the NOTE that starts the function.
8660 If this is inside a SEQUENCE, make the outer-level insn
8661 chain current, so the code is placed at the start of the
8662 function. */
8663 push_topmost_sequence ();
8664 emit_insns_before (seq, NEXT_INSN (get_insns ()));
8665 pop_topmost_sequence ();
8666 return temp;
8667 }
bbf6f052 8668
b93a436e
JL
8669 /* __builtin_args_info (N) returns word N of the arg space info
8670 for the current function. The number and meanings of words
8671 are controlled by the definition of CUMULATIVE_ARGS. */
8672 case BUILT_IN_ARGS_INFO:
8673 {
8674 int nwords = sizeof (CUMULATIVE_ARGS) / sizeof (int);
b93a436e 8675 int *word_ptr = (int *) &current_function_args_info;
381127e8
RL
8676#if 0
8677 /* These are used by the code below that is #if 0'ed away. */
8678 int i;
b93a436e 8679 tree type, elts, result;
381127e8 8680#endif
bbf6f052 8681
b93a436e
JL
8682 if (sizeof (CUMULATIVE_ARGS) % sizeof (int) != 0)
8683 fatal ("CUMULATIVE_ARGS type defined badly; see %s, line %d",
8684 __FILE__, __LINE__);
bbf6f052 8685
b93a436e
JL
8686 if (arglist != 0)
8687 {
8688 tree arg = TREE_VALUE (arglist);
8689 if (TREE_CODE (arg) != INTEGER_CST)
8690 error ("argument of `__builtin_args_info' must be constant");
8691 else
8692 {
8693 int wordnum = TREE_INT_CST_LOW (arg);
bbf6f052 8694
b93a436e
JL
8695 if (wordnum < 0 || wordnum >= nwords || TREE_INT_CST_HIGH (arg))
8696 error ("argument of `__builtin_args_info' out of range");
8697 else
8698 return GEN_INT (word_ptr[wordnum]);
8699 }
bbf6f052
RK
8700 }
8701 else
b93a436e 8702 error ("missing argument in `__builtin_args_info'");
bbf6f052 8703
b93a436e 8704 return const0_rtx;
bbf6f052 8705
b93a436e
JL
8706#if 0
8707 for (i = 0; i < nwords; i++)
8708 elts = tree_cons (NULL_TREE, build_int_2 (word_ptr[i], 0));
bbf6f052 8709
b93a436e
JL
8710 type = build_array_type (integer_type_node,
8711 build_index_type (build_int_2 (nwords, 0)));
8712 result = build (CONSTRUCTOR, type, NULL_TREE, nreverse (elts));
8713 TREE_CONSTANT (result) = 1;
8714 TREE_STATIC (result) = 1;
8715 result = build (INDIRECT_REF, build_pointer_type (type), result);
8716 TREE_CONSTANT (result) = 1;
8717 return expand_expr (result, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_BAD);
8718#endif
8719 }
8720
8721 /* Return the address of the first anonymous stack arg. */
8722 case BUILT_IN_NEXT_ARG:
ca695ac9 8723 {
b93a436e
JL
8724 tree fntype = TREE_TYPE (current_function_decl);
8725
8726 if ((TYPE_ARG_TYPES (fntype) == 0
8727 || (TREE_VALUE (tree_last (TYPE_ARG_TYPES (fntype)))
8728 == void_type_node))
8729 && ! current_function_varargs)
8730 {
8731 error ("`va_start' used in function with fixed args");
8732 return const0_rtx;
8733 }
8734
8735 if (arglist)
8736 {
8737 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
8738 tree arg = TREE_VALUE (arglist);
8739
8740 /* Strip off all nops for the sake of the comparison. This
8741 is not quite the same as STRIP_NOPS. It does more.
8742 We must also strip off INDIRECT_EXPR for C++ reference
8743 parameters. */
8744 while (TREE_CODE (arg) == NOP_EXPR
8745 || TREE_CODE (arg) == CONVERT_EXPR
8746 || TREE_CODE (arg) == NON_LVALUE_EXPR
8747 || TREE_CODE (arg) == INDIRECT_REF)
8748 arg = TREE_OPERAND (arg, 0);
8749 if (arg != last_parm)
8750 warning ("second parameter of `va_start' not last named argument");
8751 }
8752 else if (! current_function_varargs)
8753 /* Evidently an out-of-date version of <stdarg.h>; can't validate
8754 va_start's second argument, but can still work as intended. */
8755 warning ("`__builtin_next_arg' called without an argument");
bbf6f052
RK
8756 }
8757
b93a436e
JL
8758 return expand_binop (Pmode, add_optab,
8759 current_function_internal_arg_pointer,
8760 current_function_arg_offset_rtx,
8761 NULL_RTX, 0, OPTAB_LIB_WIDEN);
ca695ac9 8762
b93a436e
JL
8763 case BUILT_IN_CLASSIFY_TYPE:
8764 if (arglist != 0)
8765 {
8766 tree type = TREE_TYPE (TREE_VALUE (arglist));
8767 enum tree_code code = TREE_CODE (type);
8768 if (code == VOID_TYPE)
8769 return GEN_INT (void_type_class);
8770 if (code == INTEGER_TYPE)
8771 return GEN_INT (integer_type_class);
8772 if (code == CHAR_TYPE)
8773 return GEN_INT (char_type_class);
8774 if (code == ENUMERAL_TYPE)
8775 return GEN_INT (enumeral_type_class);
8776 if (code == BOOLEAN_TYPE)
8777 return GEN_INT (boolean_type_class);
8778 if (code == POINTER_TYPE)
8779 return GEN_INT (pointer_type_class);
8780 if (code == REFERENCE_TYPE)
8781 return GEN_INT (reference_type_class);
8782 if (code == OFFSET_TYPE)
8783 return GEN_INT (offset_type_class);
8784 if (code == REAL_TYPE)
8785 return GEN_INT (real_type_class);
8786 if (code == COMPLEX_TYPE)
8787 return GEN_INT (complex_type_class);
8788 if (code == FUNCTION_TYPE)
8789 return GEN_INT (function_type_class);
8790 if (code == METHOD_TYPE)
8791 return GEN_INT (method_type_class);
8792 if (code == RECORD_TYPE)
8793 return GEN_INT (record_type_class);
8794 if (code == UNION_TYPE || code == QUAL_UNION_TYPE)
8795 return GEN_INT (union_type_class);
8796 if (code == ARRAY_TYPE)
8797 {
8798 if (TYPE_STRING_FLAG (type))
8799 return GEN_INT (string_type_class);
8800 else
8801 return GEN_INT (array_type_class);
8802 }
8803 if (code == SET_TYPE)
8804 return GEN_INT (set_type_class);
8805 if (code == FILE_TYPE)
8806 return GEN_INT (file_type_class);
8807 if (code == LANG_TYPE)
8808 return GEN_INT (lang_type_class);
8809 }
8810 return GEN_INT (no_type_class);
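/* Sample values of the classification above:

     __builtin_classify_type (0)            == integer_type_class
     __builtin_classify_type (0.0)          == real_type_class
     __builtin_classify_type ((char *) 0)   == pointer_type_class

   With no argument at all, the no_type_class value above is
   returned.  */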
ca695ac9 8811
b93a436e
JL
8812 case BUILT_IN_CONSTANT_P:
8813 if (arglist == 0)
8814 return const0_rtx;
8815 else
8816 {
8817 tree arg = TREE_VALUE (arglist);
ca695ac9 8818
b93a436e 8819 STRIP_NOPS (arg);
cff48d8f
RH
8820 if (really_constant_p (arg)
8821 || (TREE_CODE (arg) == ADDR_EXPR
8822 && TREE_CODE (TREE_OPERAND (arg, 0)) == STRING_CST))
8823 return const1_rtx;
8824
8825 /* Only emit CONSTANT_P_RTX if CSE will be run.
8826 Moreover, we don't want to expand trees that have side effects,
8827 as the original __builtin_constant_p did not evaluate its
8828 argument at all, and we would break existing usage by changing
8829 this. This quirk was generally useful, eliminating a bit of hair
8830 in the writing of the macros that use this function. Now the
8831 same thing can be better accomplished in an inline function. */
8832
8833 if (! cse_not_expected && ! TREE_SIDE_EFFECTS (arg))
8834 {
8835 /* Lazy fixup of old code: issue a warning and fail the test. */
8836 if (! can_handle_constant_p)
8837 {
8838 warning ("Delayed evaluation of __builtin_constant_p not supported on this target.");
8839 warning ("Please report this as a bug to egcs-bugs@cygnus.com.");
8840 return const0_rtx;
8841 }
8842 return gen_rtx_CONSTANT_P_RTX (TYPE_MODE (integer_type_node),
8843 expand_expr (arg, NULL_RTX,
8844 VOIDmode, 0));
8845 }
8846
8847 return const0_rtx;
b93a436e 8848 }
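/* Examples of the folding above:

     __builtin_constant_p (3)       folds to 1 at once;
     __builtin_constant_p ("abc")   folds to 1 (ADDR_EXPR of a
                                    STRING_CST);
     __builtin_constant_p (x)       becomes a CONSTANT_P_RTX when CSE
                                    will still run, so it can resolve
                                    later once X's value is known;
     __builtin_constant_p (i++)     folds to 0 immediately; the argument
                                    has side effects and is never
                                    evaluated.  */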
ca695ac9 8849
b93a436e
JL
8850 case BUILT_IN_FRAME_ADDRESS:
8851 /* The argument must be a nonnegative integer constant.
8852 It counts the number of frames to scan up the stack.
8853 The value is the address of that frame. */
8854 case BUILT_IN_RETURN_ADDRESS:
8855 /* The argument must be a nonnegative integer constant.
8856 It counts the number of frames to scan up the stack.
8857 The value is the return address saved in that frame. */
8858 if (arglist == 0)
8859 /* Warning about missing arg was already issued. */
8860 return const0_rtx;
8861 else if (TREE_CODE (TREE_VALUE (arglist)) != INTEGER_CST
8862 || tree_int_cst_sgn (TREE_VALUE (arglist)) < 0)
8863 {
8864 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8865 error ("invalid arg to `__builtin_frame_address'");
8866 else
8867 error ("invalid arg to `__builtin_return_address'");
8868 return const0_rtx;
8869 }
8870 else
8871 {
8872 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl),
8873 TREE_INT_CST_LOW (TREE_VALUE (arglist)),
8874 hard_frame_pointer_rtx);
ee33823f 8875
b93a436e
JL
8876 /* Some ports cannot access arbitrary stack frames. */
8877 if (tem == NULL)
8878 {
8879 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8880 warning ("unsupported arg to `__builtin_frame_address'");
8881 else
8882 warning ("unsupported arg to `__builtin_return_address'");
8883 return const0_rtx;
8884 }
ee33823f 8885
b93a436e
JL
8886 /* For __builtin_frame_address, return what we've got. */
8887 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
8888 return tem;
ee33823f 8889
b93a436e
JL
8890 if (GET_CODE (tem) != REG)
8891 tem = copy_to_reg (tem);
8892 return tem;
8893 }
ee33823f 8894
b93a436e
JL
8895 /* Returns the address of the area where the structure is returned.
8896 0 otherwise. */
8897 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
8898 if (arglist != 0
8899 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
8900 || GET_CODE (DECL_RTL (DECL_RESULT (current_function_decl))) != MEM)
8901 return const0_rtx;
8902 else
8903 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
ee33823f 8904
b93a436e
JL
8905 case BUILT_IN_ALLOCA:
8906 if (arglist == 0
8907 /* Arg could be non-integer if user redeclared this fcn wrong. */
8908 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8909 break;
bbf6f052 8910
b93a436e
JL
8911 /* Compute the argument. */
8912 op0 = expand_expr (TREE_VALUE (arglist), NULL_RTX, VOIDmode, 0);
bbf6f052 8913
b93a436e
JL
8914 /* Allocate the desired space. */
8915 return allocate_dynamic_stack_space (op0, target, BITS_PER_UNIT);
ca695ac9 8916
b93a436e
JL
8917 case BUILT_IN_FFS:
8918 /* If not optimizing, call the library function. */
8919 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8920 break;
ca695ac9 8921
b93a436e
JL
8922 if (arglist == 0
8923 /* Arg could be non-integer if user redeclared this fcn wrong. */
8924 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != INTEGER_TYPE)
8925 break;
ca695ac9 8926
b93a436e
JL
8927 /* Compute the argument. */
8928 op0 = expand_expr (TREE_VALUE (arglist), subtarget, VOIDmode, 0);
8929 /* Compute ffs, into TARGET if possible.
8930 Set TARGET to wherever the result comes back. */
8931 target = expand_unop (TYPE_MODE (TREE_TYPE (TREE_VALUE (arglist))),
8932 ffs_optab, op0, target, 1);
8933 if (target == 0)
8934 abort ();
8935 return target;
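/* ffs (x) yields one plus the index of the least significant 1-bit of X,
   or 0 when X == 0; e.g. ffs (8) == 4 and ffs (0) == 0.  The expansion
   above uses the target's ffs pattern directly via ffs_optab.  */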
bbf6f052 8936
b93a436e
JL
8937 case BUILT_IN_STRLEN:
8938 /* If not optimizing, call the library function. */
8939 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
8940 break;
bbf6f052 8941
b93a436e
JL
8942 if (arglist == 0
8943 /* Arg could be non-pointer if user redeclared this fcn wrong. */
8944 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
8945 break;
8946 else
8947 {
8948 tree src = TREE_VALUE (arglist);
8949 tree len = c_strlen (src);
bbf6f052 8950
b93a436e
JL
8951 int align
8952 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
46b68a37 8953
b93a436e
JL
8954 rtx result, src_rtx, char_rtx;
8955 enum machine_mode insn_mode = value_mode, char_mode;
8956 enum insn_code icode;
46b68a37 8957
b93a436e
JL
8958 /* If the length is known, just return it. */
8959 if (len != 0)
8960 return expand_expr (len, target, mode, EXPAND_MEMORY_USE_BAD);
956d6950 8961
b93a436e
JL
8962 /* If SRC is not a pointer type, don't do this operation inline. */
8963 if (align == 0)
8964 break;
bbf6f052 8965
b93a436e 8966 /* Call a function if we can't compute strlen in the right mode. */
bbf6f052 8967
b93a436e
JL
8968 while (insn_mode != VOIDmode)
8969 {
8970 icode = strlen_optab->handlers[(int) insn_mode].insn_code;
8971 if (icode != CODE_FOR_nothing)
8972 break;
ca695ac9 8973
b93a436e
JL
8974 insn_mode = GET_MODE_WIDER_MODE (insn_mode);
8975 }
8976 if (insn_mode == VOIDmode)
8977 break;
ca695ac9 8978
b93a436e
JL
8979 /* Make a place to write the result of the instruction. */
8980 result = target;
8981 if (! (result != 0
8982 && GET_CODE (result) == REG
8983 && GET_MODE (result) == insn_mode
8984 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
8985 result = gen_reg_rtx (insn_mode);
ca695ac9 8986
b93a436e 8987 /* Make sure the operands are acceptable to the predicates. */
ca695ac9 8988
b93a436e
JL
8989 if (! (*insn_operand_predicate[(int)icode][0]) (result, insn_mode))
8990 result = gen_reg_rtx (insn_mode);
8991 src_rtx = memory_address (BLKmode,
8992 expand_expr (src, NULL_RTX, ptr_mode,
8993 EXPAND_NORMAL));
bbf6f052 8994
b93a436e
JL
8995 if (! (*insn_operand_predicate[(int)icode][1]) (src_rtx, Pmode))
8996 src_rtx = copy_to_mode_reg (Pmode, src_rtx);
bbf6f052 8997
b93a436e
JL
8998 /* Check the string is readable and has an end. */
8999 if (flag_check_memory_usage)
9000 emit_library_call (chkr_check_str_libfunc, 1, VOIDmode, 2,
9001 src_rtx, ptr_mode,
9002 GEN_INT (MEMORY_USE_RO),
9003 TYPE_MODE (integer_type_node));
bbf6f052 9004
b93a436e
JL
9005 char_rtx = const0_rtx;
9006 char_mode = insn_operand_mode[(int)icode][2];
9007 if (! (*insn_operand_predicate[(int)icode][2]) (char_rtx, char_mode))
9008 char_rtx = copy_to_mode_reg (char_mode, char_rtx);
bbf6f052 9009
b93a436e
JL
9010 emit_insn (GEN_FCN (icode) (result,
9011 gen_rtx_MEM (BLKmode, src_rtx),
9012 char_rtx, GEN_INT (align)));
bbf6f052 9013
b93a436e
JL
9014 /* Return the value in the proper mode for this function. */
9015 if (GET_MODE (result) == value_mode)
9016 return result;
9017 else if (target != 0)
9018 {
9019 convert_move (target, result, 0);
9020 return target;
9021 }
9022 else
9023 return convert_to_mode (value_mode, result, 0);
9024 }
bbf6f052 9025
b93a436e
JL
9026 case BUILT_IN_STRCPY:
9027 /* If not optimizing, call the library function. */
9028 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9029 break;
bbf6f052 9030
b93a436e
JL
9031 if (arglist == 0
9032 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9033 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9034 || TREE_CHAIN (arglist) == 0
9035 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9036 break;
9037 else
9038 {
9039 tree len = c_strlen (TREE_VALUE (TREE_CHAIN (arglist)));
bbf6f052 9040
b93a436e
JL
9041 if (len == 0)
9042 break;
bbf6f052 9043
b93a436e 9044 len = size_binop (PLUS_EXPR, len, integer_one_node);
6d100794 9045
b93a436e
JL
9046 chainon (arglist, build_tree_list (NULL_TREE, len));
9047 }
6d100794 9048
b93a436e
JL
9049 /* Falls through to the memcpy case. */
9050 case BUILT_IN_MEMCPY:
9051 /* If not optimizing, call the library function. */
9052 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9053 break;
e7c33f54 9054
b93a436e
JL
9055 if (arglist == 0
9056 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9057 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9058 || TREE_CHAIN (arglist) == 0
9059 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9060 != POINTER_TYPE)
9061 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9062 || (TREE_CODE (TREE_TYPE (TREE_VALUE
9063 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9064 != INTEGER_TYPE))
9065 break;
9066 else
9067 {
9068 tree dest = TREE_VALUE (arglist);
9069 tree src = TREE_VALUE (TREE_CHAIN (arglist));
9070 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
e7c33f54 9071
b93a436e
JL
9072 int src_align
9073 = get_pointer_alignment (src, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9074 int dest_align
9075 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
55a6ba9f 9076 rtx dest_mem, src_mem, dest_addr, len_rtx;
e7c33f54 9077
b93a436e
JL
9078 /* If either SRC or DEST is not a pointer type, don't do
9079 this operation in-line. */
9080 if (src_align == 0 || dest_align == 0)
9081 {
9082 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCPY)
9083 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9084 break;
9085 }
e7c33f54 9086
55a6ba9f
JC
9087 dest_mem = get_memory_rtx (dest);
9088 src_mem = get_memory_rtx (src);
b93a436e 9089 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
e7c33f54 9090
b93a436e
JL
9091 /* Just copy the rights of SRC to the rights of DEST. */
9092 if (flag_check_memory_usage)
9093 emit_library_call (chkr_copy_bitmap_libfunc, 1, VOIDmode, 3,
55a6ba9f
JC
9094 XEXP (dest_mem, 0), ptr_mode,
9095 XEXP (src_mem, 0), ptr_mode,
b93a436e 9096 len_rtx, TYPE_MODE (sizetype));
e7c33f54 9097
b93a436e
JL
9098 /* Copy word part most expediently. */
9099 dest_addr
9100 = emit_block_move (dest_mem, src_mem, len_rtx,
9101 MIN (src_align, dest_align));
e7c33f54 9102
b93a436e 9103 if (dest_addr == 0)
55a6ba9f 9104 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
e7c33f54 9105
b93a436e
JL
9106 return dest_addr;
9107 }
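/* When the source length is known, the strcpy case above rewrites itself
   as a memcpy with an explicit length and falls into the memcpy
   expansion, e.g.:

     strcpy (d, "abc")   is handled as   memcpy (d, "abc", 4)

   where the extra byte covers the terminating null; the copy is then
   emitted as a block move when both pointer alignments are known.  */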
e7c33f54 9108
b93a436e
JL
9109 case BUILT_IN_MEMSET:
9110 /* If not optimizing, call the library function. */
9111 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9112 break;
e7c33f54 9113
b93a436e
JL
9114 if (arglist == 0
9115 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9116 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9117 || TREE_CHAIN (arglist) == 0
9118 || (TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist))))
9119 != INTEGER_TYPE)
9120 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9121 || (TREE_CODE (TREE_TYPE
9122 (TREE_VALUE
9123 (TREE_CHAIN (TREE_CHAIN (arglist)))))
9124 != INTEGER_TYPE))
9125 break;
9126 else
9127 {
9128 tree dest = TREE_VALUE (arglist);
9129 tree val = TREE_VALUE (TREE_CHAIN (arglist));
9130 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
e7c33f54 9131
b93a436e
JL
9132 int dest_align
9133 = get_pointer_alignment (dest, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
55a6ba9f 9134 rtx dest_mem, dest_addr, len_rtx;
e7c33f54 9135
b93a436e
JL
9136 /* If DEST is not a pointer type, don't do this
9137 operation in-line. */
9138 if (dest_align == 0)
9139 break;
bbf6f052 9140
bf931ec8
JW
9141 /* If the arguments have side-effects, then we can only evaluate
9142 them at most once. The following code evaluates them twice if
9143 they are not constants because we break out to expand_call
9144 in that case. They can't be constants if they have side-effects
9145 so we can check for that first. Alternatively, we could call
9146 save_expr to make multiple evaluation safe. */
9147 if (TREE_SIDE_EFFECTS (val) || TREE_SIDE_EFFECTS (len))
9148 break;
9149
b93a436e
JL
9150 /* If VAL is not 0, don't do this operation in-line. */
9151 if (expand_expr (val, NULL_RTX, VOIDmode, 0) != const0_rtx)
9152 break;
bbf6f052 9153
b93a436e
JL
9154 /* If LEN does not expand to a constant, don't do this
9155 operation in-line. */
9156 len_rtx = expand_expr (len, NULL_RTX, VOIDmode, 0);
9157 if (GET_CODE (len_rtx) != CONST_INT)
9158 break;
bbf6f052 9159
55a6ba9f 9160 dest_mem = get_memory_rtx (dest);
b93a436e
JL
9161
9162 /* Just check DST is writable and mark it as readable. */
9163 if (flag_check_memory_usage)
9164 emit_library_call (chkr_check_addr_libfunc, 1, VOIDmode, 3,
55a6ba9f 9165 XEXP (dest_mem, 0), ptr_mode,
b93a436e
JL
9166 len_rtx, TYPE_MODE (sizetype),
9167 GEN_INT (MEMORY_USE_WO),
9168 TYPE_MODE (integer_type_node));
bbf6f052 9169
bbf6f052 9170
b93a436e 9171 dest_addr = clear_storage (dest_mem, len_rtx, dest_align);
bbf6f052 9172
b93a436e 9173 if (dest_addr == 0)
55a6ba9f 9174 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
bbf6f052 9175
b93a436e
JL
9176 return dest_addr;
9177 }
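/* The guards above restrict the inline expansion to the bzero-like case;
   everything else drops through to the library call:

     memset (buf, 0, 128)     inlined via clear_storage, provided BUF has
                              a known pointer alignment;
     memset (buf, 'x', 128)   library call (VAL is not zero);
     memset (buf, 0, n)       library call (LEN is not a constant).  */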
bbf6f052 9178
b93a436e
JL
9179/* These comparison functions need an instruction that returns an actual
9180 index. An ordinary compare that just sets the condition codes
9181 is not enough. */
9182#ifdef HAVE_cmpstrsi
9183 case BUILT_IN_STRCMP:
9184 /* If not optimizing, call the library function. */
9185 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9186 break;
bbf6f052 9187
b93a436e
JL
9188 /* If we need to check memory accesses, call the library function. */
9189 if (flag_check_memory_usage)
9190 break;
bbf6f052 9191
b93a436e
JL
9192 if (arglist == 0
9193 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9194 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9195 || TREE_CHAIN (arglist) == 0
9196 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE)
9197 break;
9198 else if (!HAVE_cmpstrsi)
9199 break;
9200 {
9201 tree arg1 = TREE_VALUE (arglist);
9202 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
b93a436e 9203 tree len, len2;
a97f5a86 9204
b93a436e
JL
9205 len = c_strlen (arg1);
9206 if (len)
9207 len = size_binop (PLUS_EXPR, integer_one_node, len);
9208 len2 = c_strlen (arg2);
9209 if (len2)
9210 len2 = size_binop (PLUS_EXPR, integer_one_node, len2);
e9cdf6e4 9211
b93a436e
JL
9212 /* If we don't have a constant length for the first, use the length
9213 of the second, if we know it. We don't require a constant for
9214 this case; some cost analysis could be done if both are available
9215 but neither is constant. For now, assume they're equally cheap.
e9cdf6e4 9216
b93a436e
JL
9217 If both strings have constant lengths, use the smaller. This
9218 could arise if optimization results in strcpy being called with
9219 two fixed strings, or if the code was machine-generated. We should
9220 add some code to the `memcmp' handler below to deal with such
9221 situations, someday. */
9222 if (!len || TREE_CODE (len) != INTEGER_CST)
9223 {
9224 if (len2)
9225 len = len2;
9226 else if (len == 0)
9227 break;
9228 }
9229 else if (len2 && TREE_CODE (len2) == INTEGER_CST)
9230 {
9231 if (tree_int_cst_lt (len2, len))
9232 len = len2;
9233 }
bbf6f052 9234
b93a436e
JL
9235 chainon (arglist, build_tree_list (NULL_TREE, len));
9236 }
bbf6f052 9237
b93a436e
JL
9238 /* Falls through to the memcmp case. */
9239 case BUILT_IN_MEMCMP:
9240 /* If not optimizing, call the library function. */
9241 if (!optimize && ! CALLED_AS_BUILT_IN (fndecl))
9242 break;
bbf6f052 9243
b93a436e
JL
9244 /* If we need to check memory accesses, call the library function. */
9245 if (flag_check_memory_usage)
9246 break;
bbf6f052 9247
b93a436e
JL
9248 if (arglist == 0
9249 /* Arg could be non-pointer if user redeclared this fcn wrong. */
9250 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE
9251 || TREE_CHAIN (arglist) == 0
9252 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (arglist)))) != POINTER_TYPE
9253 || TREE_CHAIN (TREE_CHAIN (arglist)) == 0
9254 || TREE_CODE (TREE_TYPE (TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist))))) != INTEGER_TYPE)
9255 break;
9256 else if (!HAVE_cmpstrsi)
9257 break;
9258 {
9259 tree arg1 = TREE_VALUE (arglist);
9260 tree arg2 = TREE_VALUE (TREE_CHAIN (arglist));
9261 tree len = TREE_VALUE (TREE_CHAIN (TREE_CHAIN (arglist)));
9262 rtx result;
0842a179 9263
b93a436e
JL
9264 int arg1_align
9265 = get_pointer_alignment (arg1, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9266 int arg2_align
9267 = get_pointer_alignment (arg2, BIGGEST_ALIGNMENT) / BITS_PER_UNIT;
9268 enum machine_mode insn_mode
9269 = insn_operand_mode[(int) CODE_FOR_cmpstrsi][0];
0842a179 9270
b93a436e
JL
9271 /* If either argument is not a pointer type, call the function. */
9272 if (arg1_align == 0 || arg2_align == 0)
9273 {
9274 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRCMP)
9275 TREE_CHAIN (TREE_CHAIN (arglist)) = 0;
9276 break;
9277 }
bbf6f052 9278
b93a436e
JL
9279 /* Make a place to write the result of the instruction. */
9280 result = target;
9281 if (! (result != 0
9282 && GET_CODE (result) == REG && GET_MODE (result) == insn_mode
9283 && REGNO (result) >= FIRST_PSEUDO_REGISTER))
9284 result = gen_reg_rtx (insn_mode);
bbf6f052 9285
55a6ba9f
JC
9286 emit_insn (gen_cmpstrsi (result, get_memory_rtx (arg1),
9287 get_memory_rtx (arg2),
b93a436e
JL
9288 expand_expr (len, NULL_RTX, VOIDmode, 0),
9289 GEN_INT (MIN (arg1_align, arg2_align))));
bbf6f052 9290
b93a436e
JL
9291 /* Return the value in the proper mode for this function. */
9292 mode = TYPE_MODE (TREE_TYPE (exp));
9293 if (GET_MODE (result) == mode)
9294 return result;
9295 else if (target != 0)
9296 {
9297 convert_move (target, result, 0);
9298 return target;
9299 }
9300 else
9301 return convert_to_mode (mode, result, 0);
9302 }
9303#else
9304 case BUILT_IN_STRCMP:
9305 case BUILT_IN_MEMCMP:
9306 break;
9307#endif
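/* On a target with a cmpstrsi pattern, the length fed to the comparison
   comes from whatever constant string operands are available, e.g.:

     strcmp (s, "abc")       compares with length 4 (strlen plus the
                             terminator);
     strcmp ("ab", "wxyz")   uses the smaller constant length, 3;
     strcmp (s, t)           stays a library call when neither length is
                             known.  */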
bbf6f052 9308
b93a436e
JL
9309 case BUILT_IN_SETJMP:
9310 if (arglist == 0
9311 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9312 break;
6fd1c67b
RH
9313 else
9314 {
9315 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9316 VOIDmode, 0);
9317 rtx lab = gen_label_rtx ();
9318 rtx ret = expand_builtin_setjmp (buf_addr, target, lab, lab);
9319 emit_label (lab);
9320 return ret;
9321 }
bbf6f052 9322
6fd1c67b
RH
9323 /* __builtin_longjmp is passed a pointer to an array of five words.
9324 It's similar to the C library longjmp function but works with
9325 __builtin_setjmp above. */
b93a436e
JL
9326 case BUILT_IN_LONGJMP:
9327 if (arglist == 0 || TREE_CHAIN (arglist) == 0
9328 || TREE_CODE (TREE_TYPE (TREE_VALUE (arglist))) != POINTER_TYPE)
9329 break;
b93a436e 9330 else
b93a436e 9331 {
6fd1c67b
RH
9332 rtx buf_addr = expand_expr (TREE_VALUE (arglist), subtarget,
9333 VOIDmode, 0);
9334 rtx value = expand_expr (TREE_VALUE (TREE_CHAIN (arglist)),
3e2b9a3d 9335 NULL_RTX, VOIDmode, 0);
e0cd0770
JC
9336
9337 if (value != const1_rtx)
9338 {
9339 error ("__builtin_longjmp second argument must be 1");
9340 return const0_rtx;
9341 }
9342
6fd1c67b
RH
9343 expand_builtin_longjmp (buf_addr, value);
9344 return const0_rtx;
b93a436e 9345 }
bbf6f052 9346
e0cd0770
JC
9347 case BUILT_IN_TRAP:
9348#ifdef HAVE_trap
9349 if (HAVE_trap)
9350 emit_insn (gen_trap ());
9351 else
9352#endif
9353 error ("__builtin_trap not supported by this target");
9354 emit_barrier ();
9355 return const0_rtx;
9356
b93a436e
JL
9357 /* Various hooks for the DWARF 2 __throw routine. */
9358 case BUILT_IN_UNWIND_INIT:
9359 expand_builtin_unwind_init ();
9360 return const0_rtx;
9361 case BUILT_IN_FP:
9362 return frame_pointer_rtx;
9363 case BUILT_IN_SP:
9364 return stack_pointer_rtx;
9365#ifdef DWARF2_UNWIND_INFO
9366 case BUILT_IN_DWARF_FP_REGNUM:
9367 return expand_builtin_dwarf_fp_regnum ();
9368 case BUILT_IN_DWARF_REG_SIZE:
9369 return expand_builtin_dwarf_reg_size (TREE_VALUE (arglist), target);
fb2ca25a 9370#endif
b93a436e
JL
9371 case BUILT_IN_FROB_RETURN_ADDR:
9372 return expand_builtin_frob_return_addr (TREE_VALUE (arglist));
9373 case BUILT_IN_EXTRACT_RETURN_ADDR:
9374 return expand_builtin_extract_return_addr (TREE_VALUE (arglist));
9375 case BUILT_IN_SET_RETURN_ADDR_REG:
9376 expand_builtin_set_return_addr_reg (TREE_VALUE (arglist));
9377 return const0_rtx;
a1622f83
AM
9378 case BUILT_IN_EH_STUB_OLD:
9379 return expand_builtin_eh_stub_old ();
b93a436e
JL
9380 case BUILT_IN_EH_STUB:
9381 return expand_builtin_eh_stub ();
9382 case BUILT_IN_SET_EH_REGS:
9383 expand_builtin_set_eh_regs (TREE_VALUE (arglist),
9384 TREE_VALUE (TREE_CHAIN (arglist)));
9385 return const0_rtx;
ca695ac9 9386
b93a436e
JL
9387 default: /* just do library call, if unknown builtin */
9388 error ("built-in function `%s' not currently supported",
9389 IDENTIFIER_POINTER (DECL_NAME (fndecl)));
ca695ac9 9390 }
0006469d 9391
b93a436e
JL
9392 /* The switch statement above can drop through to cause the function
9393 to be called normally. */
0006469d 9394
b93a436e 9395 return expand_call (exp, target, ignore);
ca695ac9 9396}
b93a436e
JL
9397\f
9398/* Built-in functions to perform an untyped call and return. */
0006469d 9399
b93a436e
JL
9400/* For each register that may be used for calling a function, this
9401 gives a mode used to copy the register's value. VOIDmode indicates
9402 the register is not used for calling a function. If the machine
9403 has register windows, this gives only the outbound registers.
9404 INCOMING_REGNO gives the corresponding inbound register. */
9405static enum machine_mode apply_args_mode[FIRST_PSEUDO_REGISTER];
0006469d 9406
b93a436e
JL
9407/* For each register that may be used for returning values, this gives
9408 a mode used to copy the register's value. VOIDmode indicates the
9409 register is not used for returning values. If the machine has
9410 register windows, this gives only the outbound registers.
9411 INCOMING_REGNO gives the corresponding inbound register. */
9412static enum machine_mode apply_result_mode[FIRST_PSEUDO_REGISTER];
0006469d 9413
b93a436e
JL
9414/* For each register that may be used for calling a function, this
9415 gives the offset of that register into the block returned by
9416 __builtin_apply_args. 0 indicates that the register is not
9417 used for calling a function. */
9418static int apply_args_reg_offset[FIRST_PSEUDO_REGISTER];
9419
9420/* Return the offset of register REGNO into the block returned by
9421 __builtin_apply_args. This is not declared static, since it is
9422 needed in objc-act.c. */
0006469d 9423
b93a436e
JL
9424int
9425apply_args_register_offset (regno)
9426 int regno;
9427{
9428 apply_args_size ();
0006469d 9429
b93a436e
JL
9430 /* Arguments are always put in outgoing registers (in the argument
9431 block) when that makes sense. */
9432#ifdef OUTGOING_REGNO
9433 regno = OUTGOING_REGNO(regno);
9434#endif
9435 return apply_args_reg_offset[regno];
9436}
904762c8 9437
b93a436e
JL
9438/* Return the size required for the block returned by __builtin_apply_args,
9439 and initialize apply_args_mode. */
9440
9441static int
9442apply_args_size ()
0006469d 9443{
b93a436e
JL
9444 static int size = -1;
9445 int align, regno;
2f6e6d22 9446 enum machine_mode mode;
0006469d 9447
b93a436e
JL
9448 /* The values computed by this function never change. */
9449 if (size < 0)
ca695ac9 9450 {
b93a436e
JL
9451 /* The first value is the incoming arg-pointer. */
9452 size = GET_MODE_SIZE (Pmode);
0006469d 9453
b93a436e
JL
9454 /* The second value is the structure value address unless this is
9455 passed as an "invisible" first argument. */
9456 if (struct_value_rtx)
9457 size += GET_MODE_SIZE (Pmode);
0006469d 9458
b93a436e
JL
9459 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9460 if (FUNCTION_ARG_REGNO_P (regno))
9461 {
9462 /* Search for the proper mode for copying this register's
9463 value. I'm not sure this is right, but it works so far. */
9464 enum machine_mode best_mode = VOIDmode;
0006469d 9465
b93a436e
JL
9466 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9467 mode != VOIDmode;
9468 mode = GET_MODE_WIDER_MODE (mode))
9469 if (HARD_REGNO_MODE_OK (regno, mode)
9470 && HARD_REGNO_NREGS (regno, mode) == 1)
9471 best_mode = mode;
0006469d 9472
b93a436e
JL
9473 if (best_mode == VOIDmode)
9474 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9475 mode != VOIDmode;
9476 mode = GET_MODE_WIDER_MODE (mode))
9477 if (HARD_REGNO_MODE_OK (regno, mode)
9478 && (mov_optab->handlers[(int) mode].insn_code
9479 != CODE_FOR_nothing))
9480 best_mode = mode;
0006469d 9481
b93a436e
JL
9482 mode = best_mode;
9483 if (mode == VOIDmode)
9484 abort ();
904762c8 9485
b93a436e
JL
9486 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9487 if (size % align != 0)
9488 size = CEIL (size, align) * align;
9489 apply_args_reg_offset[regno] = size;
9490 size += GET_MODE_SIZE (mode);
9491 apply_args_mode[regno] = mode;
9492 }
9493 else
9494 {
9495 apply_args_mode[regno] = VOIDmode;
9496 apply_args_reg_offset[regno] = 0;
9497 }
9498 }
9499 return size;
9500}
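/* A worked instance of the padding arithmetic above: with size == 4 and
   an 8-byte register mode, align == 8 and size % align == 4, so

     size = CEIL (4, 8) * 8 == 8;

   the register's offset is recorded as 8 and size then advances to 16.
   Slots are thus packed at their natural alignment.  */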
0006469d 9501
b93a436e
JL
9502/* Return the size required for the block returned by __builtin_apply,
9503 and initialize apply_result_mode. */
904762c8 9504
b93a436e
JL
9505static int
9506apply_result_size ()
9507{
9508 static int size = -1;
9509 int align, regno;
9510 enum machine_mode mode;
0006469d 9511
b93a436e
JL
9512 /* The values computed by this function never change. */
9513 if (size < 0)
9514 {
9515 size = 0;
0006469d 9516
b93a436e
JL
9517 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9518 if (FUNCTION_VALUE_REGNO_P (regno))
9519 {
9520 /* Search for the proper mode for copying this register's
9521 value. I'm not sure this is right, but it works so far. */
9522 enum machine_mode best_mode = VOIDmode;
0006469d 9523
b93a436e
JL
9524 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
9525 mode != TImode;
9526 mode = GET_MODE_WIDER_MODE (mode))
9527 if (HARD_REGNO_MODE_OK (regno, mode))
9528 best_mode = mode;
0006469d 9529
b93a436e
JL
9530 if (best_mode == VOIDmode)
9531 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
9532 mode != VOIDmode;
9533 mode = GET_MODE_WIDER_MODE (mode))
9534 if (HARD_REGNO_MODE_OK (regno, mode)
9535 && (mov_optab->handlers[(int) mode].insn_code
9536 != CODE_FOR_nothing))
9537 best_mode = mode;
0006469d 9538
b93a436e
JL
9539 mode = best_mode;
9540 if (mode == VOIDmode)
9541 abort ();
9542
9543 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9544 if (size % align != 0)
9545 size = CEIL (size, align) * align;
9546 size += GET_MODE_SIZE (mode);
9547 apply_result_mode[regno] = mode;
9548 }
9549 else
9550 apply_result_mode[regno] = VOIDmode;
9551
9552 /* Allow targets that use untyped_call and untyped_return to override
9553 the size so that machine-specific information can be stored here. */
9554#ifdef APPLY_RESULT_SIZE
9555 size = APPLY_RESULT_SIZE;
9556#endif
9557 }
9558 return size;
9559}
0006469d 9560
b93a436e
JL
9561#if defined (HAVE_untyped_call) || defined (HAVE_untyped_return)
9562/* Create a vector describing the result block RESULT. If SAVEP is true,
9563 the result block is used to save the values; otherwise it is used to
9564 restore the values. */
9565
9566static rtx
9567result_vector (savep, result)
9568 int savep;
9569 rtx result;
9570{
9571 int regno, size, align, nelts;
9572 enum machine_mode mode;
9573 rtx reg, mem;
9574 rtx *savevec = (rtx *) alloca (FIRST_PSEUDO_REGISTER * sizeof (rtx));
9575
9576 size = nelts = 0;
9577 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9578 if ((mode = apply_result_mode[regno]) != VOIDmode)
9579 {
9580 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9581 if (size % align != 0)
9582 size = CEIL (size, align) * align;
9583 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
9584 mem = change_address (result, mode,
9585 plus_constant (XEXP (result, 0), size));
9586 savevec[nelts++] = (savep
9587 ? gen_rtx_SET (VOIDmode, mem, reg)
9588 : gen_rtx_SET (VOIDmode, reg, mem));
9589 size += GET_MODE_SIZE (mode);
ca695ac9 9590 }
b93a436e
JL
9591 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
9592}
9593#endif /* HAVE_untyped_call or HAVE_untyped_return */
0006469d 9594
b93a436e
JL
9595/* Save the state required to perform an untyped call with the same
9596 arguments as were passed to the current function. */
904762c8 9597
b93a436e
JL
9598static rtx
9599expand_builtin_apply_args ()
9600{
9601 rtx registers;
9602 int size, align, regno;
9603 enum machine_mode mode;
0006469d 9604
b93a436e
JL
9605 /* Create a block where the arg-pointer, structure value address,
9606 and argument registers can be saved. */
9607 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
0cb1d109 9608
b93a436e
JL
9609 /* Walk past the arg-pointer and structure value address. */
9610 size = GET_MODE_SIZE (Pmode);
9611 if (struct_value_rtx)
9612 size += GET_MODE_SIZE (Pmode);
0cb1d109 9613
b93a436e
JL
9614 /* Save each register used in calling a function to the block. */
9615 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
9616 if ((mode = apply_args_mode[regno]) != VOIDmode)
9617 {
9618 rtx tem;
0cb1d109 9619
b93a436e
JL
9620 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
9621 if (size % align != 0)
9622 size = CEIL (size, align) * align;
0006469d 9623
b93a436e 9624 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
0e8c9172 9625
b93a436e
JL
9626#ifdef STACK_REGS
9627 /* For reg-stack.c's stack register housekeeping.
9628 Compare with a similar piece of code in function.c. */
0006469d 9629
b93a436e
JL
9630 emit_insn (gen_rtx_USE (mode, tem));
9631#endif
0e8c9172 9632
b93a436e
JL
9633 emit_move_insn (change_address (registers, mode,
9634 plus_constant (XEXP (registers, 0),
9635 size)),
9636 tem);
9637 size += GET_MODE_SIZE (mode);
0e8c9172 9638 }
0006469d 9639
b93a436e
JL
9640 /* Save the arg pointer to the block. */
9641 emit_move_insn (change_address (registers, Pmode, XEXP (registers, 0)),
9642 copy_to_reg (virtual_incoming_args_rtx));
9643 size = GET_MODE_SIZE (Pmode);
0006469d 9644
b93a436e
JL
9645 /* Save the structure value address unless this is passed as an
9646 "invisible" first argument. */
9647 if (struct_value_incoming_rtx)
9648 {
9649 emit_move_insn (change_address (registers, Pmode,
9650 plus_constant (XEXP (registers, 0),
9651 size)),
9652 copy_to_reg (struct_value_incoming_rtx));
9653 size += GET_MODE_SIZE (Pmode);
9654 }
0006469d 9655
b93a436e
JL
9656 /* Return the address of the block. */
9657 return copy_addr_to_reg (XEXP (registers, 0));
9658}
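/* The block built above has a fixed layout: the incoming argument pointer
   at offset 0, then the structure value address when the target passes one,
   then each argument register at the next offset aligned for its mode.
   expand_builtin_apply below walks the block with exactly the same loop,
   so the two functions must be kept in sync.  */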

/* Perform an untyped call and save the state required to perform an
   untyped return of whatever value was returned by the given function.  */

static rtx
expand_builtin_apply (function, arguments, argsize)
     rtx function, arguments, argsize;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx incoming_args, result, reg, dest, call_insn;
  rtx old_stack_level = 0;
  rtx call_fusage = 0;

  /* Create a block where the return registers can be saved.  */
  result = assign_stack_local (BLKmode, apply_result_size (), -1);

  /* ??? The argsize value should be adjusted here.  */

  /* Fetch the arg pointer from the ARGUMENTS block.  */
  incoming_args = gen_reg_rtx (Pmode);
  emit_move_insn (incoming_args,
                  gen_rtx_MEM (Pmode, arguments));
#ifndef STACK_GROWS_DOWNWARD
  incoming_args = expand_binop (Pmode, sub_optab, incoming_args, argsize,
                                incoming_args, 0, OPTAB_LIB_WIDEN);
#endif
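  /* On targets where the stack grows upward, the saved arg pointer sits at
     the opposite end of the argument block, so the subtraction above is
     presumably what recovers the block's low address before the block copy
     further down.  */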

  /* Perform postincrements before actually calling the function.  */
  emit_queue ();

  /* Push a new argument block and copy the arguments.  */
  do_pending_stack_adjust ();

  /* Save the stack with nonlocal if available.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_save (SAVE_NONLOCAL, &old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_save (SAVE_BLOCK, &old_stack_level, NULL_RTX);

  /* Push a block of memory onto the stack to store the memory arguments.
     Save the address in a register, and copy the memory arguments.  ??? I
     haven't figured out how the calling convention macros affect this,
     but it's likely that the source and/or destination addresses in
     the block copy will need updating in machine specific ways.  */
  dest = allocate_dynamic_stack_space (argsize, 0, 0);
  emit_block_move (gen_rtx_MEM (BLKmode, dest),
                   gen_rtx_MEM (BLKmode, incoming_args),
                   argsize,
                   PARM_BOUNDARY / BITS_PER_UNIT);

  /* Refer to the argument block.  */
  apply_args_size ();
  arguments = gen_rtx_MEM (BLKmode, arguments);

  /* Walk past the arg-pointer and structure value address.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    size += GET_MODE_SIZE (Pmode);

  /* Restore each of the registers previously saved.  Make USE insns
     for each of these registers for use in making the call.  */
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_args_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, regno);
        emit_move_insn (reg,
                        change_address (arguments, mode,
                                        plus_constant (XEXP (arguments, 0),
                                                       size)));

        use_reg (&call_fusage, reg);
        size += GET_MODE_SIZE (mode);
      }

  /* Restore the structure value address unless this is passed as an
     "invisible" first argument.  */
  size = GET_MODE_SIZE (Pmode);
  if (struct_value_rtx)
    {
      rtx value = gen_reg_rtx (Pmode);
      emit_move_insn (value,
                      change_address (arguments, Pmode,
                                      plus_constant (XEXP (arguments, 0),
                                                     size)));
      emit_move_insn (struct_value_rtx, value);
      if (GET_CODE (struct_value_rtx) == REG)
        use_reg (&call_fusage, struct_value_rtx);
      size += GET_MODE_SIZE (Pmode);
    }

  /* All arguments and registers used for the call are set up by now!  */
  function = prepare_call_address (function, NULL_TREE, &call_fusage, 0);

  /* Ensure address is valid.  SYMBOL_REF is already valid, so no need,
     and we don't want to load it into a register as an optimization,
     because prepare_call_address already did it if it should be done.  */
  if (GET_CODE (function) != SYMBOL_REF)
    function = memory_address (FUNCTION_MODE, function);

  /* Generate the actual call instruction and save the return value.  */
#ifdef HAVE_untyped_call
  if (HAVE_untyped_call)
    emit_call_insn (gen_untyped_call (gen_rtx_MEM (FUNCTION_MODE, function),
                                      result, result_vector (1, result)));
  else
#endif
#ifdef HAVE_call_value
  if (HAVE_call_value)
    {
      rtx valreg = 0;

      /* Locate the unique return register.  It is not possible to
         express a call that sets more than one return register using
         call_value; use untyped_call for that.  In fact, untyped_call
         only needs to save the return registers in the given block.  */
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
        if ((mode = apply_result_mode[regno]) != VOIDmode)
          {
            if (valreg)
              abort (); /* HAVE_untyped_call required.  */
            valreg = gen_rtx_REG (mode, regno);
          }

      emit_call_insn (gen_call_value (valreg,
                                      gen_rtx_MEM (FUNCTION_MODE, function),
                                      const0_rtx, NULL_RTX, const0_rtx));

      emit_move_insn (change_address (result, GET_MODE (valreg),
                                      XEXP (result, 0)),
                      valreg);
    }
  else
#endif
    abort ();
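  /* To recap the choice just made: a target-supplied untyped_call pattern is
     preferred because it can save every possible return register into
     RESULT; plain call_value can describe only a single return register, and
     a target providing neither pattern cannot support an untyped call at
     all, hence the abort.  */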

  /* Find the CALL insn we just emitted.  */
  for (call_insn = get_last_insn ();
       call_insn && GET_CODE (call_insn) != CALL_INSN;
       call_insn = PREV_INSN (call_insn))
    ;

  if (! call_insn)
    abort ();

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;

  /* Restore the stack.  */
#ifdef HAVE_save_stack_nonlocal
  if (HAVE_save_stack_nonlocal)
    emit_stack_restore (SAVE_NONLOCAL, old_stack_level, NULL_RTX);
  else
#endif
    emit_stack_restore (SAVE_BLOCK, old_stack_level, NULL_RTX);

  /* Return the address of the result block.  */
  return copy_addr_to_reg (XEXP (result, 0));
}

/* Perform an untyped return.  */

static void
expand_builtin_return (result)
     rtx result;
{
  int size, align, regno;
  enum machine_mode mode;
  rtx reg;
  rtx call_fusage = 0;

  apply_result_size ();
  result = gen_rtx_MEM (BLKmode, result);

#ifdef HAVE_untyped_return
  if (HAVE_untyped_return)
    {
      emit_jump_insn (gen_untyped_return (result, result_vector (0, result)));
      emit_barrier ();
      return;
    }
#endif

  /* Restore the return value and note that each value is used.  */
  size = 0;
  for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
    if ((mode = apply_result_mode[regno]) != VOIDmode)
      {
        align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
        if (size % align != 0)
          size = CEIL (size, align) * align;
        reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
        emit_move_insn (reg,
                        change_address (result, mode,
                                        plus_constant (XEXP (result, 0),
                                                       size)));

        push_to_sequence (call_fusage);
        emit_insn (gen_rtx_USE (VOIDmode, reg));
        call_fusage = get_insns ();
        end_sequence ();
        size += GET_MODE_SIZE (mode);
      }

  /* Put the USE insns before the return.  */
  emit_insns (call_fusage);

  /* Return whatever value was restored by jumping directly to the end
     of the function.  */
  expand_null_return ();
}
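/* Note that the restore loop above is the mirror image of the save loop in
   expand_builtin_apply: the same modes and offsets are used, but values move
   from the result block back into the incoming hard return registers before
   the null return is expanded.  */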
\f
/* Expand code for a post- or pre- increment or decrement
   and return the RTX for the result.
   POST is 1 for postinc/decrements and 0 for preinc/decrements.  */

static rtx
expand_increment (exp, post, ignore)
     register tree exp;
     int post, ignore;
{
  register rtx op0, op1;
  register rtx temp, value;
  register tree incremented = TREE_OPERAND (exp, 0);
  optab this_optab = add_optab;
  int icode;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  int op0_is_copy = 0;
  int single_insn = 0;
  /* 1 means we can't store into OP0 directly,
     because it is a subreg narrower than a word,
     and we don't dare clobber the rest of the word.  */
  int bad_subreg = 0;

  /* Stabilize any component ref that might need to be
     evaluated more than once below.  */
  if (!post
      || TREE_CODE (incremented) == BIT_FIELD_REF
      || (TREE_CODE (incremented) == COMPONENT_REF
          && (TREE_CODE (TREE_OPERAND (incremented, 0)) != INDIRECT_REF
              || DECL_BIT_FIELD (TREE_OPERAND (incremented, 1)))))
    incremented = stabilize_reference (incremented);
  /* Nested *INCREMENT_EXPRs can happen in C++.  We must force innermost
     ones into save exprs so that they don't accidentally get evaluated
     more than once by the code below.  */
  if (TREE_CODE (incremented) == PREINCREMENT_EXPR
      || TREE_CODE (incremented) == PREDECREMENT_EXPR)
    incremented = save_expr (incremented);

  /* Compute the operands as RTX.
     Note whether OP0 is the actual lvalue or a copy of it:
     I believe it is a copy iff it is a register or subreg
     and insns were generated in computing it.  */

  temp = get_last_insn ();
  op0 = expand_expr (incremented, NULL_RTX, VOIDmode, EXPAND_MEMORY_USE_RW);

  /* If OP0 is a SUBREG made for a promoted variable, we cannot increment
     in place but instead must do sign- or zero-extension during assignment,
     so we copy it into a new register and let the code below use it as
     a copy.

     Note that we can safely modify this SUBREG since it is known not to be
     shared (it was made by the expand_expr call above).  */

  if (GET_CODE (op0) == SUBREG && SUBREG_PROMOTED_VAR_P (op0))
    {
      if (post)
        SUBREG_REG (op0) = copy_to_reg (SUBREG_REG (op0));
      else
        bad_subreg = 1;
    }
  else if (GET_CODE (op0) == SUBREG
           && GET_MODE_BITSIZE (GET_MODE (op0)) < BITS_PER_WORD)
    {
      /* We cannot increment this SUBREG in place.  If we are
         post-incrementing, get a copy of the old value.  Otherwise,
         just mark that we cannot increment in place.  */
      if (post)
        op0 = copy_to_reg (op0);
      else
        bad_subreg = 1;
    }

  op0_is_copy = ((GET_CODE (op0) == SUBREG || GET_CODE (op0) == REG)
                 && temp != get_last_insn ());
  op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode,
                     EXPAND_MEMORY_USE_BAD);

  /* Decide whether incrementing or decrementing.  */
  if (TREE_CODE (exp) == POSTDECREMENT_EXPR
      || TREE_CODE (exp) == PREDECREMENT_EXPR)
    this_optab = sub_optab;

  /* Convert decrement by a constant into a negative increment.  */
  if (this_optab == sub_optab
      && GET_CODE (op1) == CONST_INT)
    {
      op1 = GEN_INT (- INTVAL (op1));
      this_optab = add_optab;
    }

  /* For a preincrement, see if we can do this with a single instruction.  */
  if (!post)
    {
      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_operand_predicate[icode][0]) (op0, mode)
          && (*insn_operand_predicate[icode][1]) (op0, mode)
          && (*insn_operand_predicate[icode][2]) (op1, mode))
        single_insn = 1;
    }

  /* If OP0 is not the actual lvalue, but rather a copy in a register,
     then we cannot just increment OP0.  We must therefore contrive to
     increment the original value.  Then, for postincrement, we can return
     OP0 since it is a copy of the old value.  For preincrement, expand here
     unless we can do it with a single insn.

     Likewise if storing directly into OP0 would clobber high bits
     we need to preserve (bad_subreg).  */
  if (op0_is_copy || (!post && !single_insn) || bad_subreg)
    {
      /* This is the easiest way to increment the value wherever it is.
         Problems with multiple evaluation of INCREMENTED are prevented
         because either (1) it is a component_ref or preincrement,
         in which case it was stabilized above, or (2) it is an array_ref
         with constant index in an array in a register, which is
         safe to reevaluate.  */
      tree newexp = build (((TREE_CODE (exp) == POSTDECREMENT_EXPR
                             || TREE_CODE (exp) == PREDECREMENT_EXPR)
                            ? MINUS_EXPR : PLUS_EXPR),
                           TREE_TYPE (exp),
                           incremented,
                           TREE_OPERAND (exp, 1));

      while (TREE_CODE (incremented) == NOP_EXPR
             || TREE_CODE (incremented) == CONVERT_EXPR)
        {
          newexp = convert (TREE_TYPE (incremented), newexp);
          incremented = TREE_OPERAND (incremented, 0);
        }

      temp = expand_assignment (incremented, newexp, ! post && ! ignore, 0);
      return post ? op0 : temp;
    }

  if (post)
    {
      /* We have a true reference to the value in OP0.
         If there is an insn to add or subtract in this mode, queue it.
         Queueing the increment insn avoids the register shuffling
         that often results if we must increment now and first save
         the old value for subsequent use.  */

#if 0 /* Turned off to avoid making extra insn for indexed memref.  */
      op0 = stabilize (op0);
#endif

      icode = (int) this_optab->handlers[(int) mode].insn_code;
      if (icode != (int) CODE_FOR_nothing
          /* Make sure that OP0 is valid for operands 0 and 1
             of the insn we want to queue.  */
          && (*insn_operand_predicate[icode][0]) (op0, mode)
          && (*insn_operand_predicate[icode][1]) (op0, mode))
        {
          if (! (*insn_operand_predicate[icode][2]) (op1, mode))
            op1 = force_reg (mode, op1);

          return enqueue_insn (op0, GEN_FCN (icode) (op0, op0, op1));
        }
      if (icode != (int) CODE_FOR_nothing && GET_CODE (op0) == MEM)
        {
          rtx addr = (general_operand (XEXP (op0, 0), mode)
                      ? force_reg (Pmode, XEXP (op0, 0))
                      : copy_to_reg (XEXP (op0, 0)));
          rtx temp, result;

          op0 = change_address (op0, VOIDmode, addr);
          temp = force_reg (GET_MODE (op0), op0);
          if (! (*insn_operand_predicate[icode][2]) (op1, mode))
            op1 = force_reg (mode, op1);

          /* The increment queue is LIFO, thus we have to `queue'
             the instructions in reverse order.  */
          enqueue_insn (op0, gen_move_insn (op0, temp));
          result = enqueue_insn (temp, GEN_FCN (icode) (temp, temp, op1));
          return result;
        }
    }

  /* Preincrement, or we can't increment with one simple insn.  */
  if (post)
    /* Save a copy of the value before inc or dec, to return it later.  */
    temp = value = copy_to_reg (op0);
  else
    /* Arrange to return the incremented value.  */
    /* Copy the rtx because expand_binop will protect from the queue,
       and the results of that would be invalid for us to return
       if our caller does emit_queue before using our result.  */
    temp = copy_rtx (value = op0);

  /* Increment however we can.  */
  op1 = expand_binop (mode, this_optab, value, op1,
                      flag_check_memory_usage ? NULL_RTX : op0,
                      TREE_UNSIGNED (TREE_TYPE (exp)), OPTAB_LIB_WIDEN);
  /* Make sure the value is stored into OP0.  */
  if (op1 != op0)
    emit_move_insn (op0, op1);

  return temp;
}
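/* As an illustration (not taken from this file), for `y = x++;' where X is
   in memory and the target has a suitable add pattern, the function above
   returns the old value for Y and merely queues the add; the increment insn
   is emitted later by emit_queue, avoiding the shuffle of saving the old
   value in yet another register first.  */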
\f
/* Expand all function calls contained within EXP, innermost ones first.
   But don't look within expressions that have sequence points.
   For each CALL_EXPR, record the rtx for its value
   in the CALL_EXPR_RTL field.  */

static void
preexpand_calls (exp)
     tree exp;
{
  register int nops, i;
  int type = TREE_CODE_CLASS (TREE_CODE (exp));

  if (! do_preexpand_calls)
    return;

  /* Only expressions and references can contain calls.  */

  if (type != 'e' && type != '<' && type != '1' && type != '2' && type != 'r')
    return;

  switch (TREE_CODE (exp))
    {
    case CALL_EXPR:
      /* Do nothing if already expanded.  */
      if (CALL_EXPR_RTL (exp) != 0
          /* Do nothing if the call returns a variable-sized object.  */
          || TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) != INTEGER_CST
          /* Do nothing to built-in functions.  */
          || (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
              && (TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
                  == FUNCTION_DECL)
              && DECL_BUILT_IN (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))))
        return;

      CALL_EXPR_RTL (exp) = expand_call (exp, NULL_RTX, 0);
      return;

    case COMPOUND_EXPR:
    case COND_EXPR:
    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
      /* If we find one of these, then we can be sure
         the adjust will be done for it (since it makes jumps).
         Do it now, so that if this is inside an argument
         of a function, we don't get the stack adjustment
         after some other args have already been pushed.  */
      do_pending_stack_adjust ();
      return;

    case BLOCK:
    case RTL_EXPR:
    case WITH_CLEANUP_EXPR:
    case CLEANUP_POINT_EXPR:
    case TRY_CATCH_EXPR:
      return;

    case SAVE_EXPR:
      if (SAVE_EXPR_RTL (exp) != 0)
        return;

    default:
      break;
    }

  nops = tree_code_length[(int) TREE_CODE (exp)];
  for (i = 0; i < nops; i++)
    if (TREE_OPERAND (exp, i) != 0)
      {
        type = TREE_CODE_CLASS (TREE_CODE (TREE_OPERAND (exp, i)));
        if (type == 'e' || type == '<' || type == '1' || type == '2'
            || type == 'r')
          preexpand_calls (TREE_OPERAND (exp, i));
      }
}
\f
/* At the start of a function, record that we have no previously-pushed
   arguments waiting to be popped.  */

void
init_pending_stack_adjust ()
{
  pending_stack_adjust = 0;
}

/* When exiting from a function, if safe, clear out any pending stack adjust
   so the adjustment won't get done.

   Note, if the current function calls alloca, then it must have a
   frame pointer regardless of the value of flag_omit_frame_pointer.  */

void
clear_pending_stack_adjust ()
{
#ifdef EXIT_IGNORE_STACK
  if (optimize > 0
      && (! flag_omit_frame_pointer || current_function_calls_alloca)
      && EXIT_IGNORE_STACK
      && ! (DECL_INLINE (current_function_decl) && ! flag_no_inline)
      && ! flag_inline_functions)
    pending_stack_adjust = 0;
#endif
}

/* Pop any previously-pushed arguments that have not been popped yet.  */

void
do_pending_stack_adjust ()
{
  if (inhibit_defer_pop == 0)
    {
      if (pending_stack_adjust != 0)
        adjust_stack (GEN_INT (pending_stack_adjust));
      pending_stack_adjust = 0;
    }
}
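/* Taken together, these three functions batch argument popping: each call
   that leaves pushed arguments behind only bumps pending_stack_adjust, and a
   single adjust_stack in do_pending_stack_adjust later reclaims the space
   for all of them at once, unless inhibit_defer_pop forbids deferring.  */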
\f
/* Expand conditional expressions.  */

/* Generate code to evaluate EXP and jump to LABEL if the value is zero.
   LABEL is an rtx of code CODE_LABEL, in this function and all the
   functions here.  */

void
jumpifnot (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, label, NULL_RTX);
}

/* Generate code to evaluate EXP and jump to LABEL if the value is nonzero.  */

void
jumpif (exp, label)
     tree exp;
     rtx label;
{
  do_jump (exp, NULL_RTX, label);
}
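/* As a usage sketch, expanding `if (cond) ...' typically comes down to
   `jumpifnot (cond, else_label)': control falls through into the THEN code
   when COND is nonzero and branches around it otherwise.  */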

/* Generate code to evaluate EXP and jump to IF_FALSE_LABEL if
   the result is zero, or IF_TRUE_LABEL if the result is one.
   Either of IF_FALSE_LABEL and IF_TRUE_LABEL may be zero,
   meaning fall through in that case.

   do_jump always does any pending stack adjust except when it does not
   actually perform a jump.  An example where there is no jump
   is when EXP is `(foo (), 0)' and IF_FALSE_LABEL is null.

   This function is responsible for optimizing cases such as
   &&, || and comparison operators in EXP.  */

void
do_jump (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  register enum tree_code code = TREE_CODE (exp);
  /* Some cases need to create a label to jump to
     in order to properly fall through.
     These cases set DROP_THROUGH_LABEL nonzero.  */
  rtx drop_through_label = 0;
  rtx temp;
  rtx comparison = 0;
  int i;
  tree type;
  enum machine_mode mode;

#ifdef MAX_INTEGER_COMPUTATION_MODE
  check_max_integer_computation_mode (exp);
#endif

  emit_queue ();

  switch (code)
    {
    case ERROR_MARK:
      break;

    case INTEGER_CST:
      temp = integer_zerop (exp) ? if_false_label : if_true_label;
      if (temp)
        emit_jump (temp);
      break;

#if 0
    /* This is not true with #pragma weak  */
    case ADDR_EXPR:
      /* The address of something can never be zero.  */
      if (if_true_label)
        emit_jump (if_true_label);
      break;
#endif

    case NOP_EXPR:
      if (TREE_CODE (TREE_OPERAND (exp, 0)) == COMPONENT_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == BIT_FIELD_REF
          || TREE_CODE (TREE_OPERAND (exp, 0)) == ARRAY_REF)
        goto normal;
    case CONVERT_EXPR:
      /* If we are narrowing the operand, we have to do the compare in the
         narrower mode.  */
      if ((TYPE_PRECISION (TREE_TYPE (exp))
           < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        goto normal;
    case NON_LVALUE_EXPR:
    case REFERENCE_EXPR:
    case ABS_EXPR:
    case NEGATE_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* These cannot change zero->non-zero or vice versa.  */
      do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);
      break;

#if 0
    /* This is never less insns than evaluating the PLUS_EXPR followed by
       a test and can be longer if the test is eliminated.  */
    case PLUS_EXPR:
      /* Reduce to minus.  */
      exp = build (MINUS_EXPR, TREE_TYPE (exp),
                   TREE_OPERAND (exp, 0),
                   fold (build1 (NEGATE_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
                                 TREE_OPERAND (exp, 1))));
      /* Process as MINUS.  */
#endif

    case MINUS_EXPR:
      /* Non-zero iff operands of minus differ.  */
      comparison = compare (build (NE_EXPR, TREE_TYPE (exp),
                                   TREE_OPERAND (exp, 0),
                                   TREE_OPERAND (exp, 1)),
                            NE, NE);
      break;

    case BIT_AND_EXPR:
      /* If we are AND'ing with a small constant, do this comparison in the
         smallest type that fits.  If the machine doesn't have comparisons
         that small, it will be converted back to the wider comparison.
         This helps if we are testing the sign bit of a narrower object.
         combine can't do this for us because it can't know whether a
         ZERO_EXTRACT or a compare in a smaller mode exists, but we do.  */

      if (! SLOW_BYTE_ACCESS
          && TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST
          && TYPE_PRECISION (TREE_TYPE (exp)) <= HOST_BITS_PER_WIDE_INT
          && (i = floor_log2 (TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)))) >= 0
          && (mode = mode_for_size (i + 1, MODE_INT, 0)) != BLKmode
          && (type = type_for_mode (mode, 1)) != 0
          && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
          && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
              != CODE_FOR_nothing))
        {
          do_jump (convert (type, exp), if_false_label, if_true_label);
          break;
        }
      goto normal;

    case TRUTH_NOT_EXPR:
      do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);
      break;

    case TRUTH_ANDIF_EXPR:
      if (if_false_label == 0)
        if_false_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), if_false_label, NULL_RTX);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;

    case TRUTH_ORIF_EXPR:
      if (if_true_label == 0)
        if_true_label = drop_through_label = gen_label_rtx ();
      do_jump (TREE_OPERAND (exp, 0), NULL_RTX, if_true_label);
      start_cleanup_deferral ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      end_cleanup_deferral ();
      break;
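      /* To illustrate the two cases above (a sketch, not code from this
         file): for `if (a && b)' the TRUTH_ANDIF_EXPR case emits

              jump to IF_FALSE_LABEL if A is zero
              jump to IF_FALSE_LABEL if B is zero
              ... fall through when both are nonzero ...

         so B is never evaluated once A is known to be zero, exactly the
         short-circuit behavior C requires; TRUTH_ORIF_EXPR is the dual.  */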

    case COMPOUND_EXPR:
      push_temp_slots ();
      expand_expr (TREE_OPERAND (exp, 0), const0_rtx, VOIDmode, 0);
      preserve_temp_slots (NULL_RTX);
      free_temp_slots ();
      pop_temp_slots ();
      emit_queue ();
      do_pending_stack_adjust ();
      do_jump (TREE_OPERAND (exp, 1), if_false_label, if_true_label);
      break;

    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case ARRAY_REF:
      {
        int bitsize, bitpos, unsignedp;
        enum machine_mode mode;
        tree type;
        tree offset;
        int volatilep = 0;
        int alignment;

        /* Get description of this reference.  We don't actually care
           about the underlying object here.  */
        get_inner_reference (exp, &bitsize, &bitpos, &offset,
                             &mode, &unsignedp, &volatilep,
                             &alignment);

        type = type_for_size (bitsize, unsignedp);
        if (! SLOW_BYTE_ACCESS
            && type != 0 && bitsize >= 0
            && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (exp))
            && (cmp_optab->handlers[(int) TYPE_MODE (type)].insn_code
                != CODE_FOR_nothing))
          {
            do_jump (convert (type, exp), if_false_label, if_true_label);
            break;
          }
        goto normal;
      }

    case COND_EXPR:
      /* Do (a ? 1 : 0) and (a ? 0 : 1) as special cases.  */
      if (integer_onep (TREE_OPERAND (exp, 1))
          && integer_zerop (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

      else if (integer_zerop (TREE_OPERAND (exp, 1))
               && integer_onep (TREE_OPERAND (exp, 2)))
        do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

      else
        {
          register rtx label1 = gen_label_rtx ();
          drop_through_label = gen_label_rtx ();

          do_jump (TREE_OPERAND (exp, 0), label1, NULL_RTX);

          start_cleanup_deferral ();
          /* Now the THEN-expression.  */
          do_jump (TREE_OPERAND (exp, 1),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          /* In case the do_jump just above never jumps.  */
          do_pending_stack_adjust ();
          emit_label (label1);

          /* Now the ELSE-expression.  */
          do_jump (TREE_OPERAND (exp, 2),
                   if_false_label ? if_false_label : drop_through_label,
                   if_true_label ? if_true_label : drop_through_label);
          end_cleanup_deferral ();
        }
      break;

    case EQ_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ANDIF_EXPR, TREE_TYPE (exp),
                       fold (build (EQ_EXPR, TREE_TYPE (exp),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))),
                       fold (build (EQ_EXPR, TREE_TYPE (exp),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_true_label, if_false_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (TYPE_MODE (inner_type)))
          do_jump_by_parts_equality (exp, if_false_label, if_true_label);
        else
          comparison = compare (exp, EQ, EQ);
        break;
      }

    case NE_EXPR:
      {
        tree inner_type = TREE_TYPE (TREE_OPERAND (exp, 0));

        if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_FLOAT
            || GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_COMPLEX_INT)
          {
            tree exp0 = save_expr (TREE_OPERAND (exp, 0));
            tree exp1 = save_expr (TREE_OPERAND (exp, 1));
            do_jump
              (fold
               (build (TRUTH_ORIF_EXPR, TREE_TYPE (exp),
                       fold (build (NE_EXPR, TREE_TYPE (exp),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (REALPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))),
                       fold (build (NE_EXPR, TREE_TYPE (exp),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp0)),
                                    fold (build1 (IMAGPART_EXPR,
                                                  TREE_TYPE (inner_type),
                                                  exp1)))))),
               if_false_label, if_true_label);
          }

        else if (integer_zerop (TREE_OPERAND (exp, 1)))
          do_jump (TREE_OPERAND (exp, 0), if_false_label, if_true_label);

        else if (GET_MODE_CLASS (TYPE_MODE (inner_type)) == MODE_INT
                 && !can_compare_p (TYPE_MODE (inner_type)))
          do_jump_by_parts_equality (exp, if_true_label, if_false_label);
        else
          comparison = compare (exp, NE, NE);
        break;
      }

    case LT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_false_label, if_true_label);
      else
        comparison = compare (exp, LT, LTU);
      break;

    case LE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_true_label, if_false_label);
      else
        comparison = compare (exp, LE, LEU);
      break;

    case GT_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 0, if_false_label, if_true_label);
      else
        comparison = compare (exp, GT, GTU);
      break;

    case GE_EXPR:
      if ((GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))))
           == MODE_INT)
          && !can_compare_p (TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)))))
        do_jump_by_parts_greater (exp, 1, if_true_label, if_false_label);
      else
        comparison = compare (exp, GE, GEU);
      break;

    default:
    normal:
      temp = expand_expr (exp, NULL_RTX, VOIDmode, 0);
#if 0
      /* This is not needed any more and causes poor code since it causes
         comparisons and tests from non-SI objects to have different code
         sequences.  */
      /* Copy to register to avoid generating bad insns by cse
         from (set (mem ...) (arithop))  (set (cc0) (mem ...)).  */
      if (!cse_not_expected && GET_CODE (temp) == MEM)
        temp = copy_to_reg (temp);
#endif
      do_pending_stack_adjust ();
      if (GET_CODE (temp) == CONST_INT)
        comparison = (temp == const0_rtx ? const0_rtx : const_true_rtx);
      else if (GET_CODE (temp) == LABEL_REF)
        comparison = const_true_rtx;
      else if (GET_MODE_CLASS (GET_MODE (temp)) == MODE_INT
               && !can_compare_p (GET_MODE (temp)))
        /* Note swapping the labels gives us not-equal.  */
        do_jump_by_parts_equality_rtx (temp, if_true_label, if_false_label);
      else if (GET_MODE (temp) != VOIDmode)
        comparison = compare_from_rtx (temp, CONST0_RTX (GET_MODE (temp)),
                                       NE, TREE_UNSIGNED (TREE_TYPE (exp)),
                                       GET_MODE (temp), NULL_RTX, 0);
      else
        abort ();
    }

  /* Do any postincrements in the expression that was tested.  */
  emit_queue ();

  /* If COMPARISON is nonzero here, it is an rtx that can be substituted
     straight into a conditional jump instruction as the jump condition.
     Otherwise, all the work has been done already.  */

  if (comparison == const_true_rtx)
    {
      if (if_true_label)
        emit_jump (if_true_label);
    }
  else if (comparison == const0_rtx)
    {
      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (comparison)
    do_jump_for_compare (comparison, if_false_label, if_true_label);

  if (drop_through_label)
    {
      /* If do_jump produces code that might be jumped around,
         do any stack adjusts from that code, before the place
         where control merges in.  */
      do_pending_stack_adjust ();
      emit_label (drop_through_label);
    }
}
\f
/* Given a comparison expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.
   The code of EXP is ignored; we always test GT if SWAP is 0,
   and LT if SWAP is 1.  */

static void
do_jump_by_parts_greater (exp, swap, if_false_label, if_true_label)
     tree exp;
     int swap;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, swap), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, !swap), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int unsignedp = TREE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
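/* As a hypothetical example, a signed `>' on DImode values on a 32-bit
   target without a DImode compare comes out of the loop above as: compare
   the high words with GT, branching to IF_TRUE_LABEL if greater and to
   IF_FALSE_LABEL if they otherwise differ; then compare the low words with
   GTU, since only the high-order word carries the sign.  */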

/* Compare OP0 with OP1, word at a time, in mode MODE.
   UNSIGNEDP says to do unsigned comparison.
   Jump to IF_TRUE_LABEL if OP0 is greater, IF_FALSE_LABEL otherwise.  */

void
do_jump_by_parts_greater_rtx (mode, unsignedp, op0, op1, if_false_label, if_true_label)
     enum machine_mode mode;
     int unsignedp;
     rtx op0, op1;
     rtx if_false_label, if_true_label;
{
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  rtx drop_through_label = 0;
  int i;

  if (! if_true_label || ! if_false_label)
    drop_through_label = gen_label_rtx ();
  if (! if_true_label)
    if_true_label = drop_through_label;
  if (! if_false_label)
    if_false_label = drop_through_label;

  /* Compare a word at a time, high order first.  */
  for (i = 0; i < nwords; i++)
    {
      rtx comp;
      rtx op0_word, op1_word;

      if (WORDS_BIG_ENDIAN)
        {
          op0_word = operand_subword_force (op0, i, mode);
          op1_word = operand_subword_force (op1, i, mode);
        }
      else
        {
          op0_word = operand_subword_force (op0, nwords - 1 - i, mode);
          op1_word = operand_subword_force (op1, nwords - 1 - i, mode);
        }

      /* All but high-order word must be compared as unsigned.  */
      comp = compare_from_rtx (op0_word, op1_word,
                               (unsignedp || i > 0) ? GTU : GT,
                               unsignedp, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_true_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_true_label);

      /* Consider lower words only if these are equal.  */
      comp = compare_from_rtx (op0_word, op1_word, NE, unsignedp, word_mode,
                               NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, NULL_RTX, if_false_label);
    }

  if (if_false_label)
    emit_jump (if_false_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given an EQ_EXPR expression EXP for values too wide to be compared
   with one insn, test the comparison and jump to the appropriate label.  */

static void
do_jump_by_parts_equality (exp, if_false_label, if_true_label)
     tree exp;
     rtx if_false_label, if_true_label;
{
  rtx op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  rtx op1 = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
  int nwords = (GET_MODE_SIZE (mode) / UNITS_PER_WORD);
  int i;
  rtx drop_through_label = 0;

  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i, mode),
                                   operand_subword_force (op1, i, mode),
                                   EQ, TREE_UNSIGNED (TREE_TYPE (exp)),
                                   word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);
  if (drop_through_label)
    emit_label (drop_through_label);
}
\f
/* Jump according to whether OP0 is 0.
   We assume that OP0 has an integer mode that is too wide
   for the available compare insns.  */

void
do_jump_by_parts_equality_rtx (op0, if_false_label, if_true_label)
     rtx op0;
     rtx if_false_label, if_true_label;
{
  int nwords = GET_MODE_SIZE (GET_MODE (op0)) / UNITS_PER_WORD;
  rtx part;
  int i;
  rtx drop_through_label = 0;

  /* The fastest way of doing this comparison on almost any machine is to
     "or" all the words and compare the result.  If all have to be loaded
     from memory and this is a very wide item, it's possible this may
     be slower, but that's highly unlikely.  */

  part = gen_reg_rtx (word_mode);
  emit_move_insn (part, operand_subword_force (op0, 0, GET_MODE (op0)));
  for (i = 1; i < nwords && part != 0; i++)
    part = expand_binop (word_mode, ior_optab, part,
                         operand_subword_force (op0, i, GET_MODE (op0)),
                         part, 1, OPTAB_WIDEN);

  if (part != 0)
    {
      rtx comp = compare_from_rtx (part, const0_rtx, EQ, 1, word_mode,
                                   NULL_RTX, 0);

      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp == const0_rtx)
        emit_jump (if_true_label);
      else
        do_jump_for_compare (comp, if_false_label, if_true_label);

      return;
    }

  /* If we couldn't do the "or" simply, do this with a series of compares.  */
  if (! if_false_label)
    drop_through_label = if_false_label = gen_label_rtx ();

  for (i = 0; i < nwords; i++)
    {
      rtx comp = compare_from_rtx (operand_subword_force (op0, i,
                                                          GET_MODE (op0)),
                                   const0_rtx, EQ, 1, word_mode, NULL_RTX, 0);
      if (comp == const_true_rtx)
        emit_jump (if_false_label);
      else if (comp != const0_rtx)
        do_jump_for_compare (comp, if_false_label, NULL_RTX);
    }

  if (if_true_label)
    emit_jump (if_true_label);

  if (drop_through_label)
    emit_label (drop_through_label);
}

/* Given a comparison expression in rtl form, output conditional branches to
   IF_TRUE_LABEL, IF_FALSE_LABEL, or both.  */

static void
do_jump_for_compare (comparison, if_false_label, if_true_label)
     rtx comparison, if_false_label, if_true_label;
{
  if (if_true_label)
    {
      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_true_label));
      else
        abort ();

      if (if_false_label)
        emit_jump (if_false_label);
    }
  else if (if_false_label)
    {
      rtx insn;
      rtx prev = get_last_insn ();
      rtx branch = 0;

      /* Output the branch with the opposite condition.  Then try to invert
         what is generated.  If more than one insn is a branch, or if the
         branch is not the last insn written, abort.  If we can't invert
         the branch, make a true label, redirect this jump to that,
         emit a jump to the false label and define the true label.  */

      if (bcc_gen_fctn[(int) GET_CODE (comparison)] != 0)
        emit_jump_insn ((*bcc_gen_fctn[(int) GET_CODE (comparison)]) (if_false_label));
      else
        abort ();

      /* Here we get the first insn that was just emitted.  It used to be the
         case that, on some machines, emitting the branch would discard
         the previous compare insn and emit a replacement.  This isn't
         done anymore, but abort if we see that PREV is deleted.  */

      if (prev == 0)
        insn = get_insns ();
      else if (INSN_DELETED_P (prev))
        abort ();
      else
        insn = NEXT_INSN (prev);

      for (; insn; insn = NEXT_INSN (insn))
        if (GET_CODE (insn) == JUMP_INSN)
          {
            if (branch)
              abort ();
            branch = insn;
          }

      if (branch != get_last_insn ())
        abort ();

      JUMP_LABEL (branch) = if_false_label;
      if (! invert_jump (branch, if_false_label))
        {
          if_true_label = gen_label_rtx ();
          redirect_jump (branch, if_true_label);
          emit_jump (if_false_label);
          emit_label (if_true_label);
        }
    }
}
\f
/* Generate code for a comparison expression EXP
   (including code to compute the values to be compared)
   and set (CC0) according to the result.
   SIGNED_CODE should be the rtx operation for this comparison for
   signed data; UNSIGNED_CODE, likewise for use if data is unsigned.

   We force a stack adjustment unless there are currently
   things pushed on the stack that aren't yet used.  */

static rtx
compare (exp, signed_code, unsigned_code)
     register tree exp;
     enum rtx_code signed_code, unsigned_code;
{
  register rtx op0
    = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, VOIDmode, 0);
  register rtx op1
    = expand_expr (TREE_OPERAND (exp, 1), NULL_RTX, VOIDmode, 0);
  register tree type = TREE_TYPE (TREE_OPERAND (exp, 0));
  register enum machine_mode mode = TYPE_MODE (type);
  int unsignedp = TREE_UNSIGNED (type);
  enum rtx_code code = unsignedp ? unsigned_code : signed_code;

#ifdef HAVE_canonicalize_funcptr_for_compare
  /* If function pointers need to be "canonicalized" before they can
     be reliably compared, then canonicalize them.  */
  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
          == FUNCTION_TYPE))
    {
      rtx new_op0 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op0, op0));
      op0 = new_op0;
    }

  if (HAVE_canonicalize_funcptr_for_compare
      && TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
      && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
          == FUNCTION_TYPE))
    {
      rtx new_op1 = gen_reg_rtx (mode);

      emit_insn (gen_canonicalize_funcptr_for_compare (new_op1, op1));
      op1 = new_op1;
    }
#endif

  return compare_from_rtx (op0, op1, code, unsignedp, mode,
                           ((mode == BLKmode)
                            ? expr_size (TREE_OPERAND (exp, 0)) : NULL_RTX),
                           TYPE_ALIGN (TREE_TYPE (exp)) / BITS_PER_UNIT);
}

/* Like compare but expects the values to compare as two rtx's.
   The decision as to signed or unsigned comparison must be made by the caller.

   If MODE is BLKmode, SIZE is an RTX giving the size of the objects being
   compared.

   If ALIGN is non-zero, it is the alignment of this type; if zero, the
   size of MODE should be used.  */

rtx
compare_from_rtx (op0, op1, code, unsignedp, mode, size, align)
     register rtx op0, op1;
     enum rtx_code code;
     int unsignedp;
     enum machine_mode mode;
     rtx size;
     int align;
{
  rtx tem;

  /* If one operand is constant, make it the second one.  Only do this
     if the other operand is not constant as well.  */

  if ((CONSTANT_P (op0) && ! CONSTANT_P (op1))
      || (GET_CODE (op0) == CONST_INT && GET_CODE (op1) != CONST_INT))
    {
      tem = op0;
      op0 = op1;
      op1 = tem;
      code = swap_condition (code);
    }

  if (flag_force_mem)
    {
      op0 = force_not_mem (op0);
      op1 = force_not_mem (op1);
    }

  do_pending_stack_adjust ();

  if (GET_CODE (op0) == CONST_INT && GET_CODE (op1) == CONST_INT
      && (tem = simplify_relational_operation (code, mode, op0, op1)) != 0)
    return tem;

#if 0
  /* There's no need to do this now that combine.c can eliminate lots of
     sign extensions.  This can be less efficient in certain cases on other
     machines.  */

  /* If this is a signed equality comparison, we can do it as an
     unsigned comparison since zero-extension is cheaper than sign
     extension and comparisons with zero are done as unsigned.  This is
     the case even on machines that can do fast sign extension, since
     zero-extension is easier to combine with other operations than
     sign-extension is.  If we are comparing against a constant, we must
     convert it to what it would look like unsigned.  */
  if ((code == EQ || code == NE) && ! unsignedp
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      if (GET_CODE (op1) == CONST_INT
          && (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0))) != INTVAL (op1))
        op1 = GEN_INT (INTVAL (op1) & GET_MODE_MASK (GET_MODE (op0)));
      unsignedp = 1;
    }
#endif

  emit_cmp_insn (op0, op1, code, size, mode, unsignedp, align);

  return gen_rtx_fmt_ee (code, VOIDmode, cc0_rtx, const0_rtx);
}
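/* For instance, a call such as `compare_from_rtx (const1_rtx, reg, LT, ...)'
   is first rewritten above into a comparison of REG against the constant
   under GT via swap_condition, so everything past the swap only ever sees a
   constant as the second operand.  */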
\f
/* Generate code to calculate EXP using a store-flag instruction
   and return an rtx for the result.  EXP is either a comparison
   or a TRUTH_NOT_EXPR whose operand is a comparison.

   If TARGET is nonzero, store the result there if convenient.

   If ONLY_CHEAP is non-zero, only do this if it is likely to be very
   cheap.

   Return zero if there is no suitable set-flag instruction
   available on this machine.

   Once expand_expr has been called on the arguments of the comparison,
   we are committed to doing the store flag, since it is not safe to
   re-evaluate the expression.  We emit the store-flag insn by calling
   emit_store_flag, but only expand the arguments if we have a reason
   to believe that emit_store_flag will be successful.  If we think that
   it will, but it isn't, we have to simulate the store-flag with a
   set/jump/set sequence.  */

static rtx
do_store_flag (exp, target, mode, only_cheap)
     tree exp;
     rtx target;
     enum machine_mode mode;
     int only_cheap;
{
  enum rtx_code code;
  tree arg0, arg1, type;
  tree tem;
  enum machine_mode operand_mode;
  int invert = 0;
  int unsignedp;
  rtx op0, op1;
  enum insn_code icode;
  rtx subtarget = target;
  rtx result, label;

  /* If this is a TRUTH_NOT_EXPR, set a flag indicating we must invert the
     result at the end.  We can't simply invert the test since it would
     have already been inverted if it were valid.  This case occurs for
     some floating-point comparisons.  */

  if (TREE_CODE (exp) == TRUTH_NOT_EXPR)
    invert = 1, exp = TREE_OPERAND (exp, 0);

  arg0 = TREE_OPERAND (exp, 0);
  arg1 = TREE_OPERAND (exp, 1);
  type = TREE_TYPE (arg0);
  operand_mode = TYPE_MODE (type);
  unsignedp = TREE_UNSIGNED (type);

  /* We won't bother with BLKmode store-flag operations because it would mean
     passing a lot of information to emit_store_flag.  */
  if (operand_mode == BLKmode)
    return 0;

  /* We won't bother with store-flag operations involving function pointers
     when function pointers must be canonicalized before comparisons.  */
#ifdef HAVE_canonicalize_funcptr_for_compare
  if (HAVE_canonicalize_funcptr_for_compare
      && ((TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == POINTER_TYPE
           && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 0))))
               == FUNCTION_TYPE))
          || (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 1))) == POINTER_TYPE
              && (TREE_CODE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))))
                  == FUNCTION_TYPE))))
    return 0;
#endif

  STRIP_NOPS (arg0);
  STRIP_NOPS (arg1);

  /* Get the rtx comparison code to use.  We know that EXP is a comparison
     operation of some type.  Some comparisons against 1 and -1 can be
     converted to comparisons with zero.  Do so here so that the tests
     below will be aware that we have a comparison with zero.  These
     tests will not catch constants in the first operand, but constants
     are rarely passed as the first operand.  */

  switch (TREE_CODE (exp))
    {
    case EQ_EXPR:
      code = EQ;
      break;
    case NE_EXPR:
      code = NE;
      break;
    case LT_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? LEU : LE;
      else
        code = unsignedp ? LTU : LT;
      break;
    case LE_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = LT;
      else
        code = unsignedp ? LEU : LE;
      break;
    case GT_EXPR:
      if (! unsignedp && integer_all_onesp (arg1))
        arg1 = integer_zero_node, code = GE;
      else
        code = unsignedp ? GTU : GT;
      break;
    case GE_EXPR:
      if (integer_onep (arg1))
        arg1 = integer_zero_node, code = unsignedp ? GTU : GT;
      else
        code = unsignedp ? GEU : GE;
      break;
    default:
      abort ();
    }
bbf6f052 11159
b93a436e
JL
11160 /* Put a constant second. */
11161 if (TREE_CODE (arg0) == REAL_CST || TREE_CODE (arg0) == INTEGER_CST)
11162 {
11163 tem = arg0; arg0 = arg1; arg1 = tem;
11164 code = swap_condition (code);
ca695ac9 11165 }
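
  /* E.g. 3 < X becomes X > 3 here; note that swap_condition turns LT
     into GT (operands exchanged), not into GE (negation).  */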

  /* If this is an equality or inequality test of a single bit, we can
     do this by shifting the bit being tested to the low-order bit and
     masking the result with the constant 1.  If the condition was EQ,
     we xor it with 1.  This does not require an scc insn and is faster
     than an scc insn even if we have it.  */
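
  /* As a sketch of the transformation: (X & 0x10) != 0 is computed as
     (X >> 4) & 1, and (X & 0x10) == 0 as ((X >> 4) ^ 1) & 1; see the
     expand_shift, xor_optab and expand_and calls below.  */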

  if ((code == NE || code == EQ)
      && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
      && integer_pow2p (TREE_OPERAND (arg0, 1)))
    {
      tree inner = TREE_OPERAND (arg0, 0);
      int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
      int ops_unsignedp;

      /* If INNER is a right shift of a constant and it plus BITNUM does
         not overflow, adjust BITNUM and INNER.  */

      if (TREE_CODE (inner) == RSHIFT_EXPR
          && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
          && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
          && (bitnum + TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
              < TYPE_PRECISION (type)))
        {
          bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
          inner = TREE_OPERAND (inner, 0);
        }
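
      /* For instance, ((X >> 3) & 4) != 0 tests bit 2 of X >> 3, which
         is bit 5 of X itself, so BITNUM becomes 5 and INNER becomes X.  */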

      /* If we are going to be able to omit the AND below, we must do our
         operations as unsigned.  If we must use the AND, we have a choice.
         Normally unsigned is faster, but for some machines signed is.  */
      ops_unsignedp = (bitnum == TYPE_PRECISION (type) - 1 ? 1
#ifdef LOAD_EXTEND_OP
                       : (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND ? 0 : 1)
#else
                       : 1
#endif
                       );
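
      /* The AND can be omitted only when the bit being tested is the
         sign bit: e.g. for a 32-bit operand and BITNUM 31, an unsigned
         (logical) shift right by 31 already leaves just 0 or 1.  */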

      if (subtarget == 0 || GET_CODE (subtarget) != REG
          || GET_MODE (subtarget) != operand_mode
          || ! safe_from_p (subtarget, inner, 1))
        subtarget = 0;

      op0 = expand_expr (inner, subtarget, VOIDmode, 0);

      if (bitnum != 0)
        op0 = expand_shift (RSHIFT_EXPR, GET_MODE (op0), op0,
                            size_int (bitnum), subtarget, ops_unsignedp);

      if (GET_MODE (op0) != mode)
        op0 = convert_to_mode (mode, op0, ops_unsignedp);

      if ((code == EQ && ! invert) || (code == NE && invert))
        op0 = expand_binop (mode, xor_optab, op0, const1_rtx, subtarget,
                            ops_unsignedp, OPTAB_LIB_WIDEN);

      /* Put the AND last so it can combine with more things.  */
      if (bitnum != TYPE_PRECISION (type) - 1)
        op0 = expand_and (op0, const1_rtx, subtarget);

      return op0;
    }

  /* Now see if we are likely to be able to do this.  Return if not.  */
  if (! can_compare_p (operand_mode))
    return 0;
  icode = setcc_gen_code[(int) code];
  if (icode == CODE_FOR_nothing
      || (only_cheap && insn_operand_mode[(int) icode][0] != mode))
    {
      /* We can only do this if it is one of the special cases that
         can be handled without an scc insn.  */
      if ((code == LT && integer_zerop (arg1))
          || (! only_cheap && code == GE && integer_zerop (arg1)))
        ;
      else if (BRANCH_COST >= 0
               && ! only_cheap && (code == NE || code == EQ)
               && TREE_CODE (type) != REAL_TYPE
               && ((abs_optab->handlers[(int) operand_mode].insn_code
                    != CODE_FOR_nothing)
                   || (ffs_optab->handlers[(int) operand_mode].insn_code
                       != CODE_FOR_nothing)))
        ;
      else
        return 0;
    }

  preexpand_calls (exp);
  if (subtarget == 0 || GET_CODE (subtarget) != REG
      || GET_MODE (subtarget) != operand_mode
      || ! safe_from_p (subtarget, arg1, 1))
    subtarget = 0;

  op0 = expand_expr (arg0, subtarget, VOIDmode, 0);
  op1 = expand_expr (arg1, NULL_RTX, VOIDmode, 0);

  if (target == 0)
    target = gen_reg_rtx (mode);

  /* Pass copies of OP0 and OP1 in case they contain a QUEUED.  This is
     safe because, if emit_store_flag does anything at all, it will
     succeed, and OP0 and OP1 will not be used subsequently.  */

  result = emit_store_flag (target, code,
                            queued_subexp_p (op0) ? copy_rtx (op0) : op0,
                            queued_subexp_p (op1) ? copy_rtx (op1) : op1,
                            operand_mode, unsignedp, 1);

  if (result)
    {
      if (invert)
        result = expand_binop (mode, xor_optab, result, const1_rtx,
                               result, 0, OPTAB_LIB_WIDEN);
      return result;
    }

  /* If this failed, we have to do this with set/compare/jump/set code.  */
  if (GET_CODE (target) != REG
      || reg_mentioned_p (target, op0) || reg_mentioned_p (target, op1))
    target = gen_reg_rtx (GET_MODE (target));

  emit_move_insn (target, invert ? const0_rtx : const1_rtx);
  result = compare_from_rtx (op0, op1, code, unsignedp,
                             operand_mode, NULL_RTX, 0);
  if (GET_CODE (result) == CONST_INT)
    return (((result == const0_rtx && ! invert)
             || (result != const0_rtx && invert))
            ? const0_rtx : const1_rtx);

  label = gen_label_rtx ();
  if (bcc_gen_fctn[(int) code] == 0)
    abort ();

  emit_jump_insn ((*bcc_gen_fctn[(int) code]) (label));
  emit_move_insn (target, invert ? const1_rtx : const0_rtx);
  emit_label (label);

  return target;
}
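
/* Illustration only, not part of the compiler: at the source level the
   set/compare/jump/set fallback above (non-inverted, code LT) computes
   the store-flag value the way this hypothetical helper does (names
   invented for the sketch).  */
#if 0
static int
store_flag_fallback_sketch (int op0, int op1)
{
  int target = 1;               /* emit_move_insn (target, const1_rtx) */
  if (op0 < op1)                /* bcc_gen_fctn[LT]: branch if true...  */
    goto done;                  /* ...skipping the move of zero below.  */
  target = 0;                   /* emit_move_insn (target, const0_rtx) */
done:
  return target;
}
#endif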

/* Generate a tablejump instruction (used for switch statements).  */

#ifdef HAVE_tablejump

/* INDEX is the value being switched on, with the lowest value
   in the table already subtracted.
   MODE is its expected mode (needed if INDEX is constant).
   RANGE is the length of the jump table.
   TABLE_LABEL is a CODE_LABEL rtx for the table itself.

   DEFAULT_LABEL is a CODE_LABEL rtx to jump to if the
   index value is out of range.  */

void
do_tablejump (index, mode, range, table_label, default_label)
     rtx index, range, table_label, default_label;
     enum machine_mode mode;
{
  register rtx temp, vector;

  /* Do an unsigned comparison (in the proper mode) between the index
     expression and the value which represents the length of the range.
     Since we just finished subtracting the lower bound of the range
     from the index expression, this comparison allows us to simultaneously
     check that the original index expression value is both greater than
     or equal to the minimum value of the range and less than or equal to
     the maximum value of the range.  */
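
  /* For instance, if the case labels span 10 ... 25, the lower bound 10
     has already been subtracted and RANGE is 15.  An original index of 9
     becomes (unsigned) -1, which compares above 15 just as 26 (now 16)
     does, so the single unsigned test rejects both ends at once.  */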

  emit_cmp_insn (index, range, GTU, NULL_RTX, mode, 1, 0);
  emit_jump_insn (gen_bgtu (default_label));

  /* If index is in range, it must fit in Pmode.
     Convert to Pmode so we can index with it.  */
  if (mode != Pmode)
    index = convert_to_mode (Pmode, index, 1);

  /* Don't let a MEM slip through, because then INDEX that comes
     out of PIC_CASE_VECTOR_ADDRESS won't be a valid address,
     and break_out_memory_refs will go to work on it and mess it up.  */
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic && GET_CODE (index) != REG)
    index = copy_to_mode_reg (Pmode, index);
#endif

  /* If flag_force_addr were to affect this address
     it could interfere with the tricky assumptions made
     about addresses that contain label-refs,
     which may be valid only very near the tablejump itself.  */
  /* ??? The only correct use of CASE_VECTOR_MODE is the one inside the
     GET_MODE_SIZE, because this indicates how large insns are.  The other
     uses should all be Pmode, because they are addresses.  This code
     could fail if addresses and insns are not the same size.  */
  index = gen_rtx_PLUS (Pmode,
                        gen_rtx_MULT (Pmode, index,
                                      GEN_INT (GET_MODE_SIZE (CASE_VECTOR_MODE))),
                        gen_rtx_LABEL_REF (Pmode, table_label));
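
  /* I.e. the entry address is table_label + index * entry_size; with
     4-byte table entries, entry 3 lives at table_label + 12.  */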
#ifdef PIC_CASE_VECTOR_ADDRESS
  if (flag_pic)
    index = PIC_CASE_VECTOR_ADDRESS (index);
  else
#endif
    index = memory_address_noforce (CASE_VECTOR_MODE, index);
  temp = gen_reg_rtx (CASE_VECTOR_MODE);
  vector = gen_rtx_MEM (CASE_VECTOR_MODE, index);
  RTX_UNCHANGING_P (vector) = 1;
  convert_move (temp, vector, 0);

  emit_jump_insn (gen_tablejump (temp, table_label));

  /* If we are generating PIC code or if the table is PC-relative, the
     table and JUMP_INSN must be adjacent, so don't output a BARRIER.  */
  if (! CASE_VECTOR_PC_RELATIVE && ! flag_pic)
    emit_barrier ();
}

#endif /* HAVE_tablejump */
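
/* Illustration only (a sketch using the GNU computed-goto extension,
   not code the compiler emits): the dispatch built by do_tablejump
   behaves like this hand-written jump table.  */
#if 0
static int
tablejump_sketch (unsigned int index)   /* lower bound already subtracted */
{
  static void *table[] = { &&case0, &&case1, &&case2 };
  if (index > 2)                /* the unsigned range check above */
    goto dflt;
  goto *table[index];           /* the tablejump itself */
 case0: return 10;
 case1: return 11;
 case2: return 12;
 dflt: return -1;
}
#endif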